signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AuthenticationService {

    /**
     * Authenticates a user using the given credentials and optional
     * authentication token, returning the authentication token associated
     * with the user's Guacamole session, which may be newly generated. If an
     * existing token is provided, the authentication procedure will attempt
     * to update or reuse the provided token, but it is possible that a new
     * token will be returned. Note that this function CANNOT return null.
     *
     * @param credentials
     *     The credentials to use when authenticating the user.
     *
     * @param token
     *     The authentication token to use if attempting to re-authenticate an
     *     existing session, or null to request a new token.
     *
     * @return
     *     The authentication token associated with the newly created or
     *     existing session.
     *
     * @throws GuacamoleException
     *     If the authentication or re-authentication attempt fails.
     */
    public String authenticate(Credentials credentials, String token)
            throws GuacamoleException {

        // Look up any session already associated with the provided token
        GuacamoleSession existingSession =
                (token != null) ? tokenSessionMap.get(token) : null;

        // Produce an up-to-date view of the authenticated user and the
        // user contexts associated with these credentials
        AuthenticatedUser authenticatedUser =
                getAuthenticatedUser(existingSession, credentials);
        List<DecoratedUserContext> userContexts =
                getUserContexts(existingSession, authenticatedUser, credentials);

        // When a session already exists, refresh it and reuse the caller's token
        if (existingSession != null) {
            existingSession.setAuthenticatedUser(authenticatedUser);
            existingSession.setUserContexts(userContexts);
            return token;
        }

        // Otherwise mint a fresh token/session pair
        String authToken = authTokenGenerator.getToken();
        tokenSessionMap.put(authToken,
                new GuacamoleSession(environment, authenticatedUser, userContexts));

        logger.debug("Login was successful for user \"{}\".",
                authenticatedUser.getIdentifier());

        return authToken;

    }

}
public class GetStageResult { /** * A map that defines the stage variables for a < a > Stage < / a > resource . Variable names can have alphanumeric and * underscore characters , and the values must match < code > [ A - Za - z0-9 - . _ ~ : / ? # & amp ; = , ] + < / code > . * @ param variables * A map that defines the stage variables for a < a > Stage < / a > resource . Variable names can have alphanumeric * and underscore characters , and the values must match < code > [ A - Za - z0-9 - . _ ~ : / ? # & amp ; = , ] + < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetStageResult withVariables ( java . util . Map < String , String > variables ) { } }
setVariables ( variables ) ; return this ;
public class AuroraProtocol {

    /**
     * Connect to the probable Aurora master. The Aurora master changes over
     * time; the only way to confirm that a server is the master is to ask it
     * directly.
     *
     * @param listener       Aurora failover listener to call back if a master is found
     * @param globalInfo     server global variables information
     * @param probableMaster probable master host
     */
    private static void searchProbableMaster(
            AuroraListener listener,
            final GlobalStateInfo globalInfo,
            HostAddress probableMaster) {

        AuroraProtocol protocol =
                getNewProtocol(listener.getProxy(), globalInfo, listener.getUrlParser());
        try {
            protocol.setHostAddress(probableMaster);
            protocol.connect();

            // The host answered, so it is no longer considered failed.
            listener.removeFromBlacklist(protocol.getHostAddress());

            if (listener.isMasterHostFailReconnect() && protocol.isMasterConnection()) {
                // A master connection was needed and this one is a master.
                protocol.setMustBeMasterConnection(true);
                listener.foundActiveMaster(protocol);
            } else if (listener.isSecondaryHostFailReconnect()
                    && !protocol.isMasterConnection()) {
                // A secondary connection was needed and this one is a secondary.
                protocol.setMustBeMasterConnection(false);
                listener.foundActiveSecondary(protocol);
            } else {
                // The connection does not fill the currently needed role.
                // NOTE(review): the freshly created replacement protocol below is
                // never connected or handed to the listener -- looks like dead
                // work or a leftover; confirm intent before changing.
                protocol.close();
                protocol = getNewProtocol(listener.getProxy(), globalInfo, listener.getUrlParser());
            }
        } catch (SQLException e) {
            // Connection attempt failed; remember this host as bad.
            listener.addToBlacklist(protocol.getHostAddress());
        }
    }

}
public class SerializerIntrinsics { /** * ! and pop register stack . */ public final void fstp ( Mem dst ) { } }
assert ( dst . size ( ) == 4 || dst . size ( ) == 8 || dst . size ( ) == 10 ) ; emitX86 ( INST_FSTP , dst ) ;
public class PeerMessage {

    /**
     * Parse the given buffer into a peer protocol message. Parses the
     * provided byte buffer and builds the corresponding PeerMessage subclass
     * object.
     *
     * @param buffer The byte buffer containing the message data.
     * @param torrent The torrent this message is about.
     * @return A PeerMessage subclass instance.
     * @throws ParseException When the message is invalid, can't be parsed or
     *         does not match the protocol requirements.
     */
    public static PeerMessage parse(ByteBuffer buffer, TorrentInfo torrent)
            throws ParseException {
        // First four bytes announce the message length; zero means a
        // keep-alive message with no type byte or payload.
        int length = buffer.getInt();
        if (length == 0) {
            return KeepAliveMessage.parse(buffer, torrent);
        } else if (length != buffer.remaining()) {
            throw new ParseException("Message size did not match announced "
                    + "size!", 0);
        }

        // The next byte identifies the message type.
        Type type = Type.get(buffer.get());
        if (type == null) {
            throw new ParseException("Unknown message ID!",
                    buffer.position() - 1);
        }

        // Dispatch to the subclass parser; each receives a slice so its
        // view starts at the payload.
        switch (type) {
            case CHOKE:
                return ChokeMessage.parse(buffer.slice(), torrent);
            case UNCHOKE:
                return UnchokeMessage.parse(buffer.slice(), torrent);
            case INTERESTED:
                return InterestedMessage.parse(buffer.slice(), torrent);
            case NOT_INTERESTED:
                return NotInterestedMessage.parse(buffer.slice(), torrent);
            case HAVE:
                return HaveMessage.parse(buffer.slice(), torrent);
            case BITFIELD:
                return BitfieldMessage.parse(buffer.slice(), torrent);
            case REQUEST:
                return RequestMessage.parse(buffer.slice(), torrent);
            case PIECE:
                return PieceMessage.parse(buffer.slice(), torrent);
            case CANCEL:
                return CancelMessage.parse(buffer.slice(), torrent);
            default:
                // Unreachable: every enum constant is handled above.
                throw new IllegalStateException("Message type should have "
                        + "been properly defined by now.");
        }
    }

}
public class CDL { /** * Produce a comma delimited text row from a JSONArray . Values containing * the comma character will be quoted . Troublesome characters may be * removed . * @ param ja A JSONArray of strings . * @ return A string ending in NEWLINE . */ public static String rowToString ( JSONArray ja ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < ja . length ( ) ; i += 1 ) { if ( i > 0 ) { sb . append ( ',' ) ; } Object object = ja . opt ( i ) ; if ( object != null ) { String string = object . toString ( ) ; if ( string . length ( ) > 0 && ( string . indexOf ( ',' ) >= 0 || string . indexOf ( '\n' ) >= 0 || string . indexOf ( '\r' ) >= 0 || string . indexOf ( 0 ) >= 0 || string . charAt ( 0 ) == '"' ) ) { sb . append ( '"' ) ; int length = string . length ( ) ; for ( int j = 0 ; j < length ; j += 1 ) { char c = string . charAt ( j ) ; if ( c >= ' ' && c != '"' ) { sb . append ( c ) ; } } sb . append ( '"' ) ; } else { sb . append ( string ) ; } } } sb . append ( '\n' ) ; return sb . toString ( ) ;
public class ContentBasedLocalBundleRepository { /** * This method can be used to obtain the ' base ' bundle for a given ' selected ' bundle . * The { @ link ContentBasedLocalBundleRepository # selectBundle } Will select a Bundle , which may or * may not be an ifix . * When the selectedBundle is an ifix , this method will return the corresponding bundle that has been ' ifixed ' . * If the selectedBundle is not an ifix , this method will return the selected bundle . * @ param baseLocation The base location . * @ param symbolicName The desired symbolic name . * @ param versionRange The range of versions that can be selected . * @ return The file representing the chosen bundle . */ public File selectBaseBundle ( String baseLocation , final String symbolicName , final VersionRange versionRange ) { } }
readCache ( ) ; return selectResource ( baseLocation , symbolicName , versionRange , true , // performURICheck = true true // selectBaseBundle = true ) ;
public class StackTraceFilter { /** * Example how the filter works ( + / - means good / bad ) : * [ a + , b + , c - , d + , e + , f - , g + ] - > [ a + , b + , d + , e + , g + ] * Basically removes all bad from the middle . * < strike > If any good are in the middle of bad those are also removed . < / strike > */ public StackTraceElement [ ] filter ( StackTraceElement [ ] target , boolean keepTop ) { } }
// TODO : profile // TODO : investigate " keepTop " commit history - no effect ! final List < StackTraceElement > filtered = new ArrayList < StackTraceElement > ( ) ; for ( StackTraceElement element : target ) { if ( CLEANER . isIn ( element ) ) { filtered . add ( element ) ; } } StackTraceElement [ ] result = new StackTraceElement [ filtered . size ( ) ] ; return filtered . toArray ( result ) ;
public class ZooKeeperUpdatingListener { /** * Creates a ZooKeeper server listener , which registers server into ZooKeeper . * < p > If you need a fully customized { @ link ZooKeeperUpdatingListener } instance , use * { @ link ZooKeeperUpdatingListenerBuilder } instead . * @ param zkConnectionStr ZooKeeper connection string * @ param zNodePath ZooKeeper node path ( under which this server will be registered ) */ public static ZooKeeperUpdatingListener of ( String zkConnectionStr , String zNodePath ) { } }
return new ZooKeeperUpdatingListenerBuilder ( zkConnectionStr , zNodePath ) . build ( ) ;
public class MultiRowIterator { /** * to move the iterator to the next item */ void advance ( ) { } }
// we are always at the leaf level when this method is invoked // so we calculate the seed for generating the row by combining the seed that generated the clustering components int depth = clusteringComponents . length - 1 ; long parentSeed = clusteringSeeds [ depth ] ; long rowSeed = seed ( clusteringComponents [ depth ] . peek ( ) , generator . clusteringComponents . get ( depth ) . type , parentSeed ) ; // and then fill the row with the _ non - clustering _ values for the position we _ were _ at , as this is what we ' ll deliver for ( int i = clusteringSeeds . length ; i < row . row . length ; i ++ ) { Generator gen = generator . valueComponents . get ( i - clusteringSeeds . length ) ; gen . setSeed ( rowSeed ) ; row . row [ i ] = gen . generate ( ) ; } // then we advance the leaf level setHasNext ( advance ( depth , false ) ) ;
public class PipedReader { /** * Tell whether this stream is ready to be read . A piped character * stream is ready if the circular buffer is not empty . * @ exception IOException if the pipe is * < a href = PipedInputStream . html # BROKEN > < code > broken < / code > < / a > , * { @ link # connect ( java . io . PipedWriter ) unconnected } , or closed . */ public synchronized boolean ready ( ) throws IOException { } }
if ( ! connected ) { throw new IOException ( "Pipe not connected" ) ; } else if ( closedByReader ) { throw new IOException ( "Pipe closed" ) ; } else if ( writeSide != null && ! writeSide . isAlive ( ) && ! closedByWriter && ( in < 0 ) ) { throw new IOException ( "Write end dead" ) ; } if ( in < 0 ) { return false ; } else { return true ; }
public class CmdLineParser {

    /**
     * Wraps a line so that the resulting parts are not longer than a given
     * maximum length.
     *
     * @param line Line to wrap
     * @param maxLength maximum length for the resulting parts
     * @return list of all wrapped parts
     */
    private List<String> wrapLines(String line, final int maxLength) {
        final List<String> parts = new ArrayList<String>();
        for (String remainder : line.split("\\n")) {
            while (remainder.length() > maxLength) {
                // Try to wrap at a space, but don't try too hard as some
                // languages don't even have whitespace.
                final String window = remainder.substring(0, maxLength);
                final int lastSpace = window.lastIndexOf(' ');
                // Break at the space only when it leaves a reasonably full line.
                final int cut = (lastSpace > maxLength * 3 / 5) ? lastSpace : maxLength;
                parts.add(remainder.substring(0, cut));
                remainder = remainder.substring(cut).trim();
            }
            parts.add(remainder);
        }
        return parts;
    }

}
public class TextBuilder { /** * Create a link in the current paragraph . * @ param text the text * @ param url the destination * @ return this for fluent style */ public TextBuilder link ( final String text , final URL url ) { } }
this . curParagraphBuilder . link ( text , url ) ; return this ;
public class RegularExpressionConverter { /** * Converts a PERL style regular expression into Java style . < br > * < br > * The leading and ending slash and the modifiers will be removed . * @ param regex * A PERL style regular expression * @ param faultTolerant * Fault - tolerant translating the flags * @ return Pattern */ public static Pattern convertPerlRegexToPattern ( @ Nonnull final String regex , @ Nonnull final boolean faultTolerant ) { } }
Check . notNull ( regex , "regex" ) ; String pattern = regex . trim ( ) ; final Matcher matcher = faultTolerant ? PERL_STYLE_TOLERANT . matcher ( pattern ) : PERL_STYLE . matcher ( pattern ) ; if ( ! matcher . matches ( ) ) { throw new IllegalArgumentException ( "The given regular expression '" + pattern + "' seems to be not in PERL style or has unsupported modifiers." ) ; } pattern = pattern . substring ( 1 ) ; final int lastIndex = pattern . lastIndexOf ( '/' ) ; pattern = pattern . substring ( 0 , lastIndex ) ; final int flags = Flag . convertToBitmask ( Flag . parse ( matcher . group ( 1 ) ) ) ; return Pattern . compile ( pattern , flags ) ;
public class LimitHandler { /** * Handles the next request in the handler chain * @ param exchange The HttpServerExchange * @ throws Exception Thrown when an exception occurs */ protected void nextHandler ( HttpServerExchange exchange ) throws Exception { } }
if ( this . attachment . hasBasicAuthentication ( ) ) { HttpHandler httpHandler = RequestUtils . wrapBasicAuthentication ( Application . getInstance ( LocaleHandler . class ) , this . attachment . getUsername ( ) , getPassword ( ) ) ; httpHandler . handleRequest ( exchange ) ; } else { Application . getInstance ( LocaleHandler . class ) . handleRequest ( exchange ) ; }
public class UntagResourceRequest { /** * A list of one or more tag keys for the tags that you are removing . Specify only the tag keys , not the tag values . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTagKeyList ( java . util . Collection ) } or { @ link # withTagKeyList ( java . util . Collection ) } if you want to * override the existing values . * @ param tagKeyList * A list of one or more tag keys for the tags that you are removing . Specify only the tag keys , not the tag * values . * @ return Returns a reference to this object so that method calls can be chained together . */ public UntagResourceRequest withTagKeyList ( String ... tagKeyList ) { } }
if ( this . tagKeyList == null ) { setTagKeyList ( new java . util . ArrayList < String > ( tagKeyList . length ) ) ; } for ( String ele : tagKeyList ) { this . tagKeyList . add ( ele ) ; } return this ;
public class Check { /** * Ensures that a passed { @ code byte } is less than another { @ code byte } . * @ param expected * Expected value * @ param check * Comparable to be checked * @ return the passed { @ code byte } argument { @ code check } * @ throws IllegalNotLesserThanException * if the argument value { @ code check } is not lesser than value { @ code expected } */ @ ArgumentsChecked @ Throws ( IllegalNotLesserThanException . class ) public static byte lesserThan ( final byte expected , final byte check ) { } }
if ( expected <= check ) { throw new IllegalNotLesserThanException ( check ) ; } return check ;
public class ListManagementImagesImpl { /** * Add an image to the list with list Id equal to list Id passed . * @ param listId List Id of the image list . * @ param imageStream The image file . * @ param addImageFileInputOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Image object */ public Observable < ServiceResponse < Image > > addImageFileInputWithServiceResponseAsync ( String listId , byte [ ] imageStream , AddImageFileInputOptionalParameter addImageFileInputOptionalParameter ) { } }
if ( this . client . baseUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.baseUrl() is required and cannot be null." ) ; } if ( listId == null ) { throw new IllegalArgumentException ( "Parameter listId is required and cannot be null." ) ; } if ( imageStream == null ) { throw new IllegalArgumentException ( "Parameter imageStream is required and cannot be null." ) ; } final Integer tag = addImageFileInputOptionalParameter != null ? addImageFileInputOptionalParameter . tag ( ) : null ; final String label = addImageFileInputOptionalParameter != null ? addImageFileInputOptionalParameter . label ( ) : null ; return addImageFileInputWithServiceResponseAsync ( listId , imageStream , tag , label ) ;
public class DataSourceFactory { /** * Get a pooled data source for the provided connection parameters . * @ param url The JDBC database URL of the form < code > jdbc : subprotocol : subname < / code > * @ param properties A list of key / value configuration parameters to pass as connection arguments . Normally at * least a " user " and " password " property should be included * @ param poolSpec A connection pool spec * @ return A pooled database connection */ private static PoolingDataSource < PoolableConnection > getPoolingDataSource ( final String url , final ConcurrentMap < String , String > properties , final ConnectionProperties poolSpec ) { } }
// assert in private method assert url != null : "The url cannot be null" ; assert properties != null : "The properties cannot be null" ; assert poolSpec != null : "The pol spec cannot be null" ; LOG . debug ( "Creating new pooled data source for '" + url + "'" ) ; // convert the properties hashmap to java properties final Properties props = new Properties ( ) ; props . putAll ( properties ) ; // create a Apache DBCP pool configuration from the pool spec final GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig ( ) ; poolConfig . setMaxTotal ( poolSpec . getMaxTotal ( ) ) ; poolConfig . setMaxIdle ( poolSpec . getMaxIdle ( ) ) ; poolConfig . setMinIdle ( poolSpec . getMinIdle ( ) ) ; poolConfig . setMaxWaitMillis ( poolSpec . getMaxWaitMillis ( ) ) ; poolConfig . setTestOnCreate ( poolSpec . isTestOnCreate ( ) ) ; poolConfig . setTestOnBorrow ( poolSpec . isTestOnBorrow ( ) ) ; poolConfig . setTestOnReturn ( poolSpec . isTestOnReturn ( ) ) ; poolConfig . setTestWhileIdle ( poolSpec . isTestWhileIdle ( ) ) ; poolConfig . setTimeBetweenEvictionRunsMillis ( poolSpec . getTimeBetweenEvictionRunsMillis ( ) ) ; poolConfig . setNumTestsPerEvictionRun ( poolSpec . getNumTestsPerEvictionRun ( ) ) ; poolConfig . setMinEvictableIdleTimeMillis ( poolSpec . getMinEvictableIdleTimeMillis ( ) ) ; poolConfig . setSoftMinEvictableIdleTimeMillis ( poolSpec . getSoftMinEvictableIdleTimeMillis ( ) ) ; poolConfig . setLifo ( poolSpec . isLifo ( ) ) ; // create the pool and assign the factory to the pool final org . apache . commons . dbcp2 . ConnectionFactory connFactory = new DriverManagerConnectionFactory ( url , props ) ; final PoolableConnectionFactory poolConnFactory = new PoolableConnectionFactory ( connFactory , null ) ; poolConnFactory . setDefaultAutoCommit ( poolSpec . isDefaultAutoCommit ( ) ) ; poolConnFactory . setDefaultReadOnly ( poolSpec . isDefaultReadOnly ( ) ) ; poolConnFactory . setDefaultTransactionIsolation ( poolSpec . 
getDefaultTransactionIsolation ( ) ) ; poolConnFactory . setCacheState ( poolSpec . isCacheState ( ) ) ; poolConnFactory . setValidationQuery ( poolSpec . getValidationQuery ( ) ) ; poolConnFactory . setMaxConnLifetimeMillis ( poolSpec . getMaxConnLifetimeMillis ( ) ) ; final GenericObjectPool < PoolableConnection > connPool = new GenericObjectPool < > ( poolConnFactory , poolConfig ) ; poolConnFactory . setPool ( connPool ) ; // create a new pooled data source return new PoolingDataSource < > ( connPool ) ;
public class AbstractJUnit4InitMethodNotRun {

    /**
     * Matches if all of the following conditions are true:
     * 1) The method matches {@link #methodMatcher()} (looks like setUp() or
     *    tearDown(), and none of the overrides in the hierarchy of the method
     *    have the appropriate @Before or @After annotations).
     * 2) The method is not annotated with @Test.
     * 3) The enclosing class has an @RunWith annotation and does not extend
     *    TestCase. This marks that the test is intended to run with JUnit 4.
     */
    @Override
    public Description matchMethod(MethodTree methodTree, VisitorState state) {
        boolean matches = allOf(
                methodMatcher(),
                not(hasAnnotationOnAnyOverriddenMethod(JUNIT_TEST)),
                enclosingClass(isJUnit4TestClass))
                .matches(methodTree, state);
        if (!matches) {
            return Description.NO_MATCH;
        }

        // For each annotationReplacement, replace the first annotation that
        // matches. If any of them matches, don't try and do the rest of the
        // work.
        Description description;
        for (AnnotationReplacements replacement : annotationReplacements()) {
            description = tryToReplaceAnnotation(
                    methodTree, state, replacement.badAnnotation, replacement.goodAnnotation);
            if (description != null) {
                return description;
            }
        }

        // Search for another @Before annotation on the method and replace the
        // import if we find one.
        String correctAnnotation = correctAnnotation();
        String unqualifiedClassName = getUnqualifiedClassName(correctAnnotation);
        for (AnnotationTree annotationNode : methodTree.getModifiers().getAnnotations()) {
            Symbol annoSymbol = ASTHelpers.getSymbol(annotationNode);
            if (annoSymbol.getSimpleName().contentEquals(unqualifiedClassName)) {
                // Same simple name but wrong package: swap the import, keep
                // the annotation text as written.
                SuggestedFix.Builder suggestedFix = SuggestedFix.builder()
                        .removeImport(annoSymbol.getQualifiedName().toString())
                        .addImport(correctAnnotation);
                makeProtectedPublic(methodTree, state, suggestedFix);
                return describeMatch(annotationNode, suggestedFix.build());
            }
        }

        // Add correctAnnotation() to the unannotated method
        // (and convert protected to public if it is).
        SuggestedFix.Builder suggestedFix = SuggestedFix.builder().addImport(correctAnnotation);
        makeProtectedPublic(methodTree, state, suggestedFix);
        suggestedFix.prefixWith(methodTree, "@" + unqualifiedClassName + "\n");
        return describeMatch(methodTree, suggestedFix.build());
    }

}
public class MultiWordChunker2 { /** * Lazy init , thanks to Artur Trzewik */ private void lazyInit ( ) { } }
if ( tokenToPosTagMap != null ) { return ; } Map < String , List < MultiWordEntry > > map = new HashMap < > ( ) ; try ( InputStream stream = JLanguageTool . getDataBroker ( ) . getFromResourceDirAsStream ( filename ) ) { List < String > posTokens = loadWords ( stream ) ; for ( String posToken : posTokens ) { String [ ] tokenAndTag = posToken . split ( "\t" ) ; if ( tokenAndTag . length != 2 ) { throw new RuntimeException ( "Invalid format in " + filename + ": '" + posToken + "', expected two tab-separated parts" ) ; } String [ ] tokens = tokenAndTag [ 0 ] . split ( " " ) ; String posTag = tokenAndTag [ 1 ] ; List < MultiWordEntry > multiwordItems ; if ( map . containsKey ( tokens [ 0 ] ) ) { multiwordItems = map . get ( tokens [ 0 ] ) ; } else { multiwordItems = new ArrayList < > ( ) ; map . put ( tokens [ 0 ] , multiwordItems ) ; } multiwordItems . add ( new MultiWordEntry ( Arrays . asList ( tokens ) , posTag ) ) ; } tokenToPosTagMap = map ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; }
public class SoyErrors { /** * Performs a case insensitive Levenshtein edit distance based on the 2 rows implementation . * @ param s The first string * @ param t The second string * @ param maxDistance The distance to beat , if we can ' t do better , stop trying * @ return an integer describing the number of edits needed to transform s into t * @ see " https : / / en . wikipedia . org / wiki / Levenshtein _ distance # Iterative _ with _ two _ matrix _ rows " */ private static int distance ( String s , String t , int maxDistance ) { } }
// create two work vectors of integer distances // it is possible to reduce this to only one array , but performance isn ' t that important here . // We could also avoid calculating a lot of the entries by taking maxDistance into account in // the inner loop . This would only be worth optimizing if it showed up in a profile . int [ ] v0 = new int [ t . length ( ) + 1 ] ; int [ ] v1 = new int [ t . length ( ) + 1 ] ; // initialize v0 ( the previous row of distances ) // this row is A [ 0 ] [ i ] : edit distance for an empty s // the distance is just the number of characters to delete from t for ( int i = 0 ; i < v0 . length ; i ++ ) { v0 [ i ] = i ; } for ( int i = 0 ; i < s . length ( ) ; i ++ ) { // calculate v1 ( current row distances ) from the previous row v0 // first element of v1 is A [ i + 1 ] [ 0] // edit distance is delete ( i + 1 ) chars from s to match empty t v1 [ 0 ] = i + 1 ; int bestThisRow = v1 [ 0 ] ; char sChar = Ascii . toLowerCase ( s . charAt ( i ) ) ; // use formula to fill in the rest of the row for ( int j = 0 ; j < t . length ( ) ; j ++ ) { char tChar = Ascii . toLowerCase ( t . charAt ( j ) ) ; v1 [ j + 1 ] = Math . min ( v1 [ j ] + 1 , // deletion Math . min ( v0 [ j + 1 ] + 1 , // insertion v0 [ j ] + ( ( sChar == tChar ) ? 0 : 1 ) ) ) ; // substitution bestThisRow = Math . min ( bestThisRow , v1 [ j + 1 ] ) ; } if ( bestThisRow > maxDistance ) { // if we couldn ' t possibly do better than maxDistance , stop trying . return maxDistance + 1 ; } // swap v1 ( current row ) to v0 ( previous row ) for next iteration . no need to clear previous // row since we always update all of v1 on each iteration . int [ ] tmp = v0 ; v0 = v1 ; v1 = tmp ; } // The best answer is the last slot in v0 ( due to the swap on the last iteration ) return v0 [ t . length ( ) ] ;
public class IssuingCardPinService { /** * Create a IssuingCardPinService with the provided { @ link EphemeralKeyProvider } . * @ param keyProvider an { @ link EphemeralKeyProvider } used to get * { @ link CustomerEphemeralKey EphemeralKeys } as needed */ @ NonNull public static IssuingCardPinService create ( @ NonNull Context context , @ NonNull EphemeralKeyProvider keyProvider ) { } }
return new IssuingCardPinService ( keyProvider , new StripeApiHandler ( context ) ) ;
public class ToTextStream {

    /**
     * Called when a Comment is to be constructed.
     * Note that Xalan will normally invoke the other version of this method.
     * %REVIEW% In fact, is this one ever needed, or was it a mistake?
     *
     * @param data The comment data.
     * @throws org.xml.sax.SAXException Any SAX exception, possibly wrapping
     *         another exception.
     */
    public void comment(String data) throws org.xml.sax.SAXException {
        final int length = data.length();
        // Grow the shared character buffer when the comment won't fit.
        if (length > m_charsBuff.length) {
            m_charsBuff = new char[length * 2 + 1];
        }
        data.getChars(0, length, m_charsBuff, 0);
        // Delegate to the char[]-based overload.
        comment(m_charsBuff, 0, length);
    }

}
public class Index { /** * set the value collection Specifies a collection name . If you are indexing an external collection * external = " Yes " , specify the collection name , including fully qualified path . * @ param collection value to set * @ throws PageException */ public void setCollection ( String collection ) throws PageException { } }
try { this . collection = pageContext . getConfig ( ) . getSearchEngine ( pageContext ) . getCollectionByName ( collection . toLowerCase ( ) . trim ( ) ) ; } catch ( SearchException e ) { throw Caster . toPageException ( e ) ; }
public class DatatypeConverter {

    /**
     * Parse the string representation of a double.
     *
     * @param value string representation
     * @return Java representation, or null if the value is absent/empty
     * @throws ParseException
     */
    public static Number parseDouble(String value) throws ParseException {
        Number result = null;
        value = parseString(value);

        // If we still have a value.
        // NOTE(review): "-1 -1" is treated as "no value" -- presumably a
        // file-format sentinel; confirm against the producing writer.
        if (value != null && !value.isEmpty() && !value.equals("-1 -1")) {
            // Normalize "E+" exponents to a plain "E" so parsing accepts them.
            int index = value.indexOf("E+");
            if (index != -1) {
                value = value.substring(0, index) + 'E' + value.substring(index + 2, value.length());
            }

            // Exponent form goes through the shared formatter; plain decimals
            // go straight through Double.valueOf.
            if (value.indexOf('E') != -1) {
                result = DOUBLE_FORMAT.get().parse(value);
            } else {
                result = Double.valueOf(value);
            }
        }

        return result;
    }

}
public class RubyObject { /** * Executes a method of any Object by Java reflection . * @ param o * an Object * @ param methodName * name of the method * @ param arg * a Float * @ return the result of the method called */ public static < E > E send ( Object o , String methodName , Float arg ) { } }
return send ( o , methodName , ( Object ) arg ) ;
public class SheetBindingErrors {

    /**
     * Registers a formatter for a field.
     *
     * @since 2.0
     * @param field the field name
     * @param fieldType the field's class type
     * @param formatter the formatter to register
     * @param strippedIndex whether index/key information should be stripped
     *        from the field path when registering
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void registerFieldFormatter(final String field, final Class<?> fieldType,
            final FieldFormatter<?> formatter, final boolean strippedIndex) {

        String fieldPath = buildFieldPath(field);

        if (strippedIndex) {
            // Remove indexes and keys from the path.
            List<String> strippedPaths = new ArrayList<>();
            fieldFormatterRegistry.addStrippedPropertyPaths(strippedPaths, "", fieldPath);

            if (strippedPaths.size() > 0) {
                // After lexicographic sorting, the first entry is the path
                // with all indexes removed.
                Collections.sort(strippedPaths);
                fieldPath = strippedPaths.get(0);
            }
        }

        fieldFormatterRegistry.registerFormatter(fieldPath, (Class) fieldType, (FieldFormatter) formatter);
    }

}
public class AwtExceptionHandlerAdapterHack { /** * Sets the { @ link # SUN _ AWT _ EXCEPTION _ HANDLER _ KEY } system property to register this class as the event thread ' s * exception handler . When called back , this class simply forwards to the delegate . * @ param exceptionHandlerDelegate the " real " exception handler to delegate to when an uncaught exception occurs . */ public static void registerExceptionHandler ( RegisterableExceptionHandler exceptionHandlerDelegate ) { } }
AwtExceptionHandlerAdapterHack . exceptionHandlerDelegate = exceptionHandlerDelegate ; // Registers this class with the system properties so Sun ' s JDK can pick it up . Always sets even if previously set . System . getProperties ( ) . put ( SUN_AWT_EXCEPTION_HANDLER_KEY , AwtExceptionHandlerAdapterHack . class . getName ( ) ) ;
public class JUtils { /** * Format a prefixed name as MACRO _ CASE . * @ param name The name to macro - case . * @ return THE _ MACRO _ CASED _ NAME */ public static String macroCase ( String name ) { } }
return Strings . c_case ( name ) . toUpperCase ( Locale . US ) ;
public class DisassociateDelegateFromResourceRequestMarshaller {

    /**
     * Marshall the given parameter object.
     */
    public void marshall(DisassociateDelegateFromResourceRequest disassociateDelegateFromResourceRequest,
            ProtocolMarshaller protocolMarshaller) {
        if (disassociateDelegateFromResourceRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its wire location.
            protocolMarshaller.marshall(disassociateDelegateFromResourceRequest.getOrganizationId(), ORGANIZATIONID_BINDING);
            protocolMarshaller.marshall(disassociateDelegateFromResourceRequest.getResourceId(), RESOURCEID_BINDING);
            protocolMarshaller.marshall(disassociateDelegateFromResourceRequest.getEntityId(), ENTITYID_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as an SdkClientException,
            // preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
public class IFixUtils {
    /**
     * Checks whether any ifix jars or ifix static-content files are missing or stale in the
     * runtime. Jar files are checked for their base bundles being present, and the hashes of
     * the new versions of ifix files are compared against what is found on disk.
     *
     * @param installDir the install directory of the product extension (including core and usr)
     * @param features   the ProvisioningFeatureDefinitions found in the install dir
     * @param repo       the bundle repository mapped to the install location of the product extension
     * @param console    the CommandConsole to write messages to
     * @return the set of ifix IDs that need to be reapplied
     */
    public static Set<String> getIFixesThatMustBeReapplied(File installDir, Map<String, ProvisioningFeatureDefinition> features,
                                                           ContentBasedLocalBundleRepository repo, CommandConsole console) {
        Set<String> ifixesToReApply = new HashSet<String>();
        Map<File, Map<String, Version>> allBaseBundleJarContent = new HashMap<File, Map<String, Version>>();
        Set<File> allBundleJarContent = new HashSet<File>();
        Set<File> allStaticFileContent = new HashSet<File>();
        // Process all subsystem content of all manifests and store them in the sets.
        // We'll use those to classify each ifix file below.
        processSubsystemContent(installDir, features, repo, allBaseBundleJarContent, allBundleJarContent, allStaticFileContent, console);
        // Get the LibertyProfile metadata files used to match up with the ifixes.
        Map<String, BundleFile> bundleFiles = processLPMFXmls(installDir, console);
        // Iterate over each file found across all ifix xmls. If the same file is listed in
        // multiple ifix xmls, processIFixXmls has already resolved which IFixInfo carries the
        // newest version of that file.
        for (Map.Entry<String, IFixInfo> ifixInfoEntry : processIFixXmls(installDir, bundleFiles, console).entrySet()) {
            // Relative file name of the ifix file, e.g. lib/test_1.0.0.20130101.jar
            String updateFileName = ifixInfoEntry.getKey();
            // The IFixInfo that contains the latest version of this file.
            IFixInfo ifixInfo = ifixInfoEntry.getValue();
            // Find the matching UpdatedFile entry inside this IFixInfo and process it.
            for (UpdatedFile updatedFile : ifixInfo.getUpdates().getFiles()) {
                if (updatedFile.getId().equals(updateFileName)) {
                    File updateFile = new File(installDir, updateFileName);
                    // Static content file? If not, it may be a bundle; otherwise ignore it.
                    if (allStaticFileContent.contains(updateFile)) {
                        if (updateFile.exists()) {
                            // Hash recorded for the static file in the ifix xml.
                            String ifixHash = updatedFile.getHash();
                            try {
                                // Compare recorded hash vs. on-disk hash; mismatch means the ifix
                                // content was overwritten and must be re-applied.
                                if (!equalsHashes(updateFile, ifixHash))
                                    ifixesToReApply.add(ifixInfo.getId());
                            } catch (IOException ioe) {
                                console.printlnErrorMessage(getMessage("ifixutils.unable.to.read.file", updateFile.getAbsolutePath(), ioe.getMessage()));
                            }
                        } else {
                            // Static file expected but missing on disk: re-apply the ifix.
                            ifixesToReApply.add(ifixInfo.getId());
                        }
                    } else {
                        // Not static content, so we should be dealing with bundles and/or static jars.
                        // If the actual ifix jar is in the runtime, verify its hash.
                        if (allBundleJarContent.contains(updateFile)) {
                            String ifixHash = updatedFile.getHash();
                            try {
                                if (!equalsHashes(updateFile, ifixHash))
                                    ifixesToReApply.add(ifixInfo.getId());
                            } catch (IOException ioe) {
                                console.printlnErrorMessage(getMessage("ifixutils.unable.to.read.file", updateFile.getAbsolutePath(), ioe.getMessage()));
                            }
                        } else {
                            // The ifix jar is not in the runtime. Check whether the base bundle it
                            // patches is present; if it isn't, the ifix can be ignored.
                            BundleFile bundleFile = bundleFiles.get(updatedFile.getId());
                            // No metadata for this id means it is not a jar we know about; ignore.
                            if (bundleFile != null) {
                                String bundleSymbolicName = bundleFile.getSymbolicName();
                                Version bundleVersion = new Version(bundleFile.getVersion());
                                // Scan all known runtime bundles for a matching base bundle
                                // (same symbolic name, same major.minor.micro, qualifier ignored).
                                boolean found = false;
                                for (Iterator<Map<String, Version>> iter = allBaseBundleJarContent.values().iterator(); iter.hasNext() && !found;) {
                                    for (Map.Entry<String, Version> bundleEntry : iter.next().entrySet()) {
                                        String entrySymbolicName = bundleEntry.getKey();
                                        Version entryVersion = bundleEntry.getValue();
                                        if (bundleSymbolicName.equals(entrySymbolicName)
                                            && bundleVersion.getMajor() == entryVersion.getMajor()
                                            && bundleVersion.getMinor() == entryVersion.getMinor()
                                            && bundleVersion.getMicro() == entryVersion.getMicro()) {
                                            found = true;
                                        }
                                    }
                                }
                                // Base bundle present without its ifix jar: the ifix must be reinstalled.
                                if (found)
                                    ifixesToReApply.add(ifixInfo.getId());
                            }
                        }
                    }
                }
            }
        }
        return ifixesToReApply;
    }
}
public class MessageMgrBuilder { /** * Activate a message type and sets a type handler with max count set to 100. * An existing handler will be overwritten . * @ param type message type to be activated , nothing will be set if null * @ return self to allow for chaining */ public MessageMgrBuilder setHandler ( E_MessageType type ) { } }
if ( type != null ) { this . messageHandlers . put ( type , new MessageTypeHandler ( type ) ) ; } else { this . buildErrors . addError ( "{}: cannot add handler for empty type" , this . getClass ( ) . getSimpleName ( ) ) ; } return this ;
public class VectorPackingPropagator {
    /**
     * The fix-point procedure without heap. On each dimension:
     * <ul>
     * <li>rule 1.0: fail if sumItemSizes &lt; sumBinLoadInf or sumItemSizes &gt; sumBinLoadSup</li>
     * <li>rule 1.1, per bin: sumItemSizes - (sumBinLoadSup - sup(binLoad)) &lt;= binLoad
     *     &lt;= sumItemSizes - (sumBinLoadInf - inf(binLoad))</li>
     * </ul>
     * Iterates until no filter call tightens any bound.
     *
     * @throws ContradictionException if a contradiction (rules 1) is raised
     */
    @SuppressWarnings("squid:S3346")
    private void fixPoint() throws ContradictionException {
        boolean noFixPoint = true;
        while (noFixPoint) {
            // Rule 1.0: global feasibility check on every dimension.
            for (int d = 0; d < nbDims; d++) {
                if (sumISizes[d] > sumLoadSup[d].get() || sumISizes[d] < sumLoadInf[d].get()) {
                    fails();
                }
            }
            noFixPoint = false;
            // Rule 1.1: tighten each bin's load bounds; loop again if anything changed.
            for (int d = 0; d < nbDims; d++) {
                for (int b = 0; b < nbBins; b++) {
                    // Invariant: load domain stays within [assignedLoad, potentialLoad].
                    assert (loads[d][b].getLB() >= assignedLoad[d][b].get() && loads[d][b].getUB() <= potentialLoad[d][b].get());
                    noFixPoint |= filterLoadInf(d, b, (int) sumISizes[d] - sumLoadSup[d].get() + loads[d][b].getUB());
                    noFixPoint |= filterLoadSup(d, b, (int) sumISizes[d] - sumLoadInf[d].get() + loads[d][b].getLB());
                }
            }
        }
    }
}
public class ApplicationProxy {
    /**
     * Initializes this proxy with the target server, base servlet path and remote
     * application name.
     *
     * @param strServer          the server host to proxy to
     * @param strBaseServletPath the base servlet path on the server
     * @param strRemoteApp       the name of the remote application
     */
    public void init(String strServer, String strBaseServletPath, String strRemoteApp) {
        // Base-class init is invoked with no arguments of its own.
        super.init(null, null);
        m_strServer = strServer;
        m_strBaseServletPath = strBaseServletPath;
        m_strRemoteApp = strRemoteApp;
    }
}
public class ConfigManager { /** * Check if the config files have changed since they were last read * @ return true if the modification time of the file is greater * than that of the last successful reload , false otherwise * @ param init true when the config manager is being initialized . * false on reloads */ private boolean isConfigChanged ( boolean init ) throws IOException { } }
if ( init && ( configFileName == null || ( poolsConfigFileName == null && conf . onlyAllowConfiguredPools ( ) ) ) ) { throw new IOException ( "ClusterManager needs a config and a " + "pools file to start" ) ; } if ( configFileName == null && poolsConfigFileName == null ) { return false ; } boolean configChanged = false ; if ( configFileName != null ) { File file = new File ( configFileName ) ; configChanged |= ( file . lastModified ( ) == 0 || file . lastModified ( ) > lastSuccessfulReload ) ; } if ( poolsConfigFileName != null ) { File file = new File ( poolsConfigFileName ) ; configChanged |= ( file . lastModified ( ) == 0 || file . lastModified ( ) > lastSuccessfulReload ) ; } return configChanged ;
public class AfplibFactoryImpl {
    /**
     * Converts the given literal into a {@code MediaEjectControlReserved} enumerator.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param eDataType    the EMF data type being deserialized (used only for the error message)
     * @param initialValue the string literal to convert
     * @return the matching enumerator
     * @throws IllegalArgumentException if the literal does not name a valid enumerator
     * @generated
     */
    public MediaEjectControlReserved createMediaEjectControlReservedFromString(EDataType eDataType, String initialValue) {
        MediaEjectControlReserved result = MediaEjectControlReserved.get(initialValue);
        if (result == null)
            throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class PaxWicketPageTracker { /** * { @ inheritDoc } * Default implementation of the * { @ code ServiceTrackerCustomizer . addingService } method . * This method is only called when this < code > ServiceTracker < / code > object * has been constructed with a < code > null ServiceTrackerCustomizer < / code > * argument . * The default implementation returns the result of calling * < code > getService < / code > , on the < code > BundleContext < / code > object with * which this < code > ServiceTracker < / code > object was created , passing the * specified < code > ServiceReference < / code > object . * This method can be overridden in a subclass to customize the service * object to be tracked for the service being added . In that case , take care * not to rely on the default implementation of removedService that will * unget the service . * @ see org . osgi . util . tracker . ServiceTrackerCustomizer */ @ Override public final PageFactory < ? extends IRequestablePage > addingService ( ServiceReference < PageFactory < ? extends IRequestablePage > > reference ) { } }
PageFactory < ? extends IRequestablePage > pageSource = super . addingService ( reference ) ; paxWicketPageFactory . add ( pageSource ) ; return pageSource ;
public class NamedParameterStatement {
    /**
     * Builds the JDBC {@link java.sql.PreparedStatement} from the named-parameter query.
     * <p>
     * Steps: run all registered plugins over the query, replace each named parameter
     * with one or more '?' placeholders (collection/array values expand to a
     * comma-separated placeholder list sized to the value), record each parameter's
     * placeholder position(s) in {@code paramsPlacement}, create the statement, apply
     * fetch size / query timeout / max rows when configured, and finally derive
     * {@code finalIndexedQuery} where each '?' is numbered for diagnostics.
     * Idempotent: returns immediately if the statement was already prepared.
     *
     * @throws SQLException if a passed parameter does not appear in the query
     *         (unless {@code ignoreExtraPassedParam}) or if statement creation fails
     */
    private void processQuery() throws SQLException {
        if (preparedStatement != null) {
            return;
        }
        if (query == null) {
            throw new RuntimeException("Invalid NamedParameterStatement: no query!");
        }
        logger.debug("Orig Query: {}", query);
        StringBuffer builder = new StringBuffer();
        // Let each plugin rewrite the query before parameter substitution.
        String queryByPlugin = query;
        for (INpsPlugin plugin : plugins) {
            queryByPlugin = plugin.process(queryByPlugin, params);
            if (logger.isDebugEnabled()) {
                logger.debug("Apply plugin: name=[{}] query=[{}] params={}", plugin.getClass().getSimpleName(), queryByPlugin, params);
            }
        }
        Matcher matcher = PARAM_PATTERN.matcher(queryByPlugin);
        int noOfParams = 0;
        while (matcher.find()) {
            // Group 4 present means this match is a real named parameter
            // (other groups presumably cover literals/comments — TODO confirm against PARAM_PATTERN).
            if (matcher.group(4) != null) {
                noOfParams++;
                String param = matcher.group("PARAM");
                logger.debug("Param: {}", param);
                // A parameter may occur multiple times; remember every placeholder index.
                if (!paramsPlacement.containsKey(param)) {
                    paramsPlacement.put(param, new ArrayList<>());
                }
                paramsPlacement.get(param).add(noOfParams);
                Object paramValue = params.get(param);
                StringBuilder paramReplacementBuilder = new StringBuilder("?");
                // Collections/arrays expand to "?,?,...,?" matching their size.
                if (paramValue != null && (paramValue instanceof Collection || paramValue.getClass().isArray())) {
                    int size = paramValue instanceof Collection ? ((Collection) paramValue).size() : ((Object[]) paramValue).length;
                    for (int i = 1; i < size; i++) {
                        paramReplacementBuilder.append(",?");
                    }
                }
                matcher.appendReplacement(builder, paramReplacementBuilder.toString());
            }
        }
        matcher.appendTail(builder);
        finalQuery = builder.toString();
        logger.debug("Final SQL: {}", finalQuery);
        logger.debug("Number of params: {}", noOfParams);
        // Reject parameters the caller supplied that never appeared in the query.
        if (!ignoreExtraPassedParam) {
            for (String param : params.keySet()) {
                if (!paramsPlacement.containsKey(param)) {
                    throw new SQLException("Passed parameter not found: " + param);
                }
            }
        }
        preparedStatement = connection.prepareStatement(finalQuery);
        if (fetchSize != null) {
            preparedStatement.setFetchSize(fetchSize);
        }
        if (queryTimeout != null) {
            preparedStatement.setQueryTimeout(queryTimeout);
        }
        if (maxRows != null) {
            preparedStatement.setMaxRows(maxRows);
        }
        // Produce a human-readable variant with numbered placeholders (" ?1 ", " ?2 ", ...).
        builder = new StringBuffer();
        matcher = PARAM_Q_MARK_PATTERN.matcher(finalQuery);
        int idx = 1;
        while (matcher.find()) {
            if (matcher.group("QMARK") != null) {
                String replacement = String.format(" ?%s ", idx++);
                matcher.appendReplacement(builder, replacement);
            }
        }
        matcher.appendTail(builder);
        finalIndexedQuery = builder.toString();
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the {@code IfcPixelTexture} EClass, lazily resolving it from the
     * registered Ifc4 EPackage on first access (classifier index 423).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public EClass getIfcPixelTexture() {
        if (ifcPixelTextureEClass == null) {
            ifcPixelTextureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(423);
        }
        return ifcPixelTextureEClass;
    }
}
public class AbstractChainableEvent { /** * Assigns the chain identifier for the specified event to the chain identifier * value of this instance . This requires the specified event to be an instance * of ChainableEvent . * @ param event the event to chain * @ return a chained event */ private Event linkChainIdentifier ( Event event ) { } }
if ( event instanceof ChainableEvent ) { ChainableEvent chainableEvent = ( ChainableEvent ) event ; chainableEvent . setChainIdentifier ( this . getChainIdentifier ( ) ) ; return chainableEvent ; } return event ;
public class ApiOvhEmailexchange { /** * Add new mailing list member * REST : POST / email / exchange / { organizationName } / service / { exchangeService } / mailingList / { mailingListAddress } / member / contact * @ param memberAccountId [ required ] Member account id * @ param memberContactId [ required ] Member contact id * @ param organizationName [ required ] The internal name of your exchange organization * @ param exchangeService [ required ] The internal name of your exchange service * @ param mailingListAddress [ required ] The mailing list address */ public OvhTask organizationName_service_exchangeService_mailingList_mailingListAddress_member_contact_POST ( String organizationName , String exchangeService , String mailingListAddress , Long memberAccountId , Long memberContactId ) throws IOException { } }
String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/mailingList/{mailingListAddress}/member/contact" ; StringBuilder sb = path ( qPath , organizationName , exchangeService , mailingListAddress ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "memberAccountId" , memberAccountId ) ; addBody ( o , "memberContactId" , memberContactId ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class NodeArbitrateEvent { /** * 创建相应的node节点 , 说明 : node节点的生命周期为EPHEMERAL * < pre > * 1 . 是个同步调用 * < / pre > */ public void init ( Long nid ) { } }
String path = ManagePathUtils . getNode ( nid ) ; try { zookeeper . create ( path , new byte [ 0 ] , CreateMode . EPHEMERAL ) ; // 创建为临时节点 } catch ( ZkException e ) { throw new ArbitrateException ( "Node_init" , nid . toString ( ) , e ) ; }
public class UnifiedResponse {
    /**
     * Utility method that sets the MIME type to application/force-download and sets
     * the respective content-disposition filename, triggering a browser download.
     *
     * @param sFilename the filename to be used
     * @return this for chaining
     */
    @Nonnull
    public final UnifiedResponse setDownloadFilename(@Nonnull @Nonempty final String sFilename) {
        setMimeType(CMimeType.APPLICATION_FORCE_DOWNLOAD);
        setContentDispositionFilename(sFilename);
        return this;
    }
}
public class BootstrapContextImpl {
    /**
     * DS method to activate this component: captures config properties, activates the
     * nested service references, optionally wires bean validation, configures and starts
     * the resource adapter under its own thread context and class loader, and finally
     * registers the readiness latch.
     * Best practice: this should be a protected method, not public or private.
     *
     * @param context DeclarativeService defined/populated component context
     * @throws Exception if unable to start the resource adapter
     */
    @Trivial
    protected void activate(ComponentContext context) throws Exception {
        Dictionary<String, ?> props = context.getProperties();
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(this, tc, "activate", props);
        resourceAdapterID = (String) props.get("id");
        // Activate all nested service references before touching the adapter.
        contextProviders.activate(context);
        bvalRef.activate(context);
        tranInflowManagerRef.activate(context);
        tranSyncRegistryRef.activate(context);
        jcaSecurityContextRef.activate(context);
        componentContext = context;
        properties = props;
        raMetaData = resourceAdapterSvc.getResourceAdapterMetaData();
        myAppName = raMetaData != null ? raMetaData.getJ2EEName().getApplication() : null;
        Object svc = bvalRef.getService();
        if (svc != null) {
            // Isolate and dynamically load BeanValidationHelperImpl to avoid a javax.validation
            // bundle dependency when the beanValidation feature is not deployed.
            if (System.getSecurityManager() == null)
                bvalHelper = (BeanValidationHelper) componentContext.getBundleContext().getBundle().loadClass("com.ibm.ws.jca.internal.BeanValidationHelperImpl").newInstance();
            else
                bvalHelper = AccessController.doPrivileged(new PrivilegedExceptionAction<BeanValidationHelper>() {
                    @Override
                    public BeanValidationHelper run() throws IllegalAccessException, ClassNotFoundException, InstantiationException {
                        return (BeanValidationHelper) componentContext.getBundleContext().getBundle().loadClass("com.ibm.ws.jca.internal.BeanValidationHelperImpl").newInstance();
                    }
                });
            bvalHelper.setBeanValidationSvc(svc);
        }
        resourceAdapter = configureResourceAdapter();
        if (resourceAdapter != null) {
            // Only propagate thread context when a non-default context service is targeted.
            propagateThreadContext = !"(service.pid=com.ibm.ws.context.manager)".equals(properties.get("contextService.target"));
            workManager = new WorkManagerImpl(this);
            // Normally it's a bad practice to do this in activate. But here we have a requirement
            // to keep the reference count until some subsequent processing occurs after deactivate.
            contextSvc = Utils.priv.getService(componentContext, contextSvcRef);
            jcasu = new JcaServiceUtilities();
            raThreadContextDescriptor = captureRaThreadContext(contextSvc);
            raClassLoader = resourceAdapterSvc.getClassLoader();
            raClassLoader = raClassLoader == null ? null : classLoadingSvc.createThreadContextClassLoader(raClassLoader);
            ArrayList<ThreadContext> threadContext = startTask(raThreadContextDescriptor);
            try {
                // Start the adapter with metadata context, thread context and the RA's own
                // class loader established; unwind each in reverse order afterwards.
                beginContext(raMetaData);
                try {
                    ClassLoader previousClassLoader = jcasu.beginContextClassLoader(raClassLoader);
                    try {
                        resourceAdapter.start(this);
                    } finally {
                        jcasu.endContextClassLoader(raClassLoader, previousClassLoader);
                    }
                } finally {
                    endContext(raMetaData);
                }
            } finally {
                stopTask(raThreadContextDescriptor, threadContext);
            }
        }
        latches.put(resourceAdapterID, latch); // only add latch if activate is successful
    }
}
public class MwRevisionImpl { /** * Resets all member fields that hold information about the revision that is * currently being processed . */ void resetCurrentRevisionData ( ) { } }
this . revisionId = NO_REVISION_ID ; // impossible as an id in MediaWiki this . parentRevisionId = NO_REVISION_ID ; this . text = null ; this . comment = null ; this . format = null ; this . timeStamp = null ; this . model = null ;
public class CodeBuilderFactory {
    /**
     * Creates an injector that overrides the given injector's bindings.
     * Re-applies every binding of the original injector whose type name passes
     * {@code isValid} onto a fresh injector built via {@code Modules2.mixin}.
     *
     * NOTE(review): the {@code module} parameter is never referenced in the visible
     * body, and the tail of the {@code createInjector(...)} call appears truncated
     * (the mixin/createInjector argument list is never closed with the overriding
     * module) — verify against the complete source before relying on this.
     *
     * @param originalInjector the original injector
     * @param module the overriding module
     * @return the new injector
     */
    public static Injector createOverridingInjector(Injector originalInjector, com.google.inject.Module module) {
        final Map<Key<?>, Binding<?>> bindings = originalInjector.getBindings();
        return Guice.createInjector(Modules2.mixin((binder) -> {
            for (Binding<?> binding : bindings.values()) {
                final Type typeLiteral = binding.getKey().getTypeLiteral().getType();
                if (typeLiteral != null) {
                    final String typeName = typeLiteral.getTypeName();
                    // Only re-apply bindings whose type name is considered valid.
                    if (isValid(typeName)) {
                        binding.applyTo(binder);
                    }
                }
            }
        }
    }
}
public class LambdaDslObject { /** * Accepts any key , and each key is mapped to a map that must match the following object definition . * Note : this needs the Java system property " pact . matching . wildcard " set to value " true " when the pact file is verified . * @ param exampleKey Example key to use for generating bodies */ public LambdaDslObject eachKeyLike ( String exampleKey , Consumer < LambdaDslObject > nestedObject ) { } }
final PactDslJsonBody objectLike = object . eachKeyLike ( exampleKey ) ; final LambdaDslObject dslObject = new LambdaDslObject ( objectLike ) ; nestedObject . accept ( dslObject ) ; objectLike . closeObject ( ) ; return this ;
public class WorkspaceContainerFacade { /** * Returns current workspace state . */ public int getState ( ) { } }
boolean hasSuspendedComponents = false ; boolean hasResumedComponents = false ; List < Suspendable > suspendableComponents = getComponentInstancesOfType ( Suspendable . class ) ; for ( Suspendable component : suspendableComponents ) { if ( component . isSuspended ( ) ) { hasSuspendedComponents = true ; } else { hasResumedComponents = true ; } } if ( hasSuspendedComponents && ! hasResumedComponents ) { return ManageableRepository . SUSPENDED ; } else if ( ! hasSuspendedComponents ) { return ManageableRepository . ONLINE ; } else { return ManageableRepository . UNDEFINED ; }
public class RSAUtils { /** * Encrypt data with RSA public key . * Note : input data is divided into chunks of * { @ code size = key ' s size ( in bytes ) - paddingSizeInBytes } , so that long input data can be * encrypted . * @ param base64PublicKeyData * RSA public key in base64 ( base64 of { @ link RSAPublicKey # getEncoded ( ) } ) * @ param data * @ param cipherTransformation * cipher - transformation to use . If empty , { @ link # DEFAULT _ CIPHER _ TRANSFORMATION } * will be used * @ param paddingSizeInBytes * @ return * @ throws NoSuchAlgorithmException * @ throws InvalidKeySpecException * @ throws InvalidKeyException * @ throws NoSuchPaddingException * @ throws IllegalBlockSizeException * @ throws BadPaddingException * @ throws IOException */ public static byte [ ] encryptWithPublicKey ( String base64PublicKeyData , byte [ ] data , String cipherTransformation , int paddingSizeInBytes ) throws NoSuchAlgorithmException , InvalidKeySpecException , InvalidKeyException , NoSuchPaddingException , IllegalBlockSizeException , BadPaddingException , IOException { } }
RSAPublicKey publicKey = buildPublicKey ( base64PublicKeyData ) ; return encrypt ( publicKey , data , cipherTransformation , paddingSizeInBytes ) ;
public class ReceiveMessageRequest {
    /**
     * Appends message attribute names to receive. Names may be exact, {@code All},
     * {@code .*}, or a prefix wildcard such as {@code bar.*}. Names can contain
     * alphanumerics, underscore, hyphen and period; are case-sensitive and unique per
     * message; must not use AWS-reserved prefixes ({@code AWS.}/{@code Amazon.} in any
     * casing); must not start/end with a period or contain {@code ..}; max 256 chars.
     * <b>NOTE:</b> this method appends to the existing list (if any). Use
     * {@link #setMessageAttributeNames(java.util.Collection)} or
     * {@link #withMessageAttributeNames(java.util.Collection)} to override existing values.
     *
     * @param messageAttributeNames the attribute names to append
     * @return this, so that method calls can be chained together
     */
    public ReceiveMessageRequest withMessageAttributeNames(String... messageAttributeNames) {
        if (this.messageAttributeNames == null) {
            // Lazily create the backing list, presized to the incoming values.
            setMessageAttributeNames(new com.amazonaws.internal.SdkInternalList<String>(messageAttributeNames.length));
        }
        for (String ele : messageAttributeNames) {
            this.messageAttributeNames.add(ele);
        }
        return this;
    }
}
public class Metadata {
    /**
     * Gets a metadata value, returning the highest-priority value when multiple are found.
     * Matching order: exact key; otherwise case-insensitive key or a known alias
     * (date→datetime/creatordate, description→imagedescription, creator→artist/creatortool).
     * When several candidates exist, container priority is (descending): EXIF, XMP, IPTC,
     * plain Tiff tag.
     *
     * @param name the name of the metadata
     * @return the tiff object with the value of the metadata, or null if none found
     */
    public TiffObject get(String name) {
        TiffObject result = null;
        String container = null;
        ArrayList<TiffObject> found = new ArrayList<>();
        if (contains(name)) {
            // Collect objects with this exact name.
            if (metadata.get(name).getObjectList().size() == 1) {
                found.add(getFirst(name));
            } else {
                for (TiffObject to : metadata.get(name).getObjectList()) {
                    found.add(to);
                }
            }
        } else {
            // Collect objects with a similar or equivalent (aliased) name.
            for (String key : metadata.keySet()) {
                boolean similar = key.toLowerCase().equals(name.toLowerCase());
                if (!similar)
                    similar = name.toLowerCase().equals("date") && key.toLowerCase().equals("datetime");
                if (!similar)
                    similar = name.toLowerCase().equals("date") && key.toLowerCase().equals("creatordate");
                if (!similar)
                    similar = name.toLowerCase().equals("description") && key.toLowerCase().equals("imagedescription");
                if (!similar)
                    similar = name.toLowerCase().equals("creator") && key.toLowerCase().equals("artist");
                if (!similar)
                    similar = name.toLowerCase().equals("creator") && key.toLowerCase().equals("creatortool");
                if (similar) {
                    for (TiffObject to : metadata.get(key).getObjectList()) {
                        found.add(to);
                    }
                }
            }
        }
        // Return the highest-priority result.
        if (found.size() == 1) {
            result = found.get(0);
        } else {
            for (TiffObject to : found) {
                if (result == null) {
                    result = to;
                    container = to.getContainer();
                } else if (to.getContainer() != null) {
                    // Preference (descending): EXIF, XMP, IPTC, Tiff tag.
                    if (container == null || to.getContainer().equals("EXIF") || (to.getContainer().equals("XMP") && container.equals("IPTC"))) {
                        result = to;
                        container = to.getContainer();
                    }
                }
            }
        }
        return result;
    }
}
public class CodeVisitor { /** * Sets a short value . */ public void setShort ( int offset , int value ) { } }
byte [ ] code = getCode ( ) ; code [ offset + 0 ] = ( byte ) ( value >> 8 ) ; code [ offset + 1 ] = ( byte ) ( value ) ;
public class BufferedISPNCache {
    /**
     * {@inheritDoc}
     *
     * Delegates directly to the underlying Infinispan cache; the entry is stored
     * only if no value is already associated with the key, with the given lifespan.
     */
    public Object putIfAbsent(CacheKey key, Object value, long lifespan, TimeUnit unit) {
        return parentCache.putIfAbsent(key, value, lifespan, unit);
    }
}
public class DateParameter {
    /**
     * Writes this date to the socket in the binary protocol format:
     * a 7-byte payload of year (2 bytes), month, day, then hour/minute/second
     * all zeroed (date-only value).
     *
     * @param pos socket output stream
     * @throws IOException if a socket error occurs
     */
    public void writeBinary(final PacketOutputStream pos) throws IOException {
        // Interpret the stored millis in the configured time zone.
        Calendar calendar = Calendar.getInstance(timeZone);
        calendar.setTimeInMillis(date.getTime());
        pos.write((byte) 7); // payload length
        pos.writeShort((short) calendar.get(Calendar.YEAR));
        // Calendar.MONTH is 0-based; the protocol expects 1-12.
        pos.write((byte) ((calendar.get(Calendar.MONTH) + 1) & 0xff));
        pos.write((byte) (calendar.get(Calendar.DAY_OF_MONTH) & 0xff));
        pos.write((byte) 0); // hour
        pos.write((byte) 0); // minute
        pos.write((byte) 0); // second
    }
}
public class ZookeeperPathUtils { /** * 客户端当前正在工作的running节点 */ public static String getDestinationClientRunning ( String destination , short clientId ) { } }
return MessageFormat . format ( DESTINATION_CLIENTID_RUNNING_NODE , destination , String . valueOf ( clientId ) ) ;
public class MetadataService {
    /**
     * Restores a {@link ProjectMetadata} whose name equals the specified
     * {@code projectName} by pushing a JSON patch that removes the project's
     * removal marker from the metadata repository.
     *
     * @param author      the author of the restore commit
     * @param projectName the project to restore
     * @return a future completing with the revision of the restore commit
     */
    public CompletableFuture<Revision> restoreProject(Author author, String projectName) {
        requireNonNull(author, "author");
        requireNonNull(projectName, "projectName");
        // Removing PROJECT_REMOVAL from metadata.json un-deletes the project.
        final Change<JsonNode> change = Change.ofJsonPatch(METADATA_JSON, new RemoveOperation(PROJECT_REMOVAL).toJsonNode());
        return metadataRepo.push(projectName, Project.REPO_DOGMA, author, "Restore the project: " + projectName, change);
    }
}
public class Tools {
    /**
     * LibO shortens menu items with more than ~100 characters by dropping text in
     * the middle, which isn't sensible. This method shortens the text up front in
     * order to preserve the important parts: bracketed/parenthesised trailing
     * segments are dropped first, and only then is the text truncated at the end.
     *
     * @param comment the menu-item text to shorten
     * @return the text, shortened to at most 100 characters
     */
    public static String shortenComment(String comment) {
        final int limit = 100;
        if (comment.length() <= limit) {
            return comment;
        }
        // Drop trailing "[...]" segments, then trailing "(...)" segments.
        String shortened = dropBracketedSuffixes(comment, " [", ']', limit);
        shortened = dropBracketedSuffixes(shortened, " (", ')', limit);
        // Still too long? Truncate at the end and mark with an ellipsis.
        if (shortened.length() > limit) {
            shortened = shortened.substring(0, limit - 1) + "…";
        }
        return shortened;
    }

    /** Repeatedly removes the last " [...]"/" (...)"-style segment while the text exceeds the limit. */
    private static String dropBracketedSuffixes(String text, String open, char close, int limit) {
        while (text.lastIndexOf(open) > 0
                && text.lastIndexOf(close) > text.lastIndexOf(open)
                && text.length() > limit) {
            final int openAt = text.lastIndexOf(open);
            final int closeAt = text.lastIndexOf(close);
            text = text.substring(0, openAt) + text.substring(closeAt + 1);
        }
        return text;
    }
}
public class CassandraExtractor { /** * Returns a list of hosts on which the given split resides . */ @ Override public List < String > getPreferredLocations ( Partition tokenRange ) { } }
return ( ( DeepPartition ) tokenRange ) . splitWrapper ( ) . getReplicas ( ) ;
public class HtmlEscape { /** * Perform an HTML 4 level 2 ( result is ASCII ) < strong > escape < / strong > operation on a < tt > Reader < / tt > input , * writing results to a < tt > Writer < / tt > . * < em > Level 2 < / em > means this method will escape : * < ul > * < li > The five markup - significant characters : < tt > & lt ; < / tt > , < tt > & gt ; < / tt > , < tt > & amp ; < / tt > , * < tt > & quot ; < / tt > and < tt > & # 39 ; < / tt > < / li > * < li > All non ASCII characters . < / li > * < / ul > * This escape will be performed by replacing those chars by the corresponding HTML 4 Named Character References * ( e . g . < tt > ' & amp ; acute ; ' < / tt > ) when such NCR exists for the replaced character , and replacing by a decimal * character reference ( e . g . < tt > ' & amp ; # 8345 ; ' < / tt > ) when there there is no NCR for the replaced character . * This method calls { @ link # escapeHtml ( Reader , Writer , HtmlEscapeType , HtmlEscapeLevel ) } with the following * preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link org . unbescape . html . HtmlEscapeType # HTML4 _ NAMED _ REFERENCES _ DEFAULT _ TO _ DECIMAL } < / li > * < li > < tt > level < / tt > : * { @ link org . unbescape . html . HtmlEscapeLevel # LEVEL _ 2 _ ALL _ NON _ ASCII _ PLUS _ MARKUP _ SIGNIFICANT } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param reader the < tt > Reader < / tt > reading the text to be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ throws IOException if an input / output exception occurs * @ since 1.1.2 */ public static void escapeHtml4 ( final Reader reader , final Writer writer ) throws IOException { } }
escapeHtml ( reader , writer , HtmlEscapeType . HTML4_NAMED_REFERENCES_DEFAULT_TO_DECIMAL , HtmlEscapeLevel . LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT ) ;
public class WebAppSecurityConfigImpl { /** * { @ inheritDoc } */ @ Override public boolean getAllowFailOverToFormLogin ( ) { } }
if ( allowFailOverToAuthMethod != null && allowFailOverToAuthMethod . equalsIgnoreCase ( LoginConfiguration . FORM ) ) return true ; else return false ;
public class xHelmNotationParser { /** * Extracts the HELM string from the root node of the XHELM document * @ param rootElement root element * @ return the complex notation string */ public static String getHELMNotationString ( Element rootElement ) { } }
Element helmNotationElement = rootElement . getChild ( "HelmNotation" ) ; return helmNotationElement . getText ( ) ;
public class PollingThread { /** * Creates a request packet for the specified action . * @ param action Action to perform . * @ param session Session receiving the request . * @ return The fully formed request . */ private Request getRequest ( Action action , BrokerSession session ) { } }
switch ( action ) { case QUERY : if ( query == null ) { query = new Request ( action ) ; query . addParameter ( "UID" , session . getId ( ) ) ; } return query ; case PING : if ( ping == null ) { ping = new Request ( action ) ; } return ping ; default : return null ; }
public class CPFriendlyURLEntryUtil { /** * Returns the cp friendly url entry where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; and languageId = & # 63 ; and urlTitle = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param groupId the group ID * @ param classNameId the class name ID * @ param classPK the class pk * @ param languageId the language ID * @ param urlTitle the url title * @ return the matching cp friendly url entry , or < code > null < / code > if a matching cp friendly url entry could not be found */ public static CPFriendlyURLEntry fetchByG_C_C_L_U ( long groupId , long classNameId , long classPK , String languageId , String urlTitle ) { } }
return getPersistence ( ) . fetchByG_C_C_L_U ( groupId , classNameId , classPK , languageId , urlTitle ) ;
public class ClassLoaderUtil { /** * Check whether a classloader can load the given class . */ @ SuppressWarnings ( "unchecked" ) public static < T > boolean hasClass ( Class < T > type , ClassLoader classloader ) { } }
try { loadClass ( type . getName ( ) , classloader ) ; return true ; } catch ( RuntimeException e ) { if ( e . getCause ( ) instanceof ClassNotFoundException ) { return false ; } throw e ; }
public class MarkLogicClientImpl {

    /**
     * Executes an INSERT of a single triple.
     *
     * The triple components are supplied via SPARQL bindings (?s, ?p, ?o); the generated
     * update string only selects which graph(s) receive the triple.
     *
     * @param baseURI base URI for the update; also set on the query definition when non-empty
     * @param subject triple subject; bound to ?s when non-null
     * @param predicate triple predicate; bound to ?p when non-null
     * @param object triple object; bound via bindObject when non-null
     * @param tx transaction in which the update executes
     * @param contexts target named graphs; a null entry falls back to the default graph
     * @throws MarkLogicSesameException
     */
    public void performAdd(String baseURI, Resource subject, URI predicate, Value object, Transaction tx, Resource... contexts) throws MarkLogicSesameException {
        StringBuilder sb = new StringBuilder();
        if (notNull(contexts) && contexts.length > 0) {
            if (notNull(baseURI)) sb.append("BASE <" + baseURI + ">\n");
            sb.append("INSERT DATA { ");
            // One GRAPH clause per requested context; null contexts target the default graph.
            // NOTE(review): context URIs are concatenated into the update string unescaped —
            // confirm they are trusted input.
            for (int i = 0; i < contexts.length; i++) {
                if (notNull(contexts[i])) {
                    sb.append("GRAPH <" + contexts[i].stringValue() + "> { ?s ?p ?o .} ");
                } else {
                    sb.append("GRAPH <" + DEFAULT_GRAPH_URI + "> { ?s ?p ?o .} ");
                }
            }
            sb.append("}");
        } else {
            // No contexts supplied: insert into the default graph only.
            sb.append("INSERT DATA { GRAPH <" + DEFAULT_GRAPH_URI + "> {?s ?p ?o .}}");
        }
        SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(sb.toString());
        // Apply optional client-level settings when present.
        if (notNull(ruleset)) {
            qdef.setRulesets(ruleset);
        }
        if (notNull(graphPerms)) {
            qdef.setUpdatePermissions(graphPerms);
        }
        if (notNull(baseURI) && !baseURI.isEmpty()) {
            qdef.setBaseUri(baseURI);
        }
        // Bind the triple components that were supplied.
        if (notNull(subject)) qdef.withBinding("s", subject.stringValue());
        if (notNull(predicate)) qdef.withBinding("p", predicate.stringValue());
        if (notNull(object)) bindObject(qdef, "o", object);
        sparqlManager.executeUpdate(qdef, tx);
    }
}
public class AbstractJTACrud { /** * Deletes the given entity instance ( specified by its id ) . The instance is removed from the persistent layer . * @ param id the id */ @ Override public void delete ( final I id ) { } }
if ( entity != null ) { inTransaction ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { final T entity = findOne ( id ) ; T attached = getAttached ( entity ) ; if ( attached != null ) { entityManager . remove ( attached ) ; } return null ; } } ) ; }
public class DescribeAccountAttributesRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param describeAccountAttributesRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving the output
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeAccountAttributesRequest describeAccountAttributesRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeAccountAttributesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // DescribeAccountAttributesRequest carries no marshallable members, so there is
            // nothing to write; the try/catch shell is kept by the code generator for uniformity.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Validate { /** * Method without varargs to increase performance */ public static < T > void isNull ( final T object , final String message ) { } }
INSTANCE . isNull ( object , message ) ;
public class BitUtil { /** * Is an address aligned on a boundary . * @ param address to be tested . * @ param alignment boundary the address is tested against . * @ return true if the address is on the aligned boundary otherwise false . * @ throws IllegalArgumentException if the alignment is not a power of 2. */ public static boolean isAligned ( final long address , final int alignment ) { } }
if ( ! BitUtil . isPowerOfTwo ( alignment ) ) { throw new IllegalArgumentException ( "alignment must be a power of 2: alignment=" + alignment ) ; } return ( address & ( alignment - 1 ) ) == 0 ;
public class Token {

    /**
     * Escapes the given string so that tokenize(escapeString(str)).get(0).getString() === str:
     * the text is wrapped in double quotes and every embedded double quote is doubled.
     *
     * @param input String to be escaped
     * @return escaped string
     */
    public static String escapeString(String input) {
        final StringBuilder out = new StringBuilder(input.length() + 10);
        out.append('"')
           .append(input.replace("\"", "\"\""))
           .append('"');
        return out.toString();
    }
}
public class AfplibPackageImpl {

    /**
     * Returns the EEnum metadata object for ExternalAlgorithmALGTYPE, resolving it
     * lazily from the globally registered AFPLIB package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getExternalAlgorithmALGTYPE() {
        if (externalAlgorithmALGTYPEEEnum == null) {
            // Classifier index 120 is fixed by the generated package model;
            // do not change it by hand.
            externalAlgorithmALGTYPEEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(120);
        }
        return externalAlgorithmALGTYPEEEnum;
    }
}
public class CmsGalleryService { /** * Gets an initialized CmsObject to be used for the actual search for a given search bean . < p > * @ param searchObj the search object * @ return the initialized CmsObject * @ throws CmsException if something goes wrong */ protected CmsObject getSearchCms ( CmsGallerySearchBean searchObj ) throws CmsException { } }
CmsObject searchCms = getCmsObject ( ) ; if ( searchObj . isIncludeExpired ( ) ) { searchCms = OpenCms . initCmsObject ( getCmsObject ( ) ) ; searchCms . getRequestContext ( ) . setRequestTime ( CmsResource . DATE_RELEASED_EXPIRED_IGNORE ) ; } return searchCms ;
public class Album { /** * Preview Album . */ public static BasicGalleryWrapper < GalleryAlbumWrapper , AlbumFile , String , AlbumFile > galleryAlbum ( Activity activity ) { } }
return new GalleryAlbumWrapper ( activity ) ;
public class FilterValues { /** * Returns a new FilterValues instance with the next blank parameters filled in . * @ param values parameter values to fill in ; if null or empty , this * FilterValues instance is returned * @ throws IllegalStateException if no blank parameters or if too many * parameter values supplied * @ throws IllegalArgumentException if type doesn ' t match */ public FilterValues < S > withValues ( Object ... values ) { } }
if ( values == null ) { return this ; } if ( values . length > getBlankParameterCount ( ) ) { throw new IllegalStateException ( "Too many values supplied" ) ; } FilterValues < S > filterValues = this ; for ( Object value : values ) { filterValues = filterValues . with ( value ) ; } return filterValues ;
public class N { /** * Returns an immutable empty map if the specified Map is < code > null < / code > , otherwise itself is returned . * @ param map * @ return */ public static < K , V > Map < K , V > nullToEmpty ( final Map < K , V > map ) { } }
return map == null ? N . < K , V > emptyMap ( ) : map ;
public class OcAgentTraceExporterHandler { /** * One stub can be used for both Export RPC and Config RPC . */ private static TraceServiceGrpc . TraceServiceStub getTraceServiceStub ( String endPoint , Boolean useInsecure , SslContext sslContext ) { } }
ManagedChannelBuilder < ? > channelBuilder ; if ( useInsecure ) { channelBuilder = ManagedChannelBuilder . forTarget ( endPoint ) . usePlaintext ( ) ; } else { channelBuilder = NettyChannelBuilder . forTarget ( endPoint ) . negotiationType ( NegotiationType . TLS ) . sslContext ( sslContext ) ; } ManagedChannel channel = channelBuilder . build ( ) ; return TraceServiceGrpc . newStub ( channel ) ;
public class BlockSelectorMarkupHandler {

    /**
     * DOCTYPE Clause events: routes the DOCTYPE event to the selected handler when any
     * block selector matches at the current markup level, otherwise to the non-selected
     * handler, maintaining the current-selection markers around the delegated call.
     */
    @Override
    public void handleDocType(
            final char[] buffer,
            final int keywordOffset, final int keywordLen, final int keywordLine, final int keywordCol,
            final int elementNameOffset, final int elementNameLen, final int elementNameLine, final int elementNameCol,
            final int typeOffset, final int typeLen, final int typeLine, final int typeCol,
            final int publicIdOffset, final int publicIdLen, final int publicIdLine, final int publicIdCol,
            final int systemIdOffset, final int systemIdLen, final int systemIdLine, final int systemIdCol,
            final int internalSubsetOffset, final int internalSubsetLen, final int internalSubsetLine, final int internalSubsetCol,
            final int outerOffset, final int outerLen, final int outerLine, final int outerCol)
            throws ParseException {

        if (!this.insideAllSelectorMatchingBlock) {

            // Not inside a fully-matching block: re-evaluate every selector for this event.
            this.someSelectorsMatch = false;
            for (int i = 0; i < this.selectorsLen; i++) {
                if (this.matchingMarkupLevelsPerSelector[i] > this.markupLevel) {
                    // Selector has not matched at an enclosing level yet: ask its filter.
                    this.selectorMatches[i] =
                            this.selectorFilters[i].matchDocTypeClause(true, this.markupLevel, this.markupBlocks[this.markupLevel]);
                    if (this.selectorMatches[i]) {
                        this.someSelectorsMatch = true;
                    }
                } else {
                    // Selector already matched at an enclosing level: it keeps matching here.
                    this.selectorMatches[i] = true;
                    this.someSelectorsMatch = true;
                }
            }

            if (this.someSelectorsMatch) {
                // At least one selector matches: forward to the selected handler,
                // bracketing the call with selection markers.
                markCurrentSelection();
                this.selectedHandler.handleDocType(
                        buffer,
                        keywordOffset, keywordLen, keywordLine, keywordCol,
                        elementNameOffset, elementNameLen, elementNameLine, elementNameCol,
                        typeOffset, typeLen, typeLine, typeCol,
                        publicIdOffset, publicIdLen, publicIdLine, publicIdCol,
                        systemIdOffset, systemIdLen, systemIdLine, systemIdCol,
                        internalSubsetOffset, internalSubsetLen, internalSubsetLine, internalSubsetCol,
                        outerOffset, outerLen, outerLine, outerCol);
                unmarkCurrentSelection();
                return;
            }

            // No selector matches: forward to the non-selected handler.
            unmarkCurrentSelection();
            this.nonSelectedHandler.handleDocType(
                    buffer,
                    keywordOffset, keywordLen, keywordLine, keywordCol,
                    elementNameOffset, elementNameLen, elementNameLine, elementNameCol,
                    typeOffset, typeLen, typeLine, typeCol,
                    publicIdOffset, publicIdLen, publicIdLine, publicIdCol,
                    systemIdOffset, systemIdLen, systemIdLine, systemIdCol,
                    internalSubsetOffset, internalSubsetLen, internalSubsetLine, internalSubsetCol,
                    outerOffset, outerLen, outerLine, outerCol);
            return;
        }

        // Inside a block where all selectors already match: forward directly to the
        // selected handler without re-evaluating selectors.
        markCurrentSelection();
        this.selectedHandler.handleDocType(
                buffer,
                keywordOffset, keywordLen, keywordLine, keywordCol,
                elementNameOffset, elementNameLen, elementNameLine, elementNameCol,
                typeOffset, typeLen, typeLine, typeCol,
                publicIdOffset, publicIdLen, publicIdLine, publicIdCol,
                systemIdOffset, systemIdLen, systemIdLine, systemIdCol,
                internalSubsetOffset, internalSubsetLen, internalSubsetLine, internalSubsetCol,
                outerOffset, outerLen, outerLine, outerCol);
        unmarkCurrentSelection();
    }
}
public class Channel {

    /**
     * Shutdown the channel with all resources released: listener handles are
     * unregistered, service discovery stopped, peers and orderers shut down, and
     * background threads/schedulers cancelled. Idempotent — a second call returns
     * immediately.
     *
     * @param force force immediate shutdown.
     */
    public synchronized void shutdown(boolean force) {
        // Already shut down: nothing to do.
        if (shutdown) {
            return;
        }
        // Swap-and-null pattern: take the handle, clear the field, then clean up, so a
        // failure during cleanup cannot leave a stale reference behind.
        String ltransactionListenerProcessorHandle = transactionListenerProcessorHandle;
        transactionListenerProcessorHandle = null;
        if (null != ltransactionListenerProcessorHandle) {
            try {
                unregisterBlockListener(ltransactionListenerProcessorHandle);
            } catch (Exception e) {
                // Log and continue: shutdown is best-effort.
                logger.error(format("Shutting down channel %s transactionListenerProcessorHandle", name), e);
            }
        }
        String lchaincodeEventUpgradeListenerHandle = chaincodeEventUpgradeListenerHandle;
        chaincodeEventUpgradeListenerHandle = null;
        if (null != lchaincodeEventUpgradeListenerHandle) {
            try {
                unregisterChaincodeEventListener(lchaincodeEventUpgradeListenerHandle);
            } catch (Exception e) {
                logger.error(format("Shutting down channel %s chaincodeEventUpgradeListenr", name), e);
            }
        }
        // Mark the channel down before tearing down the remaining resources.
        initialized = false;
        shutdown = true;
        final ServiceDiscovery lserviceDiscovery = serviceDiscovery;
        serviceDiscovery = null;
        if (null != lserviceDiscovery) {
            lserviceDiscovery.shutdown();
        }
        if (chainCodeListeners != null) {
            chainCodeListeners.clear();
        }
        if (blockListeners != null) {
            blockListeners.clear();
        }
        // Detach from the owning client.
        if (client != null) {
            client.removeChannel(this);
        }
        client = null;
        // Copy the peer list: removePeerInternal mutates the underlying collection.
        for (Peer peer : new ArrayList<>(getPeers())) {
            try {
                removePeerInternal(peer);
                peer.shutdown(force);
            } catch (Exception e) {
                // Best effort: keep shutting down the remaining peers.
            }
        }
        peers.clear(); // make sure.
        peerEndpointMap.clear();
        ordererEndpointMap.clear(); // Make sure
        for (Set<Peer> peerRoleSet : peerRoleSetMap.values()) {
            peerRoleSet.clear();
        }
        for (Orderer orderer : getOrderers()) {
            orderer.shutdown(force);
        }
        orderers.clear();
        // Stop the event dispatch thread, if running.
        if (null != eventQueueThread) {
            eventQueueThread.interrupt();
            eventQueueThread = null;
        }
        // Cancel the periodic sweeper task and its executor.
        ScheduledFuture<?> lsweeper = sweeper;
        sweeper = null;
        if (null != lsweeper) {
            lsweeper.cancel(true);
        }
        ScheduledExecutorService lse = sweeperExecutorService;
        sweeperExecutorService = null;
        if (null != lse) {
            lse.shutdownNow();
        }
    }
}
public class StringParser { /** * Parse the given { @ link Object } as { @ link Integer } with radix * { @ value # DEFAULT _ RADIX } . * @ param aObject * The object to parse . May be < code > null < / code > . * @ return < code > null < / code > if the object does not represent a valid value . */ @ Nullable public static Integer parseIntObj ( @ Nullable final Object aObject ) { } }
return parseIntObj ( aObject , DEFAULT_RADIX , null ) ;
public class AbstractInterval { /** * Does this time interval contain the specified millisecond instant . * Non - zero duration intervals are inclusive of the start instant and * exclusive of the end . A zero duration interval cannot contain anything . * @ param millisInstant the instant to compare to , * millisecond instant from 1970-01-01T00:00:00Z * @ return true if this time interval contains the millisecond */ public boolean contains ( long millisInstant ) { } }
long thisStart = getStartMillis ( ) ; long thisEnd = getEndMillis ( ) ; return ( millisInstant >= thisStart && millisInstant < thisEnd ) ;
public class DateUtil { /** * 偏移天 * @ param date 日期 * @ param offset 偏移天数 , 正数向未来偏移 , 负数向历史偏移 * @ return 偏移后的日期 */ public static DateTime offsetDay ( Date date , int offset ) { } }
return offset ( date , DateField . DAY_OF_YEAR , offset ) ;
public class URLHelper {

    /**
     * Get the final representation of the URL using the specified elements.
     *
     * @param sPath
     *        The main path. May be <code>null</code>.
     * @param sQueryParams
     *        The set of all query parameters already concatenated with the
     *        correct characters (&amp; and =). May be <code>null</code>.
     * @param sAnchor
     *        An optional anchor to be added. May be <code>null</code>.
     * @return May be <code>null</code> if path, anchor and parameters are
     *         <code>null</code>.
     */
    @Nullable
    public static String getURLString(@Nullable final String sPath,
                                      @Nullable final String sQueryParams,
                                      @Nullable final String sAnchor) {
        final boolean bHasPath = StringHelper.hasText(sPath);
        final boolean bHasQueryParams = StringHelper.hasText(sQueryParams);
        final boolean bHasAnchor = StringHelper.hasText(sAnchor);
        if (GlobalDebug.isDebugMode()) {
            // Consistency checks (warnings only): reserved URL characters should not
            // appear in the individual parts handed to this method.
            if (bHasPath) {
                if (sPath.contains(QUESTIONMARK_STR))
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn("Path contains the question mark ('?') character: '" + sPath + "'");
                if (sPath.contains(AMPERSAND_STR))
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn("Path contains the ampersand ('&') character: '" + sPath + "'");
                if (sPath.contains(HASH_STR))
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn("Path contains the hash ('#') character: '" + sPath + "'");
            }
            if (bHasQueryParams) {
                if (sQueryParams.contains(QUESTIONMARK_STR))
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn("Query parameters contain the question mark ('?') character: '" + sQueryParams + "'");
            }
            if (bHasAnchor) {
                if (sAnchor.contains(HASH_STR))
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn("Anchor contains the hash ('#') character: '" + sAnchor + "'");
            }
        }
        // Neither query parameters nor anchor: nothing to assemble.
        if (!bHasQueryParams && !bHasAnchor) {
            // Return URL as is (may be null)
            return sPath;
        }
        final StringBuilder aSB = new StringBuilder();
        if (bHasPath)
            aSB.append(sPath);
        if (bHasQueryParams) {
            final boolean bHasQuestionMark = aSB.indexOf(QUESTIONMARK_STR) >= 0;
            if (bHasQuestionMark) {
                // The path already contains a '?': separate with '&' unless the last
                // character is already '?' or '&' (base href already ends a parameter list).
                final char cLast = StringHelper.getLastChar(aSB);
                if (cLast != QUESTIONMARK && cLast != AMPERSAND)
                    aSB.append(AMPERSAND);
            } else {
                // First parameter: start the query string.
                aSB.append(QUESTIONMARK);
            }
            // add all parameters (already concatenated by the caller)
            aSB.append(sQueryParams);
        }
        // Append anchor, prefixing '#' only when not already present.
        if (bHasAnchor) {
            if (StringHelper.getLastChar(aSB) != HASH)
                aSB.append(HASH);
            aSB.append(sAnchor);
        }
        // Avoid empty URLs
        if (aSB.length() == 0)
            return QUESTIONMARK_STR;
        return aSB.toString();
    }
}
public class Quint { /** * < p > next . < / p > * @ param t1 a T1 object . * @ param t2 a T2 object . * @ param t3 a T3 object . * @ param t4 a T4 object . * @ param t5 a T5 object . * @ param < T1 > a T1 object . * @ param < T2 > a T2 object . * @ param < T3 > a T3 object . * @ param < T4 > a T4 object . * @ param < T5 > a T5 object . * @ return a { @ link org . rapidpm . frp . model . serial . Quint } object . */ public static < T1 extends Serializable , T2 extends Serializable , T3 extends Serializable , T4 extends Serializable , T5 extends Serializable > Quint < T1 , T2 , T3 , T4 , T5 > next ( final T1 t1 , final T2 t2 , final T3 t3 , final T4 t4 , final T5 t5 ) { } }
return new Quint < > ( t1 , t2 , t3 , t4 , t5 ) ;
public class ChangesLog { /** * Returns workspace changed size accumulated during * some period . */ public long getWorkspaceChangedSize ( ) { } }
long wsDelta = 0 ; Iterator < ChangesItem > changes = iterator ( ) ; while ( changes . hasNext ( ) ) { wsDelta += changes . next ( ) . getWorkspaceChangedSize ( ) ; } return wsDelta ;
public class IncrementalIndexRow { /** * bytesInMemory estimates the size of IncrementalIndexRow key , it takes into account the timestamp ( long ) , * dims ( Object Array ) and dimensionDescsList ( List ) . Each of these are calculated as follows : * < ul > * < li > timestamp : Long . BYTES * < li > dims array : Integer . BYTES * array length + Long . BYTES ( dims object ) + dimsKeySize ( passed via constructor ) * < li > dimensionDescList : Long . BYTES ( shared pointer ) * < li > dimsKeySize : this value is passed in based on the key type ( int , long , double , String etc . ) * < / ul > * @ return long estimated bytesInMemory */ public long estimateBytesInMemory ( ) { } }
long sizeInBytes = Long . BYTES + Integer . BYTES * dims . length + Long . BYTES + Long . BYTES ; sizeInBytes += dimsKeySize ; return sizeInBytes ;
public class ParameterFinder { /** * Test . * @ param args */ public static void main ( String [ ] args ) { } }
HashMap < String , String > props = new HashMap < > ( ) ; props . put ( "Params_1.TAN2StepParams3.ParTAN2Step4.TAN2StepParams2.secfunc" , "Test 1" ) ; props . put ( "Params_2.TAN2StepParams3.ParTAN2Step4.TAN2StepParams2.1secfunc" , "Test 2" ) ; props . put ( "Params_1.PIN2StepParams3.ParTAN2Step4.TAN2StepParams2.2secfunc" , "Test 3" ) ; props . put ( "Params_1.TANStepParams3.ParTAN2Step4.TAN2StepParams2.3secfunc" , "Test 4" ) ; props . put ( "Params_1.TAN2StepParams3.ParTAN2Step4.TAN2StepParams2.Foo" , "Test 5" ) ; props . put ( "Params_2.TAN2StepPar.ParTAN2Step.TAN2StepParams.5secfunc" , "Test 5" ) ; HashMap < String , String > result = find ( props , "Params_*.TAN2StepPar*.ParTAN2Step*.TAN2StepParams*.*secfunc" ) ; Iterator < String > e = result . keySet ( ) . iterator ( ) ; while ( e . hasNext ( ) ) { String name = e . next ( ) ; String value = result . get ( name ) ; System . out . println ( name + ": " + value ) ; }
public class BoundedBuffer { /** * Inserts an object into the buffer . Note that * since there is no synchronization , it is assumed * that this is done outside the scope of this call . */ private final void insert ( T t ) { } }
buffer [ putIndex ] = t ; if ( ++ putIndex >= buffer . length ) { putIndex = 0 ; }
public class CPSpecificationOptionPersistenceImpl { /** * Caches the cp specification option in the entity cache if it is enabled . * @ param cpSpecificationOption the cp specification option */ @ Override public void cacheResult ( CPSpecificationOption cpSpecificationOption ) { } }
entityCache . putResult ( CPSpecificationOptionModelImpl . ENTITY_CACHE_ENABLED , CPSpecificationOptionImpl . class , cpSpecificationOption . getPrimaryKey ( ) , cpSpecificationOption ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_UUID_G , new Object [ ] { cpSpecificationOption . getUuid ( ) , cpSpecificationOption . getGroupId ( ) } , cpSpecificationOption ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_G_K , new Object [ ] { cpSpecificationOption . getGroupId ( ) , cpSpecificationOption . getKey ( ) } , cpSpecificationOption ) ; cpSpecificationOption . resetOriginalValues ( ) ;
public class TwoDropDownChoicesBean { /** * Gets the child choices . * @ return the child choices */ public List < T > getChildChoices ( ) { } }
final List < T > childChoices = getModelsMap ( ) . get ( getSelectedRootOption ( ) ) ; if ( CollectionExtensions . isEmpty ( childChoices ) ) { return Collections . emptyList ( ) ; } return childChoices ;
public class HashBasedHipsterGraph { /** * Returns a list of the edges in the graph . * @ return edges of the graph . */ @ Override public Iterable < GraphEdge < V , E > > edges ( ) { } }
return F . map ( vedges ( ) , new Function < Map . Entry < V , GraphEdge < V , E > > , GraphEdge < V , E > > ( ) { @ Override public GraphEdge < V , E > apply ( Map . Entry < V , GraphEdge < V , E > > entry ) { return entry . getValue ( ) ; } } ) ;
public class MetricsNames { /** * Create an MetricKey object based on metric name , metric type and tags associated . * The MetricKey object contains cache key for cache lookup and registry key for registry lookup . * @ param metric the metric name . * @ param tags the tag ( s ) associated with the metric . * @ return the MetricKey object contains cache lookup key and metric registry key . */ public static MetricKey metricKey ( String metric , String ... tags ) { } }
if ( tags == null || tags . length == 0 ) { // if no tags supplied , the original metric name is used for both cache key and registry key . return new MetricKey ( metric , metric ) ; } else { // if tag is supplied , append tag value to form cache key ; original metric name is registry key . StringBuilder sb = new StringBuilder ( metric ) ; Preconditions . checkArgument ( ( tags . length % 2 ) == 0 , "Tags is a set of key/value pair so the size must be even: %s" , tags . length ) ; for ( int i = 0 ; i < tags . length ; i += 2 ) { Preconditions . checkArgument ( ! Strings . isNullOrEmpty ( tags [ i ] ) || ! Strings . isNullOrEmpty ( tags [ i + 1 ] ) , "Tag name or value cannot be empty or null" ) ; sb . append ( '.' ) . append ( tags [ i + 1 ] ) ; } return new MetricKey ( sb . toString ( ) , metric ) ; }
public class Headers { /** * Indicates if a header exists with the given name * @ param name The header name * @ return true if a header exists with the given name , false otherwise */ public boolean contains ( String name ) { } }
for ( Header header : headers ) { if ( header . getName ( ) . equals ( name ) ) { return true ; } } return false ;
public class EntryUtility {

    /**
     * Extracts the numeric entry ID embedded between the first two underscores of the
     * given entry file name (e.g. "entry_123_title" yields 123). Returns 0 when no such
     * underscore-delimited numeric segment exists.
     *
     * @param entryFileName the entry file name to parse
     * @return the entry ID, or 0 when it cannot be determined
     */
    public static long getEntryId(String entryFileName) {
        final int start = entryFileName.indexOf('_') + 1;
        if (start <= 0) {
            // No underscore at all.
            return 0;
        }
        final int end = entryFileName.indexOf('_', start);
        if (end <= start) {
            // Missing second underscore, or the two underscores are adjacent.
            return 0;
        }
        try {
            return Long.parseLong(entryFileName.substring(start, end));
        } catch (Exception ignored) {
            // Segment between the underscores was not a number.
            return 0;
        }
    }
}
public class CollectionValuedMap { /** * Replaces current Collection mapped to key with the specified Collection . * Use carefully ! */ public Collection < V > put ( K key , Collection < V > collection ) { } }
return map . put ( key , collection ) ;
public class CmsVfsService {

    /**
     * Helper method for converting a multimap from resources to resources into a list
     * of "broken link" beans, which have beans representing the source of the
     * corresponding link as children.<p>
     *
     * @param linkMap a multimap from source resource to the resources it links to
     * @return a list of beans representing links which will be broken
     * @throws CmsException if something goes wrong
     */
    private List<CmsBrokenLinkBean> getBrokenLinkBeans(Multimap<CmsResource, CmsResource> linkMap) throws CmsException {
        CmsBrokenLinkRenderer brokenLinkRenderer = new CmsBrokenLinkRenderer(getCmsObject());
        // Intermediate multimap: rendered source bean -> target bean(s).
        Multimap<CmsBrokenLinkBean, CmsBrokenLinkBean> resultMap = HashMultimap.create();
        for (CmsResource source : linkMap.keySet()) {
            for (CmsResource target : linkMap.get(source)) {
                // Bean for the link target, enriched with additional info.
                CmsBrokenLinkBean targetBean = createSitemapBrokenLinkBean(target);
                addBrokenLinkAdditionalInfo(getCmsObject(), target, targetBean);
                // A single source may render to several beans; each becomes a parent key.
                List<CmsBrokenLinkBean> brokenLinkBeans = brokenLinkRenderer.renderBrokenLink(target, source);
                for (CmsBrokenLinkBean childBean : brokenLinkBeans) {
                    addBrokenLinkAdditionalInfo(getCmsObject(), source, childBean);
                    resultMap.put(childBean, targetBean);
                }
            }
        }
        // now convert multimap representation to parent / child representation
        for (CmsBrokenLinkBean parent : resultMap.keySet()) {
            for (CmsBrokenLinkBean child : resultMap.get(parent)) {
                parent.addChild(child);
            }
        }
        return Lists.newArrayList(resultMap.keySet());
    }
}
public class SchemeMatchFilter { /** * / * ( non - Javadoc ) * @ see org . archive . wayback . util . ObjectFilter # filterObject ( java . lang . Object ) */ @ Override public int filterObject ( CaptureSearchResult r ) { } }
String captureScheme = UrlOperations . urlToScheme ( r . getOriginalUrl ( ) ) ; if ( scheme == null ) { if ( captureScheme == null ) { return FILTER_INCLUDE ; } else { annotationTarget . addCloseMatch ( r . getOriginalHost ( ) , r . getOriginalUrl ( ) ) ; return FILTER_EXCLUDE ; } } if ( scheme . equals ( captureScheme ) ) { return FILTER_INCLUDE ; } else { if ( annotationTarget != null ) { annotationTarget . addCloseMatch ( r . getOriginalHost ( ) , r . getOriginalUrl ( ) ) ; } return FILTER_EXCLUDE ; }
public class WikipediaTemplateInfo { /** * Returns a list containing the ids of all pages that do not contain a template * the name of which equals any of the given Strings . * @ param templateNames * the names of the template that we want to match * @ return A list with the ids of all pages that do not contain any of the the * specified templates * @ throws WikiApiException * If there was any error retrieving the page object ( most * likely if the templates are corrupted ) */ public List < Integer > getPageIdsNotContainingTemplateNames ( List < String > templateNames ) throws WikiApiException { } }
return getFilteredPageIds ( templateNames , false ) ;
public class ApiOvhIp { /** * Alter this object properties * REST : PUT / ip / { ip } / game / { ipOnGame } * @ param body [ required ] New object properties * @ param ip [ required ] * @ param ipOnGame [ required ] */ public void ip_game_ipOnGame_PUT ( String ip , String ipOnGame , OvhGameMitigation body ) throws IOException { } }
String qPath = "/ip/{ip}/game/{ipOnGame}" ; StringBuilder sb = path ( qPath , ip , ipOnGame ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class UserDao {

    /**
     * Selects users by ids, including disabled users. An empty list is returned
     * if the list of ids is empty, without any db round trips.
     * Used by the Governance plugin.
     *
     * @param session the open DB session
     * @param ids the user ids to look up; may be empty
     * @return the matching users
     */
    public List<UserDto> selectByIds(DbSession session, Collection<Integer> ids) {
        // Note: the mapper is resolved once here; executeLargeInputs presumably
        // partitions 'ids' and short-circuits on empty input — TODO confirm in its impl.
        return executeLargeInputs(ids, mapper(session)::selectByIds);
    }
}