signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BeanCodeGen {
    /**
     * Reads a text file into memory, one entry per line.
     *
     * @param file the file to read, decoded as UTF-8
     * @return the lines of the file, in order
     * @throws Exception if the file cannot be opened or read
     */
    private List<String> readFile(File file) throws Exception {
        List<String> content = new ArrayList<>(100);
        // Use the StandardCharsets constant rather than the "UTF-8" string
        // literal: no misspelling risk and no UnsupportedEncodingException
        // code path (the constant is guaranteed present on every JVM).
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                content.add(line);
            }
            return content;
        }
    }
}
public class BatchDetectSentimentResult { /** * A list of objects containing the results of the operation . The results are sorted in ascending order by the * < code > Index < / code > field and match the order of the documents in the input list . If all of the documents contain * an error , the < code > ResultList < / code > is empty . * @ param resultList * A list of objects containing the results of the operation . The results are sorted in ascending order by * the < code > Index < / code > field and match the order of the documents in the input list . If all of the * documents contain an error , the < code > ResultList < / code > is empty . */ public void setResultList ( java . util . Collection < BatchDetectSentimentItemResult > resultList ) { } }
if ( resultList == null ) { this . resultList = null ; return ; } this . resultList = new java . util . ArrayList < BatchDetectSentimentItemResult > ( resultList ) ;
public class CmsScheduledJobInfo {
    /**
     * Sets the cron expression for this job entry.<p>
     *
     * @param cronExpression the cron expression to set
     */
    @SuppressWarnings("unused")
    public void setCronExpression(String cronExpression) {
        // Rejects the change if this job info has been frozen (made read-only).
        checkFrozen();
        try {
            // check if the cron expression is valid: constructing a throwaway
            // CronExpression parses it and throws on any syntax error
            new CronExpression(cronExpression);
        } catch (Exception e) {
            // Re-wrap any parse failure in the localized argument exception
            // expected by callers; the original cause is intentionally dropped.
            throw new CmsIllegalArgumentException(
                Messages.get().container(Messages.ERR_BAD_CRON_EXPRESSION_2, getJobName(), cronExpression));
        }
        m_cronExpression = cronExpression;
    }
}
public class Estimator { /** * Create a LocalDate from the given date string . Returns null * for an empty or null date string . * @ param dateString the date string * @ return the new LocalDate */ protected final LocalDate createLocalDate ( final String dateString ) { } }
if ( dateString == null || dateString . isEmpty ( ) ) { return null ; } final DateParser parser = new DateParser ( dateString ) ; return new LocalDate ( parser . getEstimateCalendar ( ) ) ;
public class Files { /** * Loads properties from file path * @ param path the path to properties file * @ return the loaded properties */ public static Properties getFileAsProperties ( String path ) { } }
File file = new File ( nullToEmpty ( path ) ) ; return getFileAsProperties ( file ) ;
public class QueryBuilder {
    /**
     * Adds order expressions to selection if in "auto distinct" mode and
     * the query performs a join or fetch on a relation. In
     * this case, attributes from referenced entities included in the sort
     * clause must be added to the select clause as well.
     *
     * @return number of attributes that were needed to compute a distinct
     */
    protected int applyDistinct() {
        int numAutoSelections = 0;
        boolean distinct;
        if (query.autoDistinct) {
            // distinct for many join/fetches or manual
            // in case of ViewQuery we may not need this here, but we need to do
            // the selection of order-by columns below
            distinct = query.autoDistinct && !query.autoGroupBy && backend.hasManyRootsFetchesOrJoins();
            if (distinct) {
                // we also need to select sorted attributes (for references)
                numAutoSelections = addOrderExpressionsToSelection();
            }
        } else {
            // manual mode: honor the explicitly requested distinct flag as-is
            distinct = query.distinct;
        }
        if (distinct) {
            backend.distinct();
        }
        return numAutoSelections;
    }
}
public class ParsedUnionStmt { /** * Return the leftmost child SELECT statement * @ return ParsedSelectStmt */ private ParsedSelectStmt getLeftmostSelectStmt ( ) { } }
assert ( ! m_children . isEmpty ( ) ) ; AbstractParsedStmt firstChild = m_children . get ( 0 ) ; if ( firstChild instanceof ParsedSelectStmt ) { return ( ParsedSelectStmt ) firstChild ; } else { assert ( firstChild instanceof ParsedUnionStmt ) ; return ( ( ParsedUnionStmt ) firstChild ) . getLeftmostSelectStmt ( ) ; }
public class JMPath { /** * Gets sub directory path list . * @ param startDirectoryPath the start directory path * @ param filter the filter * @ return the sub directory path list */ public static List < Path > getSubDirectoryPathList ( Path startDirectoryPath , Predicate < Path > filter ) { } }
return getSubPathList ( startDirectoryPath , Integer . MAX_VALUE , DirectoryAndNotSymbolicLinkFilter . and ( filter ) ) ;
public class ListTagsForVaultRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListTagsForVaultRequest listTagsForVaultRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listTagsForVaultRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listTagsForVaultRequest . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( listTagsForVaultRequest . getVaultName ( ) , VAULTNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AWSDatabaseMigrationServiceClient { /** * Returns the task assessment results from Amazon S3 . This action always returns the latest results . * @ param describeReplicationTaskAssessmentResultsRequest * @ return Result of the DescribeReplicationTaskAssessmentResults operation returned by the service . * @ throws ResourceNotFoundException * The resource could not be found . * @ sample AWSDatabaseMigrationService . DescribeReplicationTaskAssessmentResults * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dms - 2016-01-01 / DescribeReplicationTaskAssessmentResults " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeReplicationTaskAssessmentResultsResult describeReplicationTaskAssessmentResults ( DescribeReplicationTaskAssessmentResultsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeReplicationTaskAssessmentResults ( request ) ;
public class RequestHandlerSelectors { /** * Predicate that matches RequestHandler with handlers methods annotated with given annotation * @ param annotation - annotation to check * @ return this */ public static Predicate < RequestHandler > withMethodAnnotation ( final Class < ? extends Annotation > annotation ) { } }
return input -> input . isAnnotatedWith ( annotation ) ;
public class LocaleData { /** * Returns the size of paper used in the locale . The paper sizes returned are always in * < em > milli - meters < / em > . * @ param locale The locale for which the measurement system to be retrieved . * @ return The paper size used in the locale */ public static final PaperSize getPaperSize ( ULocale locale ) { } }
UResourceBundle obj = measurementTypeBundleForLocale ( locale , PAPER_SIZE ) ; int [ ] size = obj . getIntVector ( ) ; return new PaperSize ( size [ 0 ] , size [ 1 ] ) ;
public class Classification { /** * Getter method for instance variable { @ link # classifyRelation } . If the * variable is null the value for this instance variable of the parent * classification will be returned . * @ return value of instance variable { @ link # classifyRelation } * @ throws CacheReloadException on error */ public Type getClassifyRelationType ( ) throws CacheReloadException { } }
final Type ret ; if ( this . classifyRelation == null && this . parent != null ) { ret = getParentClassification ( ) . getClassifyRelationType ( ) ; } else { ret = Type . get ( this . classifyRelation ) ; } return ret ;
public class LayerDrawable {
    /**
     * Add a new layer to this drawable. The new layer is identified by an id.
     *
     * @param dr The drawable to add as a layer.
     * @param themeAttrs Theme attributes extracted from the layer.
     * @param id The id of the new layer.
     * @param left The left padding of the new layer.
     * @param top The top padding of the new layer.
     * @param right The right padding of the new layer.
     * @param bottom The bottom padding of the new layer.
     * @return the ChildDrawable wrapper created for the new layer
     */
    ChildDrawable addLayer(Drawable dr, int[] themeAttrs, int id, int left, int top, int right, int bottom) {
        final ChildDrawable childDrawable = createLayer(dr);
        childDrawable.mId = id;
        childDrawable.mThemeAttrs = themeAttrs;
        // setAutoMirrored is only available from KitKat (API 19) onward.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
            childDrawable.mDrawable.setAutoMirrored(isAutoMirrored());
        childDrawable.mInsetL = left;
        childDrawable.mInsetT = top;
        childDrawable.mInsetR = right;
        childDrawable.mInsetB = bottom;
        addLayer(childDrawable);
        // Track the child's configuration changes on the shared layer state and
        // route the child's invalidation callbacks through this drawable.
        mLayerState.mChildrenChangingConfigurations |= dr.getChangingConfigurations();
        dr.setCallback(this);
        return childDrawable;
    }
}
public class User {
    /**
     * This method checks with {@link SecurityRealm} if the user is a valid user that can login to the security realm.
     * If {@link SecurityRealm} is a kind that does not support querying information about other users, this will
     * use {@link LastGrantedAuthoritiesProperty} to pick up the granted authorities as of the last time the user has
     * logged in.
     *
     * @return userDetails for the user; in case he's not found but seems legitimate, we provide a userDetails with minimum access
     * @throws UsernameNotFoundException If this user is not a valid user in the backend {@link SecurityRealm}.
     */
    public @Nonnull UserDetails getUserDetailsForImpersonation() throws UsernameNotFoundException {
        ImpersonatingUserDetailsService userDetailsService =
                new ImpersonatingUserDetailsService(Jenkins.get().getSecurityRealm().getSecurityComponents().userDetails);
        try {
            UserDetails userDetails = userDetailsService.loadUserByUsername(id);
            LOGGER.log(Level.FINE, "Impersonation of the user {0} was a success", id);
            return userDetails;
        } catch (UserMayOrMayNotExistException e) {
            // The realm cannot answer queries about other users: fall through
            // to the minimum-access fallback below.
            LOGGER.log(Level.FINE, "The user {0} may or may not exist in the SecurityRealm, so we provide minimum access", id);
        } catch (UsernameNotFoundException e) {
            if (ALLOW_NON_EXISTENT_USER_TO_LOGIN) {
                LOGGER.log(Level.FINE, "The user {0} was not found in the SecurityRealm but we are required to let it pass, due to ALLOW_NON_EXISTENT_USER_TO_LOGIN", id);
            } else {
                // Definitively unknown user and no escape hatch: propagate.
                LOGGER.log(Level.FINE, "The user {0} was not found in the SecurityRealm", id);
                throw e;
            }
        } catch (DataAccessException e) {
            // seems like it's in the same boat as UserMayOrMayNotExistException
            LOGGER.log(Level.FINE, "The user {0} retrieval just threw a DataAccess exception with msg = {1}, so we provide minimum access", new Object[] { id, e.getMessage() });
        }
        // Unknown-but-plausible user: hand back a details object with minimum access.
        return new LegitimateButUnknownUserDetails(id);
    }
}
public class Utilities { /** * Utility method to get the formitems of a form object . * < p > Note that the entering json object has to be one * object of the main array , not THE main array itself , * i . e . a choice was already done . * @ param jsonObj the single object . * @ return the array of items of the contained form or < code > null < / code > if * no form is contained . * @ throws JSONException */ public static JSONArray getFormItems ( JSONObject formObj ) throws JSONException { } }
if ( formObj . has ( TAG_FORMITEMS ) ) { JSONArray formItemsArray = formObj . getJSONArray ( TAG_FORMITEMS ) ; return formItemsArray ; } return null ;
public class ZkClient {
    /**
     * Connect to ZooKeeper.
     *
     * @param maxMsToWaitUntilConnected maximum milliseconds to wait for the connection
     * @param watcher watcher to receive connection/session events
     * @throws ZkInterruptedException if the connection timed out due to thread interruption
     * @throws ZkTimeoutException if the connection timed out
     * @throws IllegalStateException if the connection timed out due to thread interruption
     */
    public void connect(final long maxMsToWaitUntilConnected, Watcher watcher)
            throws ZkInterruptedException, ZkTimeoutException, IllegalStateException {
        boolean started = false;
        acquireEventLock();
        try {
            setShutdownTrigger(false);
            // Start the event dispatch thread before connecting so that no
            // events delivered during connection establishment are lost.
            _eventThread = new ZkEventThread(_connection.getServers());
            _eventThread.start();
            _connection.connect(watcher);
            LOG.debug("Awaiting connection to Zookeeper server");
            boolean waitSuccessful = waitUntilConnected(maxMsToWaitUntilConnected, TimeUnit.MILLISECONDS);
            if (!waitSuccessful) {
                throw new ZkTimeoutException("Unable to connect to zookeeper server '" + _connection.getServers() + "' with timeout of " + maxMsToWaitUntilConnected + " ms");
            }
            // Only mark started once fully connected; any earlier exit
            // triggers the cleanup in the finally block.
            started = true;
        } finally {
            getEventLock().unlock();
            // we should close the zookeeper instance, otherwise it would keep
            // on trying to connect
            if (!started) {
                close();
            }
        }
    }
}
public class FindOption { /** * Returns the value if this option exists in the specified { @ code options } map . * Otherwise , the default value would be returned . */ public T get ( @ Nullable Map < FindOption < ? > , ? > options ) { } }
if ( options == null ) { return defaultValue ( ) ; } @ SuppressWarnings ( "unchecked" ) final T value = ( T ) options . get ( this ) ; if ( value == null ) { return defaultValue ( ) ; } return value ;
public class Usage { /** * START SNIPPET : hasReturn */ public boolean hasReturn ( List < Statement > statements ) { } }
boolean hasReturn = false ; for ( Statement statement : statements ) { hasReturn = hasReturn || statement . match ( new Statement . MatchBlockWithDefault < Boolean > ( ) { @ Override public Boolean _case ( Return x ) { return true ; } @ Override public Boolean _default ( Statement x ) { return false ; } } ) ; } return hasReturn ;
public class CommandLineArgumentParser {
    /**
     * Parse command-line arguments, and store values in callerArguments object passed to ctor.
     *
     * @param messageStream Where to write error messages.
     * @param args Command line tokens.
     * @return true if command line is valid and the program should run, false if help or version was requested
     * @throws CommandLineException if there is an invalid command line
     */
    @Override
    public boolean parseArguments(final PrintStream messageStream, String[] args) {
        final OptionSet parsedArguments;
        final OptionParser parser = getOptionParser();
        try {
            // Preprocess the arguments before the parser sees them, replacing any tagged options
            // and their values with raw option names and surrogate key values, so that tagged
            // options can be recognized by the parser. The actual values will be retrieved using
            // the key when the fields' values are set.
            parsedArguments = parser.parse(tagParser.preprocessTaggedOptions(args));
        } catch (final OptionException e) {
            // Surface parser failures as the command-line exception type callers expect.
            throw new CommandLineException(e.getMessage());
        }
        // Check if special short circuiting arguments were set
        if (isSpecialFlagSet(parsedArguments, SpecialArgumentsCollection.HELP_FULLNAME)) {
            messageStream.print(usage(true, isSpecialFlagSet(parsedArguments, SpecialArgumentsCollection.SHOW_HIDDEN_FULLNAME)));
            return false;
        } else if (isSpecialFlagSet(parsedArguments, SpecialArgumentsCollection.VERSION_FULLNAME)) {
            messageStream.println(getVersion());
            return false;
        } else if (parsedArguments.has(SpecialArgumentsCollection.ARGUMENTS_FILE_FULLNAME)) {
            // If a special arguments file was specified, read arguments from it and recursively call parseArguments()
            final List<String> newArgs = expandFromArgumentFile(parsedArguments);
            if (!newArgs.isEmpty()) {
                // If we've expanded any argument files, we need to do another pass on the entire list post-expansion,
                // so clear any tag surrogates created in this pass (they'll be regenerated in the next pass).
                tagParser.resetTagSurrogates();
                newArgs.addAll(Arrays.asList(args));
                return parseArguments(messageStream, newArgs.toArray(new String[newArgs.size()]));
            }
        }
        // Normal path: push the parsed values into the caller's argument fields.
        return propagateParsedValues(parsedArguments);
    }
}
public class AbstractRadixHeap {
    /**
     * Helper method for finding and caching the minimum. Assumes that the heap
     * contains at least one element.
     *
     * @param firstBucket start looking for elements from this bucket
     */
    private void findAndCacheMinimum(int firstBucket) {
        // Only recompute when no minimum is currently cached; a non-null
        // currentMin means the cached triple (min, bucket, pos) is valid.
        if (currentMin == null) {
            // find first non-empty bucket
            currentMinBucket = EMPTY;
            for (int i = firstBucket; i < this.buckets.length; i++) {
                if (!buckets[i].isEmpty()) {
                    currentMinBucket = i;
                    break;
                }
            }
            // find new minimum and its position (beware of cached values)
            currentMinPos = EMPTY;
            if (currentMinBucket >= 0) {
                // Linear scan of the first non-empty bucket; track both the
                // smallest key and its index within the bucket.
                int pos = 0;
                for (K val : buckets[currentMinBucket]) {
                    if (currentMin == null || compare(val, currentMin) < 0) {
                        currentMin = val;
                        currentMinPos = pos;
                    }
                    ++pos;
                }
            }
        }
    }
}
public class CommonOps_DSCC {
    /**
     * Permutes a vector in the inverse. output[perm[k]] = input[k]
     *
     * @param perm (Input) permutation vector
     * @param input (Input) Vector which is to be permuted
     * @param output (Output) Where the permuted vector is stored.
     * @param N Number of elements in the vector.
     */
    public static void permuteInv(int[] perm, double[] input, double[] output, int N) {
        for (int index = 0; index < N; index++) {
            // Scatter each element to the slot named by the permutation.
            double value = input[index];
            output[perm[index]] = value;
        }
    }
}
public class TransactionLocalMap {
    /**
     * Expunge a stale entry by rehashing any possibly colliding entries
     * lying between staleSlot and the next null slot. This also expunges
     * any other stale entries encountered before the trailing null. See
     * Knuth, Section 6.4
     *
     * @param staleSlot index of slot known to have null key
     * @return the index of the next null slot after staleSlot
     *         (all between staleSlot and this slot will have been checked
     *         for expunging).
     */
    private int expungeStaleEntry(int staleSlot) {
        Entry[] tab = table;
        int len = tab.length;
        // expunge entry at staleSlot
        tab[staleSlot].value = null;
        tab[staleSlot] = null;
        size--;
        // Rehash until we encounter null
        Entry e;
        int i;
        for (i = nextIndex(staleSlot, len); (e = tab[i]) != null; i = nextIndex(i, len)) {
            // NOTE(review): 'e.key.hashCode' reads a field named hashCode, not
            // Object.hashCode(); if the key type has no such cached-hash field
            // this should be 'hashCode()' -- confirm against the Entry/key
            // declarations elsewhere in this file.
            int h = e.key.hashCode & (len - 1);
            if (h != i) {
                tab[i] = null;
                // Unlike Knuth 6.4 Algorithm R, we must scan until
                // null because multiple entries could have been stale.
                while (tab[h] != null)
                    h = nextIndex(h, len);
                tab[h] = e;
            }
        }
        return i;
    }
}
public class StringBuilders { /** * Appends in the following format : double quoted value . * @ param sb a string builder * @ param value a value * @ return { @ code " value " } */ public static StringBuilder appendDqValue ( final StringBuilder sb , final Object value ) { } }
return sb . append ( Chars . DQUOTE ) . append ( value ) . append ( Chars . DQUOTE ) ;
public class Formatter {
    /**
     * Returns a charset object for the given charset name.
     *
     * @throws NullPointerException if csn is null
     * @throws UnsupportedEncodingException if the charset is not supported
     */
    private static Charset toCharset(String csn) throws UnsupportedEncodingException {
        Objects.requireNonNull(csn, "charsetName");
        try {
            return Charset.forName(csn);
        } catch (IllegalCharsetNameException | UnsupportedCharsetException badName) {
            // Callers expect the checked UnsupportedEncodingException rather
            // than the unchecked charset exceptions thrown by forName.
            throw new UnsupportedEncodingException(csn);
        }
    }
}
public class Table {
    /**
     * For removal or addition of columns, constraints and indexes.
     * Does not work in this form for FK's as Constraint.ConstraintCore
     * is not transfered to a referencing or referenced table.
     *
     * Builds and returns a new Table that mirrors this one with the given
     * column/constraint/index added or removed; column indexes in indexes and
     * constraints are shifted by {@code adjust} at {@code colIndex}.
     */
    Table moveDefinition(Session session, int newType, ColumnSchema column, Constraint constraint, Index index,
                         int colIndex, int adjust, OrderedHashSet dropConstraints, OrderedHashSet dropIndexes) {
        boolean newPK = false;
        if (constraint != null && constraint.constType == Constraint.PRIMARY_KEY) {
            newPK = true;
        }
        Table tn = new Table(database, tableName, newType);
        if (tableType == TEMP_TABLE) {
            tn.persistenceScope = persistenceScope;
        }
        // Copy columns, inserting (adjust > 0) or replacing/dropping
        // (adjust <= 0) at colIndex.
        for (int i = 0; i < getColumnCount(); i++) {
            ColumnSchema col = (ColumnSchema) columnList.get(i);
            if (i == colIndex) {
                if (column != null) {
                    tn.addColumn(column);
                }
                if (adjust <= 0) {
                    continue;
                }
            }
            tn.addColumn(col);
        }
        // Appending a new column at the end.
        if (getColumnCount() == colIndex) {
            tn.addColumn(column);
        }
        // Carry over (and shift) the primary key columns, unless the PK is
        // being dropped or replaced by a new one.
        int[] pkCols = null;
        if (hasPrimaryKey() && !dropConstraints.contains(getPrimaryConstraint().getName())) {
            pkCols = primaryKeyCols;
            pkCols = ArrayUtil.toAdjustedColumnArray(pkCols, colIndex, adjust);
        } else if (newPK) {
            pkCols = constraint.getMainColumns();
        }
        tn.createPrimaryKey(getIndex(0).getName(), pkCols, false);
        // Recreate the secondary indexes (index 0 is the PK index) with
        // shifted column positions.
        for (int i = 1; i < indexList.length; i++) {
            Index idx = indexList[i];
            if (dropIndexes.contains(idx.getName())) {
                continue;
            }
            int[] colarr = ArrayUtil.toAdjustedColumnArray(idx.getColumns(), colIndex, adjust);
            // A VoltDB extension to support indexed expressions and assume unique attribute
            Expression[] exprArr = idx.getExpressions();
            boolean assumeUnique = idx.isAssumeUnique();
            Expression predicate = idx.getPredicate();
            // End of VoltDB extension
            idx = tn.createIndexStructure(idx.getName(), colarr, idx.getColumnDesc(), null, idx.isUnique(),
                                          idx.isMigrating(), idx.isConstraint(), idx.isForward());
            // A VoltDB extension to support indexed expressions and assume unique attribute and partial indexes
            if (exprArr != null) {
                idx = idx.withExpressions(adjustExprs(exprArr, colIndex, adjust));
            }
            if (predicate != null) {
                idx = idx.withPredicate(adjustExpr(predicate, colIndex, adjust));
            }
            idx = idx.setAssumeUnique(assumeUnique);
            // End of VoltDB extension
            tn.addIndex(idx);
        }
        if (index != null) {
            tn.addIndex(index);
        }
        // Rebuild the constraint list: new PK first, then surviving
        // constraints re-pointed at the new table, then any non-PK addition.
        HsqlArrayList newList = new HsqlArrayList();
        if (newPK) {
            constraint.core.mainIndex = tn.indexList[0];
            constraint.core.mainTable = tn;
            constraint.core.mainTableName = tn.tableName;
            newList.add(constraint);
        }
        for (int i = 0; i < constraintList.length; i++) {
            Constraint c = constraintList[i];
            if (dropConstraints.contains(c.getName())) {
                continue;
            }
            c = c.duplicate();
            c.updateTable(session, this, tn, colIndex, adjust);
            newList.add(c);
        }
        if (!newPK && constraint != null) {
            constraint.updateTable(session, this, tn, -1, 0);
            newList.add(constraint);
        }
        tn.constraintList = new Constraint[newList.size()];
        newList.toArray(tn.constraintList);
        tn.updateConstraintLists();
        tn.setBestRowIdentifiers();
        // Triggers are shared with the original table, not copied.
        tn.triggerList = triggerList;
        tn.triggerLists = triggerLists;
        return tn;
    }
}
public class Base64Encoder { /** * Encodes a String into a base 64 String . The resulting encoding is chunked at 76 bytes . * @ param s String to encode . * @ return encoded string . */ public static String encode ( String s ) { } }
byte [ ] sBytes = s . getBytes ( ) ; sBytes = encode ( sBytes ) ; s = new String ( sBytes ) ; return s ;
public class EvaluationResult { /** * A list of the statements in the input policies that determine the result for this scenario . Remember that even if * multiple statements allow the operation on the resource , if only one statement denies that operation , then the * explicit deny overrides any allow . Inaddition , the deny statement is the only entry included in the result . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setMatchedStatements ( java . util . Collection ) } or { @ link # withMatchedStatements ( java . util . Collection ) } if * you want to override the existing values . * @ param matchedStatements * A list of the statements in the input policies that determine the result for this scenario . Remember that * even if multiple statements allow the operation on the resource , if only one statement denies that * operation , then the explicit deny overrides any allow . Inaddition , the deny statement is the only entry * included in the result . * @ return Returns a reference to this object so that method calls can be chained together . */ public EvaluationResult withMatchedStatements ( Statement ... matchedStatements ) { } }
if ( this . matchedStatements == null ) { setMatchedStatements ( new com . amazonaws . internal . SdkInternalList < Statement > ( matchedStatements . length ) ) ; } for ( Statement ele : matchedStatements ) { this . matchedStatements . add ( ele ) ; } return this ;
public class MPSubscriptionImpl {
    /**
     * Add an additional selection criteria to the subscription.
     * Duplicate selection criterias are ignored.
     *
     * @param selCriteria the criteria to add
     * @throws SIDiscriminatorSyntaxException if the discriminator is malformed
     * @throws SISelectorSyntaxException if the selector is malformed
     * @throws SIResourceException if persisting the change fails
     */
    public void addSelectionCriteria(SelectionCriteria selCriteria)
            throws SIDiscriminatorSyntaxException, SISelectorSyntaxException, SIResourceException {
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "addSelectionCriteria", new Object[] { selCriteria });
        // We should really check discriminator access at this stage
        // However since these checks require access to the connection which
        // does not belong on this object, I have decided to avoid the checks
        // at this stage as they will be done at message delivery time
        // Also, it is the case that in all current usage the discriminators will
        // be the same for all selectionCriteria
        boolean duplicateCriteria =
                _consumerDispatcher.getConsumerDispatcherState().addSelectionCriteria(selCriteria);
        if (!duplicateCriteria) {
            // Add the new criteria to the matchspace
            try {
                _messageProcessor.getMessageProcessorMatching().addConsumerDispatcherMatchTarget(
                        _consumerDispatcher, _consumerDispatcher.getDestination().getUuid(), selCriteria);
            } catch (SIDiscriminatorSyntaxException e) {
                // No FFDC code needed
                // Remove the selection criteria as it was added to the list
                _consumerDispatcher.getConsumerDispatcherState().removeSelectionCriteria(selCriteria);
                if (tc.isEntryEnabled())
                    SibTr.exit(tc, "addSelectionCriteria", e);
                throw e;
            } catch (SISelectorSyntaxException e) {
                // No FFDC code needed
                // Remove the selection criteria as it was added to the list
                _consumerDispatcher.getConsumerDispatcherState().removeSelectionCriteria(selCriteria);
                if (tc.isEntryEnabled())
                    SibTr.exit(tc, "addSelectionCriteria", e);
                throw e;
            }
            // Persist change made to consumerDispatcherState
            Transaction tran = _messageProcessor.getTXManager().createAutoCommitTransaction();
            if (!_consumerDispatcher.getReferenceStream().isUpdating()) {
                try {
                    _consumerDispatcher.getReferenceStream().requestUpdate(tran);
                } catch (MessageStoreException e) {
                    // MessageStoreException shouldn't occur so FFDC.
                    FFDCFilter.processException(e,
                            "com.ibm.ws.sib.processor.impl.MPSubscriptionImpl.addSelectionCriteria",
                            "1:153:1.6", this);
                    // Remove the selection criteria as it was added to the list
                    _consumerDispatcher.getConsumerDispatcherState().removeSelectionCriteria(selCriteria);
                    SibTr.exception(tc, e);
                    if (tc.isEntryEnabled())
                        SibTr.exit(tc, "addSelectionCriteria", "SIResourceException");
                    throw new SIResourceException(e);
                }
            }
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "addSelectionCriteria");
    }
}
public class gen_flatfile { /** * Main entry point * @ param args Full program args , including those that go to ToolRunner . * @ throws Exception */ public static void main ( String [ ] args ) throws Exception { } }
int exitCode = ToolRunner . run ( new gen_flatfile ( ) , args ) ; System . exit ( exitCode ) ;
public class nsip {
    /**
     * Use this API to add nsip resources.
     *
     * @param client the nitro service to issue the request on
     * @param resources the nsip resources to add; null or empty yields a null result
     * @return the bulk add responses, or null when there was nothing to add
     * @throws Exception on request failure
     */
    public static base_responses add(nitro_service client, nsip resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            // Copy only the writable add-time fields into fresh objects so the
            // caller's instances (and any read-only fields) are not sent.
            nsip addresources[] = new nsip[resources.length];
            for (int i = 0; i < resources.length; i++) {
                addresources[i] = new nsip();
                addresources[i].ipaddress = resources[i].ipaddress;
                addresources[i].netmask = resources[i].netmask;
                addresources[i].type = resources[i].type;
                addresources[i].arp = resources[i].arp;
                addresources[i].icmp = resources[i].icmp;
                addresources[i].vserver = resources[i].vserver;
                addresources[i].telnet = resources[i].telnet;
                addresources[i].ftp = resources[i].ftp;
                addresources[i].gui = resources[i].gui;
                addresources[i].ssh = resources[i].ssh;
                addresources[i].snmp = resources[i].snmp;
                addresources[i].mgmtaccess = resources[i].mgmtaccess;
                addresources[i].restrictaccess = resources[i].restrictaccess;
                addresources[i].dynamicrouting = resources[i].dynamicrouting;
                addresources[i].ospf = resources[i].ospf;
                addresources[i].bgp = resources[i].bgp;
                addresources[i].rip = resources[i].rip;
                addresources[i].hostroute = resources[i].hostroute;
                addresources[i].hostrtgw = resources[i].hostrtgw;
                addresources[i].metric = resources[i].metric;
                addresources[i].vserverrhilevel = resources[i].vserverrhilevel;
                addresources[i].ospflsatype = resources[i].ospflsatype;
                addresources[i].ospfarea = resources[i].ospfarea;
                addresources[i].state = resources[i].state;
                addresources[i].vrid = resources[i].vrid;
                addresources[i].icmpresponse = resources[i].icmpresponse;
                addresources[i].ownernode = resources[i].ownernode;
                addresources[i].arpresponse = resources[i].arpresponse;
                addresources[i].td = resources[i].td;
            }
            result = add_bulk_request(client, addresources);
        }
        return result;
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcIntersectionCurve() {
        // Lazily resolve and cache the EClass from the globally registered
        // Ifc4 package; 326 is the generated classifier index for this type.
        if (ifcIntersectionCurveEClass == null) {
            ifcIntersectionCurveEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(326);
        }
        return ifcIntersectionCurveEClass;
    }
}
public class PerceptronClassifier {
    /**
     * Naive perceptron training algorithm.
     *
     * @param instanceList training instances
     * @param featureMap feature map (feature functions)
     * @param maxIteration number of training iterations
     * @return the trained linear model
     */
    private static LinearModel trainNaivePerceptron(Instance[] instanceList, FeatureMap featureMap, int maxIteration) {
        // Start from an all-zero weight vector, one weight per feature.
        LinearModel model = new LinearModel(featureMap, new float[featureMap.size()]);
        for (int it = 0; it < maxIteration; ++it) {
            // Shuffle every epoch so updates are not biased by instance order.
            Utility.shuffleArray(instanceList);
            for (Instance instance : instanceList) {
                int y = model.decode(instance.x);
                if (y != instance.y)
                    // error feedback: adjust the weights toward the gold label
                    model.update(instance.x, instance.y);
            }
        }
        return model;
    }
}
public class BreadthFirstIterator {
    /**
     * Determines whether another element is available in the iteration.
     *
     * @return a boolean value indicating whether another element is available in the iteration.
     * @see java.util.Iterator#hasNext()
     */
    @Override
    public boolean hasNext() {
        // Discard exhausted iterators from the head of the queue until one
        // with remaining elements is found, or none are left.
        while (!(iterators.isEmpty() || iterators.peek().hasNext())) {
            // Sanity check: the iterator just removed must itself be exhausted
            // (peek and removeFirst must have observed the same head).
            Assert.isFalse(iterators.removeFirst().hasNext(), new IllegalStateException("removing a non-empty Iterator"));
        }
        return (!iterators.isEmpty() && iterators.peek().hasNext());
    }
}
public class ELParser {
    /**
     * Function
     * Namespace:Name(a, b, c)
     *
     * Parses an (optionally namespace-prefixed) function invocation with one
     * or more parenthesized parameter lists. Generated by JavaCC/jjtree --
     * do not hand-edit the node-scope bookkeeping.
     */
    final public void Function() throws ParseException {
        /*@bgen(jjtree) Function */
        AstFunction jjtn000 = new AstFunction(JJTFUNCTION);
        boolean jjtc000 = true;
        jjtree.openNodeScope(jjtn000);
        Token t0 = null;
        Token t1 = null;
        try {
            t0 = jj_consume_token(IDENTIFIER);
            switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case COLON:
                    // Optional "prefix:" before the local name.
                    jj_consume_token(COLON);
                    t1 = jj_consume_token(IDENTIFIER);
                    break;
                default:
                    jj_la1[48] = jj_gen;
                    ;
            }
            if (t1 != null) {
                jjtn000.setPrefix(t0.image);
                jjtn000.setLocalName(t1.image);
            } else {
                jjtn000.setLocalName(t0.image);
            }
            // One or more parameter lists: f(a)(b)... keeps consuming while
            // the next token opens another parenthesized list.
            label_19: while (true) {
                MethodParameters();
                switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                    case LPAREN:
                        ;
                        break;
                    default:
                        jj_la1[49] = jj_gen;
                        break label_19;
                }
            }
        } catch (Throwable jjte000) {
            // Unwind the jjtree node scope before rethrowing as the most
            // specific of RuntimeException/ParseException/Error.
            if (jjtc000) {
                jjtree.clearNodeScope(jjtn000);
                jjtc000 = false;
            } else {
                jjtree.popNode();
            }
            if (jjte000 instanceof RuntimeException) {
                { if (true) throw (RuntimeException) jjte000; }
            }
            if (jjte000 instanceof ParseException) {
                { if (true) throw (ParseException) jjte000; }
            }
            { if (true) throw (Error) jjte000; }
        } finally {
            if (jjtc000) {
                jjtree.closeNodeScope(jjtn000, true);
            }
        }
    }
}
public class UInteger { /** * Generate a cached value for initial unsigned integer values . * @ return Array of cached values for UInteger */ private static final UInteger [ ] mkValues ( ) { } }
// Build the cache of small UInteger values, or null when precaching is disabled.
final int precacheSize = getPrecacheSize();
if (precacheSize <= 0) {
    return null; // caching disabled — callers must handle a null cache
}
final UInteger[] cache = new UInteger[precacheSize];
for (int value = 0; value < precacheSize; value++) {
    cache[value] = new UInteger(value);
}
return cache;
public class AbstractAttributeCollection { /** * Fire the an attribute removal event . * @ param name is the name of the attribute for which the event occured . * @ param oldValue is the previous value of the attribute */ protected synchronized void fireAttributeRemovedEvent ( String name , AttributeValue oldValue ) { } }
// Nothing to do when there are no listeners or events are suppressed.
if (this.listenerList == null || !isEventFirable()) {
    return;
}
// Snapshot the listeners so concurrent (de)registration cannot affect this dispatch.
final AttributeChangeListener[] snapshot = new AttributeChangeListener[this.listenerList.size()];
this.listenerList.toArray(snapshot);
final AttributeChangeEvent event = new AttributeChangeEvent(this, Type.REMOVAL, name, oldValue, name, oldValue);
for (final AttributeChangeListener listener : snapshot) {
    listener.onAttributeChangeEvent(event);
}
public class BootstrapConfig { /** * Clear property * @ param key * Key of property to clear * @ return current / removed value */ public String remove ( final String key ) { } }
if ( key == null ) return null ; return initProps . remove ( key ) ;
public class ThreadPoolBase { /** * main scheduling implementation class . */ protected boolean scheduleImpl ( Runnable task , ClassLoader loader , long expireTime , boolean isPriority , boolean isQueueIfFull , boolean isWakeScheduler ) { } }
// Wrap the task with its context class loader and hand it to the scheduler.
// NOTE(review): expireTime, isPriority, isQueueIfFull and isWakeScheduler are
// not consulted here — presumably honored by schedule()/RunnableItem elsewhere; confirm.
Objects.requireNonNull(task);
final RunnableItem wrapped = new RunnableItem(task, loader);
return schedule(wrapped);
public class InjectableArgumentMethodSimulator { /** * Injects the arguments of the method invocation to the local variables . * @ param arguments The argument values */ private void injectArguments ( final List < Element > arguments , final MethodIdentifier identifier ) { } }
// Copy each invocation argument into its local-variable slot. Non-static
// methods reserve slot 0 for `this`, so their arguments start at slot 1.
final boolean isStatic = identifier.isStaticMethod();
final int slotOffset = isStatic ? 0 : 1;
for (int argIndex = 0; argIndex < arguments.size(); argIndex++) {
    localVariables.put(argIndex + slotOffset, arguments.get(argIndex));
}
public class AWSGreengrassClient { /** * Creates a resource definition which contains a list of resources to be used in a group . You can create an initial * version of the definition by providing a list of resources now , or use ' ' CreateResourceDefinitionVersion ' ' later . * @ param createResourceDefinitionRequest * @ return Result of the CreateResourceDefinition operation returned by the service . * @ throws BadRequestException * invalid request * @ sample AWSGreengrass . CreateResourceDefinition * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / greengrass - 2017-06-07 / CreateResourceDefinition " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateResourceDefinitionResult createResourceDefinition ( CreateResourceDefinitionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateResourceDefinition ( request ) ;
public class DropFunction { /** * Remove the function with the given name from the VoltXMLElement schema * if it is there already . * @ param functionName * @ return Return true iff the function is removed . * Return false if the function does not exist in the schema . */ private boolean removeUDFInSchema ( String functionName ) { } }
for ( int idx = 0 ; idx < m_schema . children . size ( ) ; idx ++ ) { VoltXMLElement func = m_schema . children . get ( idx ) ; if ( "ud_function" . equals ( func . name ) ) { String fnm = func . attributes . get ( "name" ) ; if ( fnm != null && functionName . equals ( fnm ) ) { m_schema . children . remove ( idx ) ; m_tracker . addDroppedFunction ( functionName ) ; m_logger . debug ( String . format ( "Removed XML for" + " function named %s" , functionName ) ) ; return true ; } } } return false ;
public class BitInputStream { /** * peek at bits into an unsigned integer without advancing the input stream . * @ param bits The number of bits to read * @ return The bits as an unsigned integer * @ throws IOException Thrown if error reading input stream */ public int peekRawUInt ( int bits ) throws IOException { } }
// Accumulate `bits` bits into an unsigned int without consuming input;
// peekBitToInt folds the bit at the given position into the accumulator.
int accumulator = 0;
int position = 0;
while (position < bits) {
    accumulator = peekBitToInt(accumulator, position);
    position++;
}
return accumulator;
public class AddResourcesListener { /** * Add the required CSS files and the FontAwesome CDN link . * @ param root The UIViewRoot of the JSF tree . * @ param context The current FacesContext */ private void addCSS ( UIViewRoot root , FacesContext context ) { } }
// The following code is needed to diagnose the warning " Unable to save dynamic // action with clientId ' j _ id . . . ' " // List < UIComponent > r = root . getComponentResources ( context , " head " ) ; // System . out . println ( " * * * * * " ) ; // for ( UIComponent ava : r ) { // String name = ( String ) ava . getAttributes ( ) . get ( " name " ) ; // System . out . println ( ava . getClientId ( context ) + " : " + name + " " + // ava . getClass ( ) . getSimpleName ( ) ) ; // System . out . println ( " * * * * * " ) ; // end of the diagnostic code // 1 ) First load Theme files ( core . css + theme . css ) String theme = loadTheme ( root , context ) ; // deactivate FontAwesome support if the no - fa facet is found in the h : head tag UIComponent header = findHeader ( root ) ; boolean useCDNImportForFontAwesome = ( null == header ) || ( null == header . getFacet ( "no-fa" ) ) ; if ( useCDNImportForFontAwesome ) { String useCDN = BsfUtils . getInitParam ( P_GET_FONTAWESOME_FROM_CDN ) ; if ( null != useCDN ) { useCDN = ELTools . evalAsString ( useCDN ) ; } if ( null != useCDN ) { useCDNImportForFontAwesome = ! isFalseOrNo ( useCDN ) ; } } // Do we have to add font - awesome , or are the resources already there ? List < UIComponent > availableResources = root . getComponentResources ( context , "head" ) ; for ( UIComponent ava : availableResources ) { String name = ( String ) ava . getAttributes ( ) . get ( "name" ) ; if ( null != name ) { name = name . toLowerCase ( ) ; if ( ( name . contains ( "font-awesome" ) || name . contains ( "fontawesome" ) ) && name . endsWith ( "css" ) ) useCDNImportForFontAwesome = false ; } } // 2 ) Font Awesome if ( useCDNImportForFontAwesome ) { InternalFALink output = new InternalFALink ( ) ; Map < String , Object > viewMap = root . getViewMap ( ) ; if ( viewMap . containsKey ( FONTAWESOME_USED ) ) { String version = ( String ) viewMap . get ( FONTAWESOME_VERSION ) ; if ( version != null ) { output . 
setVersion ( version ) ; output . setNeedsVersion4 ( needsFontAwesome4 ( ) ) ; } else { output . setVersion ( "4" ) ; output . setNeedsVersion4 ( needsFontAwesome4 ( ) ) ; } addResourceIfNecessary ( root , context , output ) ; } } @ SuppressWarnings ( "unchecked" ) List < String > extCSSMap = ( List < String > ) root . getViewMap ( ) . get ( EXT_RESOURCE_KEY ) ; if ( extCSSMap != null ) { for ( String file : extCSSMap ) { String name = "css/" + file ; createAndAddComponent ( root , context , CSS_RENDERER , name , C . BSF_LIBRARY ) ; } } @ SuppressWarnings ( "unchecked" ) List < String > themedCSSMap = ( List < String > ) root . getViewMap ( ) . get ( THEME_RESOURCE_KEY ) ; if ( themedCSSMap != null ) { for ( String file : themedCSSMap ) { String name = "css/" + theme + "/" + file ; // Glyphicons icons now are in core . css if ( file . equals ( " icons . css " ) ) / / the // icons . css file isn ' t found in a theme folder // Glyphicons icons now are in core . css name = " css / icons . css " ; / / look for it // under the css root instead createAndAddComponent ( root , context , CSS_RENDERER , name , C . BSF_LIBRARY ) ; } } if ( theme . equals ( "patternfly" ) ) { createAndAddComponent ( root , context , CSS_RENDERER , "css/patternfly/bootstrap-switch.css" , C . BSF_LIBRARY ) ; } // Add mandatory CSS bsf . css createAndAddComponent ( root , context , CSS_RENDERER , "css/bsf.css" , C . BSF_LIBRARY ) ; // 3 ) Bootstrap from CDN ( TODO : check removeBootstrapResources ) boolean loadBootstrap = shouldLibraryBeLoaded ( P_GET_BOOTSTRAP_FROM_CDN , true ) ; if ( ! loadBootstrap ) { removeBootstrapResources ( root , context ) ; }
public class ResourceHelper { /** * Parse the request path and match with possible resource patterns * @ param securityContext * @ param request * @ param resourceMap * @ param propertyView * @ return resourceChain * @ throws FrameworkException */ public static List < Resource > parsePath ( final SecurityContext securityContext , final HttpServletRequest request , final Map < Pattern , Class < ? extends Resource > > resourceMap , final Value < String > propertyView ) throws FrameworkException { } }
// Splits the request path into segments and resolves each segment to a
// Resource: property-view names take precedence, then the first regex in
// resourceMap whose freshly instantiated Resource accepts the segment.
// Throws NoResultsException for a blank path and NotFoundException for any
// unresolvable segment.
final String path = request.getPathInfo();

// intercept empty path and send 204 No Content
if (StringUtils.isBlank(path)) {
    throw new NoResultsException("No content");
}

// 1.: split request path into URI parts (collapsing repeated slashes)
final String[] pathParts = path.split("[/]+");

// 2.: create container for resource constraints
final Set<String> propertyViews = Services.getInstance().getConfigurationProvider().getPropertyViews();
final List<Resource> resourceChain = new ArrayList<>(pathParts.length);

// 3.: try to assign resource constraints for each URI part
for (int i = 0; i < pathParts.length; i++) {

    // eliminate empty strings (leading slash produces an empty first part)
    final String part = pathParts[i].trim();
    if (part.length() > 0) {

        boolean found = false;

        // check views first — a view name filters instead of addressing a resource
        if (propertyViews.contains(part)) {

            Resource resource = new ViewFilterResource();
            resource.checkAndConfigure(part, securityContext, request);
            resource.configurePropertyView(propertyView);
            resourceChain.add(resource);

            // mark this part as successfully parsed
            found = true;

        } else {

            // look for matching pattern
            for (Map.Entry<Pattern, Class<? extends Resource>> entry : resourceMap.entrySet()) {

                Pattern pattern = entry.getKey();
                Matcher matcher = pattern.matcher(pathParts[i]);

                if (matcher.matches()) {

                    Class<? extends Resource> type = entry.getValue();
                    Resource resource = null;

                    try {
                        // instantiate resource constraint (requires a public no-arg constructor)
                        resource = type.newInstance();
                    } catch (Throwable t) {
                        logger.warn("Error instantiating resource class", t);
                    }

                    if (resource != null) {

                        // set security context
                        resource.setSecurityContext(securityContext);

                        // checkAndConfigure may still reject the part (e.g. malformed id)
                        if (resource.checkAndConfigure(part, securityContext, request)) {

                            logger.debug("{} matched, adding resource of type {} for part {}", new Object[] { matcher.pattern(), type.getName(), part });

                            // allow constraint to modify context
                            resource.configurePropertyView(propertyView);

                            // add constraint and go on
                            resourceChain.add(resource);
                            found = true;

                            // first match wins, so choose priority wisely ;)
                            break;
                        }
                    }
                }
            }
        }

        if (!found) {
            throw new NotFoundException("Cannot resolve URL path");
        }
    }
}

return resourceChain;
public class BrokerHelper { /** * Build a Count - Query based on aQuery * @ param aQuery * @ return The count query */ public Query getCountQuery ( Query aQuery ) { } }
// Dispatch on the concrete query type; ReportQueryByCriteria must be tested
// before the plain criteria fallback (the final branch assumes any remaining
// query is a QueryByCriteria, as the original code did).
if (aQuery instanceof QueryBySQL) {
    return getQueryBySqlCount((QueryBySQL) aQuery);
}
if (aQuery instanceof ReportQueryByCriteria) {
    return getReportQueryByCriteriaCount((ReportQueryByCriteria) aQuery);
}
return getQueryByCriteriaCount((QueryByCriteria) aQuery);
public class Vector4i { /** * / * ( non - Javadoc ) * @ see org . joml . Vector4ic # mul ( int , org . joml . Vector4i ) */ public Vector4i mul ( int scalar , Vector4i dest ) { } }
// Component-wise scale of this vector by `scalar`, written into `dest`.
dest.x = this.x * scalar;
dest.y = this.y * scalar;
dest.z = this.z * scalar;
dest.w = this.w * scalar;
return dest;
public class TableModel { /** * Removes a row at a particular index from the table model * @ param index Index of the row to remove * @ return Itself */ public synchronized TableModel < V > removeRow ( int index ) { } }
// Remove the row, notify every registered listener with the removed data,
// and return this model to allow call chaining.
final List<V> removed = rows.remove(index);
for (final Listener<V> listener : listeners) {
    listener.onRowRemoved(this, index, removed);
}
return this;
public class ServletUtils { /** * Print parameters and attributes in the given request . * @ param request the current HttpServletRequest . * @ param output a PrintStream to which to output request parameters and request / session * attributes ; if < code > null < / null > , < code > System . err < / code > is used . */ public static void dumpRequest ( ServletRequest request , PrintStream output ) { } }
if ( output == null ) { output = System . err ; } output . println ( "*** ServletRequest " + request ) ; if ( request instanceof HttpServletRequest ) { output . println ( " uri = " + ( ( HttpServletRequest ) request ) . getRequestURI ( ) ) ; } for ( Enumeration e = request . getParameterNames ( ) ; e . hasMoreElements ( ) ; ) { String name = ( String ) e . nextElement ( ) ; output . println ( " parameter " + name + " = " + request . getParameter ( name ) ) ; } for ( Enumeration e = request . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { String name = ( String ) e . nextElement ( ) ; output . println ( " attribute " + name + " = " + request . getAttribute ( name ) ) ; } if ( request instanceof HttpServletRequest ) { HttpSession session = ( ( HttpServletRequest ) request ) . getSession ( false ) ; if ( session != null ) { for ( Enumeration e = session . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { String name = ( String ) e . nextElement ( ) ; output . println ( " session attribute " + name + " = " + session . getAttribute ( name ) ) ; } } }
public class ArrayUtils { /** * Same as { @ link Collection # addAll ( Collection ) } but in case of RandomAccess iterates over indices */ public static < T > void addAll ( Collection < ? super T > collection , Collection < ? extends T > toAdd ) { } }
if ( collection == null || toAdd == null ) { return ; } if ( toAdd instanceof RandomAccess ) { List < ? extends T > randomAccess = ( List < ? extends T > ) toAdd ; for ( int i = 0 , size = randomAccess . size ( ) ; i < size ; i ++ ) { T element = randomAccess . get ( i ) ; collection . add ( element ) ; } } else { collection . addAll ( toAdd ) ; }
import java . util . * ; class Main { /** * Function to sort a list of tuples based on the first element of each tuple in alphabetical order . * Examples : * > > > order _ tuples ( [ ( ' Amana ' , 28 ) , ( ' Zenat ' , 30 ) , ( ' Abhishek ' , 29 ) , ( ' Nikhil ' , 21 ) , ( ' B ' , ' C ' ) ] ) * [ ( ' Abhishek ' , 29 ) , ( ' Amana ' , 28 ) , ( ' B ' , ' C ' ) , ( ' Nikhil ' , 21 ) , ( ' Zenat ' , 30 ) ] * > > > order _ tuples ( [ ( ' aaaa ' , 28 ) , ( ' aa ' , 30 ) , ( ' bab ' , 29 ) , ( ' bb ' , 21 ) , ( ' csa ' , ' C ' ) ] ) * [ ( ' aa ' , 30 ) , ( ' aaaa ' , 28 ) , ( ' bab ' , 29 ) , ( ' bb ' , 21 ) , ( ' csa ' , ' C ' ) ] * > > > order _ tuples ( [ ( ' Sarala ' , 28 ) , ( ' Ayesha ' , 30 ) , ( ' Suman ' , 29 ) , ( ' Sai ' , 21 ) , ( ' G ' , ' H ' ) ] ) * [ ( ' Ayesha ' , 30 ) , ( ' G ' , ' H ' ) , ( ' Sai ' , 21 ) , ( ' Sarala ' , 28 ) , ( ' Suman ' , 29 ) ] */ public static List < Tuple < String , Object > > order_tuples ( List < Tuple < String , Object > > tuplesList ) { } } class Tuple < F , S > { private final F first ; private final S second ; public Tuple ( F first , S second ) { this . first = first ; this . second = second ; } public F getFirst ( ) { return first ; } public S getSecond ( ) { return second ; } }
// Getting the length of the list int listLength = tuplesList . size ( ) ; // Implementing Bubble Sort for ( int i = 0 ; i < listLength ; i ++ ) { for ( int j = 0 ; j < listLength - i - 1 ; j ++ ) { // Swapping the tuples if the first element of the current tuple is greater than the next one . if ( tuplesList . get ( j ) . getFirst ( ) . compareTo ( tuplesList . get ( j + 1 ) . getFirst ( ) ) > 0 ) { Collections . swap ( tuplesList , j , j + 1 ) ; } } } // Returning the sorted list return tuplesList ;
public class TypeLexer { /** * $ ANTLR start " EXTENDS " */ public final void mEXTENDS ( ) throws RecognitionException { } }
// ANTLR-generated lexer rule for the EXTENDS token.
try {
    int _type = EXTENDS;
    int _channel = DEFAULT_TOKEN_CHANNEL;
    // org/javaruntype/type/parser/Type.g:35:9: ( ' extends ' )
    // org/javaruntype/type/parser/Type.g:35:11: ' extends '
    {
        // NOTE(review): the literal is padded with surrounding spaces
        // (" extends "), so the lexer matches the keyword including its
        // delimiting blanks — confirm against the .g grammar before changing.
        match(" extends ");
    }
    state.type = _type;
    state.channel = _channel;
} finally {
    // generated code: intentionally empty cleanup block
}
public class Property { /** * Filters the given set of { @ link Property } values and keeps the values * backed by an { @ link Enum } constant of the given type . * @ param properties * the properties to filter * @ param clazz * the class of a vocabulary - backing enum * @ return An immutable { @ link EnumSet } representing the filtered properties */ public static < E extends Enum < E > > Set < E > filter ( Set < Property > properties , final Class < E > clazz ) { } }
// Map each property to its backing enum constant of type `clazz`
// (non-matching properties become null and are filtered out), then freeze
// the result into an immutable EnumSet. A null input yields an empty set.
Preconditions.checkNotNull(clazz);
if (properties == null) {
    return ImmutableSet.of();
}
final Function<Property, E> toEnumConstant = new Function<Property, E>() {
    @Override
    public E apply(Property input) {
        try {
            return clazz.cast(input.toEnum());
        } catch (Exception e) {
            // not backed by (or not castable to) the requested enum type
            return null;
        }
    }
};
return Sets.immutableEnumSet(Collections2.filter(Collections2.transform(properties, toEnumConstant), Predicates.notNull()));
public class SCSIResponseParser { /** * { @ inheritDoc } */ @ Override protected final void deserializeBytes1to3 ( final int line ) throws InternetSCSIException { } }
// Decode the flag bits and the response/status bytes of a SCSI Response PDU
// from the 32-bit word `line` (bytes 1..3 of the basic header segment).
// Reserved bits must be zero — isReserved throws InternetSCSIException otherwise.
Utils.isReserved(line & RESERVED_FLAGS_MASK);
// Bidirectional read residual overflow/underflow flags.
bidirectionalReadResidualOverflow = Utils.isBitSet(line & Constants.READ_RESIDUAL_OVERFLOW_FLAG_MASK);
bidirectionalReadResidualUnderflow = Utils.isBitSet(line & Constants.READ_RESIDUAL_UNDERFLOW_FLAG_MASK);
// Plain residual overflow/underflow flags.
residualOverflow = Utils.isBitSet(line & Constants.RESIDUAL_OVERFLOW_FLAG_MASK);
residualUnderflow = Utils.isBitSet(line & Constants.RESIDUAL_UNDERFLOW_FLAG_MASK);
// Byte 2: service response code; byte 3: SCSI status code.
response = ServiceResponse.valueOf((byte) ((line & Constants.THIRD_BYTE_MASK) >>> Constants.ONE_BYTE_SHIFT));
status = SCSIStatus.valueOf((byte) (line & Constants.FOURTH_BYTE_MASK));
public class Reflect { /** * Check whether two arrays of types match , converting primitive types to * their corresponding wrappers . */ private boolean match ( Class < ? > [ ] declaredTypes , Class < ? > [ ] actualTypes ) { } }
if ( declaredTypes . length == actualTypes . length ) { for ( int i = 0 ; i < actualTypes . length ; i ++ ) { if ( actualTypes [ i ] == NULL . class ) continue ; if ( wrapper ( declaredTypes [ i ] ) . isAssignableFrom ( wrapper ( actualTypes [ i ] ) ) ) continue ; return false ; } return true ; } else { return false ; }
public class ImageModerationsImpl { /** * Fuzzily match an image against one of your custom Image Lists . You can create and manage your custom image lists using & lt ; a href = " / docs / services / 578ff44d2703741568569ab9 / operations / 578ff7b12703741568569abe " & gt ; this & lt ; / a & gt ; API . * Returns ID and tags of matching image . & lt ; br / & gt ; * & lt ; br / & gt ; * Note : Refresh Index must be run on the corresponding Image List before additions and removals are reflected in the response . * @ param contentType The content type . * @ param imageUrl The image url . * @ param listId The list Id . * @ param cacheImage Whether to retain the submitted image for future use ; defaults to false if omitted . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the MatchResponse object */ public Observable < ServiceResponse < MatchResponse > > matchUrlInputWithServiceResponseAsync ( String contentType , BodyModelModel imageUrl , String listId , Boolean cacheImage ) { } }
// AutoRest-generated client method: validate required parameters, build the
// parameterized host, issue the REST call and unwrap the raw retrofit
// response into a ServiceResponse<MatchResponse>.
if (this.client.baseUrl() == null) {
    throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
}
if (contentType == null) {
    throw new IllegalArgumentException("Parameter contentType is required and cannot be null.");
}
if (imageUrl == null) {
    throw new IllegalArgumentException("Parameter imageUrl is required and cannot be null.");
}
Validator.validate(imageUrl);
// Substitutes {baseUrl} in the service's host template (generated pattern).
String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
return service.matchUrlInput(listId, cacheImage, contentType, imageUrl, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
    .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<MatchResponse>>>() {
        @Override
        public Observable<ServiceResponse<MatchResponse>> call(Response<ResponseBody> response) {
            try {
                // Deserialize / map HTTP errors to service exceptions.
                ServiceResponse<MatchResponse> clientResponse = matchUrlInputDelegate(response);
                return Observable.just(clientResponse);
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
public class AbstractCRUDRestService { /** * { @ inheritDoc } */ @ Override public boolean doDeleteObject ( Object object , final Wave wave ) { } }
// Issue a REST DELETE for the given object and report whether it succeeded.
LOGGER.trace("Delete Object.");
// TODO(review): the resource path is hardcoded to "1" instead of the object's
// real id (see the inline placeholder) — every call deletes resource 1.
final Response deleteResponse = baseWebTarget().path("1" /* object . id ( ) */).request(MediaType.APPLICATION_XML).delete();
// NOTE(review): identity comparison with Status.OK assumes getStatusInfo()
// returns the Status enum constant itself — confirm; equals/getStatusCode
// would be safer if the implementation returns a different StatusType.
return deleteResponse.getStatusInfo() == Status.OK;
public class PreConditionException { /** * Validates that the value under test is a particular value . * This method ensures that < code > value = = condition < / code > . * @ param identifier The name of the object . * @ param condition The condition value . * @ param value The value to be tested . * @ throws PreConditionException if the condition is not met . */ public static void validateEqualTo ( long value , long condition , String identifier ) throws PreConditionException { } }
// Throw unless the tested value equals the required condition value.
if (value != condition) {
    throw new PreConditionException(identifier + " was not equal to " + condition + ". Was: " + value);
}
public class Log4JPropertiesBuilder { /** * Make log4j2 configuration for a process to push all its logs to a log file . * < ul > * < li > the file ' s name will use the prefix defined in { @ link RootLoggerConfig # getProcessId ( ) # getLogFilenamePrefix ( ) } . < / li > * < li > the file will follow the rotation policy defined in property { @ link # ROLLING _ POLICY _ PROPERTY } and * the max number of files defined in property { @ link # MAX _ FILES _ PROPERTY } < / li > * < li > the logs will follow the specified log pattern < / li > * < / ul > * @ see # buildLogPattern ( RootLoggerConfig ) */ public void configureGlobalFileLog ( RootLoggerConfig config , File logDir , String logPattern ) { } }
// Create the rolling file appender and attach it to the root logger by
// registering its reference under the root logger's appenderRef key.
final String appenderRef = writeFileAppender(config, logDir, logPattern);
final String propertyKey = ROOT_LOGGER_NAME + ".appenderRef." + appenderRef + ".ref";
putProperty(propertyKey, appenderRef);
public class Utils { /** * Deletes a directory recursively . * @ param dir Directory to delete . * @ return True if the delete was successful . */ public static boolean deleteDirectory ( final File dir ) { } }
// Recursively delete a directory tree. Returns false for a null/missing
// path, on the first child that fails to delete, or on any exception
// (e.g. SecurityException, or the NPE from listFiles() returning null).
if (dir == null || !dir.exists()) {
    return false;
}
try {
    if (dir.isDirectory()) {
        for (final File child : dir.listFiles()) {
            if (child == null || !deleteDirectory(child)) {
                return false;
            }
        }
    }
    return dir.delete();
} catch (Exception ignored) {
    // best-effort semantics: any failure simply reports false
    return false;
}
public class ThreadPoolExecutor { /** * Performs blocking or timed wait for a task , depending on * current configuration settings , or returns null if this worker * must exit because of any of : * 1 . There are more than maximumPoolSize workers ( due to * a call to setMaximumPoolSize ) . * 2 . The pool is stopped . * 3 . The pool is shutdown and the queue is empty . * 4 . This worker timed out waiting for a task , and timed - out * workers are subject to termination ( that is , * { @ code allowCoreThreadTimeOut | | workerCount > corePoolSize } ) * both before and after the timed wait , and if the queue is * non - empty , this worker is not the last thread in the pool . * @ return task , or null if the worker must exit , in which case * workerCount is decremented */ private Runnable getTask ( ) { } }
// CAS retry loop: each iteration re-reads the pool control word and either
// returns a task, decides this worker must exit (returns null), or retries.
boolean timedOut = false; // Did the last poll() time out?

for (;;) {
    int c = ctl.get();
    int rs = runStateOf(c);

    // Check if queue empty only if necessary: exit when STOP-or-later, or
    // when SHUTDOWN with nothing left to drain.
    if (rs >= SHUTDOWN && (rs >= STOP || workQueue.isEmpty())) {
        decrementWorkerCount();
        return null;
    }

    int wc = workerCountOf(c);

    // Are workers subject to culling?
    boolean timed = allowCoreThreadTimeOut || wc > corePoolSize;

    // Cull this worker if over the max, or if it is cullable and timed out —
    // but never leave the queue non-empty with zero workers.
    if ((wc > maximumPoolSize || (timed && timedOut)) && (wc > 1 || workQueue.isEmpty())) {
        if (compareAndDecrementWorkerCount(c))
            return null;
        continue; // CAS lost a race; re-read state and retry
    }

    try {
        // Timed poll enforces keep-alive for cullable workers; take() blocks
        // indefinitely for core workers.
        Runnable r = timed ? workQueue.poll(keepAliveTime, TimeUnit.NANOSECONDS) : workQueue.take();
        if (r != null)
            return r;
        timedOut = true;
    } catch (InterruptedException retry) {
        // Interrupt is used as a wakeup signal (e.g. shutdown); not a timeout.
        timedOut = false;
    }
}
public class SpringBootLogbackReconfigure { /** * 指定logback的配置文件 , 必须在ApplicationContext启动前使用 , 并且会在spring - boot上下文启动后生效 * @ param location logback配置文件 */ public static void reconfigure ( String location ) { } }
// Point Spring Boot's LoggingSystem SPI at this class so the custom logback
// configuration takes over once the application context starts. Must be
// called before the ApplicationContext is created.
System.setProperty(LoggingSystem.SYSTEM_PROPERTY, SpringBootLogbackReconfigure.class.getName());
// Remember the logback XML location for the reconfiguration hook to pick up.
SpringBootLogbackReconfigure.xmlLocation = location;
public class SmartObject { /** * This function takes string containing Javascript Object Notation ( JSON ) and * uses it to construct the corresponding smart object . This function can be used * for any non - parameterized type for example : * < pre > * Customer customer = SmartObject . fromString ( Customer . class , jsonString ) ; * < / pre > * @ param < T > The type of object being constructed . * @ param classType The concrete class type being constructed . * @ param json The JSON string . * @ return The corresponding object . * @ throws IOException The JSON string could not be parsed correctly . */ static public < T > T fromString ( Class < T > classType , String json ) throws IOException { } }
// Delegate to the shared Jackson mapper: build a reader bound to the target
// class and deserialize the JSON string into an instance of it.
return safeMapper.readerFor(classType).readValue(json);
public class NormalAlphabet { /** * { @ inheritDoc } */ @ Override public double [ ] getCuts ( Integer size ) throws SAXException { } }
// Look up the precomputed normal-distribution breakpoints (fields case2..case20)
// for the requested alphabet size; sizes outside 2..20 are rejected.
// NOTE(review): `size` is an Integer — a null argument would throw NPE on
// unboxing before reaching the default branch.
switch (size) {
    case 2: return case2;
    case 3: return case3;
    case 4: return case4;
    case 5: return case5;
    case 6: return case6;
    case 7: return case7;
    case 8: return case8;
    case 9: return case9;
    case 10: return case10;
    case 11: return case11;
    case 12: return case12;
    case 13: return case13;
    case 14: return case14;
    case 15: return case15;
    case 16: return case16;
    case 17: return case17;
    case 18: return case18;
    case 19: return case19;
    case 20: return case20;
    default:
        throw new SAXException("Invalid alphabet size.");
}
public class VelocityEnhancer { @ Override public final Class < ? extends T > enhanceClass ( Class < T > baseClass ) { } }
// Generate a subclass of `baseClass` by rendering a Velocity template
// (fed with the Javassist CtClass, its getters and abstract methods),
// compiling the result with Javassist and loading it.
logger.info("Enhancing {}", baseClass);
CtClass original = null;
try {
    original = pool.get(baseClass.getName());
    TemplateHelper templateHelper = new TemplateHelper(pool);
    VelocityContext velocityContext = new VelocityContext();
    // "_" exposes the helper's utility methods to the template.
    velocityContext.put("_", templateHelper);
    velocityContext.put("base", original);
    velocityContext.put("getters", findGetters(original));
    velocityContext.put("abstractMethods", findAbstractMethods(original));
    // Allow subclasses to contribute extra template variables.
    Map<String, Object> contextItems = getAdditionalContextItems();
    for (Map.Entry<String, Object> contextItem : contextItems.entrySet()) {
        velocityContext.put(contextItem.getKey(), contextItem.getValue());
    }
    StringWriter writer = new StringWriter();
    engine.getTemplate(getTemplateLocation()).merge(velocityContext, writer);
    logger.debug("Enhanced {} to form new class {} with source:\n{}", baseClass.getSimpleName(), templateHelper.clsName(), writer);
    return ClassLoadingUtil.toClass(templateHelper.compile());
} catch (Exception e) {
    logger.error("An error occurred while enhancing {}", baseClass);
    throw ExceptionUtil.propagate(e);
} finally {
    // Always detach the CtClass to release Javassist's pool memory.
    if (original != null) {
        original.detach();
    }
}
public class RESTReflect { /** * Finds a the rel of a method . The rel can be found on the declaring class , * but the rel on the method will have the precedence . * @ param method the method to scan * @ return the rel annotation */ static Rel findRel ( Method method ) { } }
Rel rootRel = method . getDeclaringClass ( ) . getAnnotation ( Rel . class ) ; Rel rel = method . getAnnotation ( Rel . class ) ; if ( rel != null ) { return rel ; } else if ( rootRel != null ) { return rootRel ; } return null ;
public class WhiteboxImpl { /** * Finds and returns a certain method . If the method couldn ' t be found this * method delegates to * @ param tested The instance or class containing the method . * @ param declaringClass The class where the method is supposed to be declared ( may be * { @ code null } ) . * @ param methodToExecute The method name . If { @ code null } then method will be * looked up based on the argument types only . * @ param arguments The arguments of the methods . * @ return A single method . * @ throws MethodNotFoundException if no method was found . * @ throws TooManyMethodsFoundException if too methods matched . * @ throws IllegalArgumentException if { @ code tested } is null . */ public static Method findMethodOrThrowException ( Object tested , Class < ? > declaringClass , String methodToExecute , Object [ ] arguments ) { } }
// Overload resolution: scan candidate methods (all methods of the tested
// type, or the declared methods of `declaringClass` when given) for one
// matching the name and argument types, with special handling for varargs,
// overrides, overloads and wrapper/primitive ambiguity.
if (tested == null) {
    throw new IllegalArgumentException("The object to perform the operation on cannot be null.");
}

/*
 * Get methods from the type if it's not mocked or from the super type
 * if the tested object is mocked.
 */
Class<?> testedType = null;
if (isClass(tested)) {
    testedType = (Class<?>) tested;
} else {
    testedType = tested.getClass();
}

Method[] methods = null;
if (declaringClass == null) {
    methods = getAllMethods(testedType);
} else {
    methods = declaringClass.getDeclaredMethods();
}

Method potentialMethodToInvoke = null;
for (Method method : methods) {
    // A null methodToExecute means "match by argument types only".
    if (methodToExecute == null || method.getName().equals(methodToExecute)) {
        Class<?>[] paramTypes = method.getParameterTypes();
        if ((arguments != null && (paramTypes.length == arguments.length))) {
            if (paramTypes.length == 0) {
                // Zero-arg match is unambiguous: take it and stop.
                potentialMethodToInvoke = method;
                break;
            }
            boolean methodFound = checkArgumentTypesMatchParameterTypes(method.isVarArgs(), paramTypes, arguments);
            if (methodFound) {
                if (potentialMethodToInvoke == null) {
                    potentialMethodToInvoke = method;
                } else if (potentialMethodToInvoke.getName().equals(method.getName())) {
                    if (areAllArgumentsOfSameType(arguments) && potentialMethodToInvoke.getDeclaringClass() != method.getDeclaringClass()) {
                        // We've already found the method which means that "potentialMethodToInvoke" overrides "method".
                        return potentialMethodToInvoke;
                    } else {
                        // We've found an overloaded method
                        return getBestMethodCandidate(getType(tested), method.getName(), getTypes(arguments), false);
                    }
                } else {
                    // A special case to be backward compatible
                    Method bestCandidateMethod = getMethodWithMostSpecificParameterTypes(method, potentialMethodToInvoke);
                    if (bestCandidateMethod != null) {
                        potentialMethodToInvoke = bestCandidateMethod;
                        continue;
                    }
                    /*
                     * We've already found a method match before, this
                     * means that PowerMock cannot determine which
                     * method to expect since there are two methods with
                     * the same name and the same number of arguments
                     * but one is using wrapper types.
                     */
                    throwExceptionWhenMultipleMethodMatchesFound("argument parameter types", new Method[] { potentialMethodToInvoke, method });
                }
            }
        } else if (isPotentialVarArgsMethod(method, arguments)) {
            if (potentialMethodToInvoke == null) {
                potentialMethodToInvoke = method;
            } else {
                /*
                 * We've already found a method match before, this means
                 * that PowerMock cannot determine which method to
                 * expect since there are two methods with the same name
                 * and the same number of arguments but one is using
                 * wrapper types.
                 */
                throwExceptionWhenMultipleMethodMatchesFound("argument parameter types", new Method[] { potentialMethodToInvoke, method });
            }
            break;
        } else if (arguments != null && (paramTypes.length != arguments.length)) {
            // Arity mismatch and not varargs-compatible: skip.
            continue;
        } else if (arguments == null && paramTypes.length == 1 && !paramTypes[0].isPrimitive()) {
            // A single null argument can match any one-arg method taking a reference type.
            potentialMethodToInvoke = method;
        }
    }
}

// Throws MethodNotFoundException when nothing matched.
WhiteboxImpl.throwExceptionIfMethodWasNotFound(getType(tested), methodToExecute, potentialMethodToInvoke, arguments);
return potentialMethodToInvoke;
public class LifecycleQueryNamespaceDefinitionsProposalResponse { /** * The names of chaincode that have been committed . * @ return The names of chaincode that have been committed . * @ throws ProposalException */ public Collection < String > getChaincodeNamespaceTypes ( ) throws ProposalException { } }
final Lifecycle . QueryNamespaceDefinitionsResult queryNamespaceDefinitionsResult = parsePayload ( ) ; if ( queryNamespaceDefinitionsResult == null ) { return Collections . emptySet ( ) ; } final Map < String , Lifecycle . QueryNamespaceDefinitionsResult . Namespace > namespacesMap = queryNamespaceDefinitionsResult . getNamespacesMap ( ) ; if ( null == namespacesMap ) { return Collections . emptySet ( ) ; } final Set < String > ret = new HashSet < > ( ) ; namespacesMap . forEach ( ( s , namespace ) -> { if ( "Chaincode" . equalsIgnoreCase ( namespace . getType ( ) ) ) { ret . add ( s ) ; } } ) ; return Collections . unmodifiableSet ( ret ) ;
public class CSSClassManager { /** * Retrieve a single class by name and owner * @ param name Class name * @ param owner Class owner * @ return existing ( old ) class * @ throws CSSNamingConflict if an owner was specified and doesn ' t match */ public CSSClass getClass ( String name , Object owner ) throws CSSNamingConflict { } }
CSSClass existing = store . get ( name ) ; // Not found . if ( existing == null ) { return null ; } // Different owner if ( owner != null && existing . getOwner ( ) != owner ) { throw new CSSNamingConflict ( "CSS class naming conflict between " + owner . toString ( ) + " and " + existing . getOwner ( ) . toString ( ) ) ; } return existing ;
public class Base64 { /** * Decodes four bytes from array < var > source < / var > and writes the resulting * bytes ( up to three of them ) to < var > destination < / var > . The source and * destination arrays can be manipulated anywhere along their length by * specifying < var > srcOffset < / var > and < var > destOffset < / var > . This method * does not check to make sure your arrays are large enough to accomodate * < var > srcOffset < / var > + 4 for the < var > source < / var > array or * < var > destOffset < / var > + 3 for the < var > destination < / var > array . This * method returns the actual number of bytes that were converted from the * Base64 encoding . * @ param source * the array to convert * @ param srcOffset * the index where conversion begins * @ param destination * the array to hold the conversion * @ param destOffset * the index where output will be put * @ return the number of decoded bytes converted * @ since 1.3 */ private static int decode4to3 ( byte [ ] source , int srcOffset , byte [ ] destination , int destOffset ) { } }
// Example : Dk = = if ( source [ srcOffset + 2 ] == EQUALS_SIGN ) { // Two ways to do the same thing . Don ' t know which way I like best . // int outBuff = ( ( DECODABET [ source [ srcOffset ] ] < < 24 ) > > > 6 // | ( ( DECODABET [ source [ srcOffset + 1 ] ] < < 24 ) > > > 12 ) ; int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) ; destination [ destOffset ] = ( byte ) ( outBuff >>> 16 ) ; return 1 ; } // Example : DkL = else if ( source [ srcOffset + 3 ] == EQUALS_SIGN ) { // Two ways to do the same thing . Don ' t know which way I like best . // int outBuff = ( ( DECODABET [ source [ srcOffset ] ] < < 24 ) > > > 6 // | ( ( DECODABET [ source [ srcOffset + 1 ] ] < < 24 ) > > > 12 ) // | ( ( DECODABET [ source [ srcOffset + 2 ] ] < < 24 ) > > > 18 ) ; int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) | ( ( DECODABET [ source [ srcOffset + 2 ] ] & 0xFF ) << 6 ) ; destination [ destOffset ] = ( byte ) ( outBuff >>> 16 ) ; destination [ destOffset + 1 ] = ( byte ) ( outBuff >>> 8 ) ; return 2 ; } // Example : DkLE else { try { // Two ways to do the same thing . Don ' t know which way I like // best . 
// int outBuff = ( ( DECODABET [ source [ srcOffset ] ] < < 24 ) // | ( ( DECODABET [ source [ srcOffset + 1 ] ] < < 24 ) > > > 12 ) // | ( ( DECODABET [ source [ srcOffset + 2 ] ] < < 24 ) > > > 18 ) // | ( ( DECODABET [ source [ srcOffset + 3 ] ] < < 24 ) > > > 24 ) ; int outBuff = ( ( DECODABET [ source [ srcOffset ] ] & 0xFF ) << 18 ) | ( ( DECODABET [ source [ srcOffset + 1 ] ] & 0xFF ) << 12 ) | ( ( DECODABET [ source [ srcOffset + 2 ] ] & 0xFF ) << 6 ) | ( ( DECODABET [ source [ srcOffset + 3 ] ] & 0xFF ) ) ; destination [ destOffset ] = ( byte ) ( outBuff >> 16 ) ; destination [ destOffset + 1 ] = ( byte ) ( outBuff >> 8 ) ; destination [ destOffset + 2 ] = ( byte ) ( outBuff ) ; return 3 ; } catch ( Exception e ) { System . out . println ( "" + source [ srcOffset ] + ": " + ( DECODABET [ source [ srcOffset ] ] ) ) ; System . out . println ( "" + source [ srcOffset + 1 ] + ": " + ( DECODABET [ source [ srcOffset + 1 ] ] ) ) ; System . out . println ( "" + source [ srcOffset + 2 ] + ": " + ( DECODABET [ source [ srcOffset + 2 ] ] ) ) ; System . out . println ( "" + source [ srcOffset + 3 ] + ": " + ( DECODABET [ source [ srcOffset + 3 ] ] ) ) ; return - 1 ; } // e nd catch }
public class CommercePriceListUtil {
    /**
     * Returns the last commerce price list in the ordered set where
     * groupId = &#63; and status = &#63;.
     *
     * @param groupId the group ID
     * @param status the status
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce price list, or <code>null</code> if a matching commerce price list could not be found
     */
    public static CommercePriceList fetchByG_S_Last(long groupId, int status, OrderByComparator<CommercePriceList> orderByComparator) {
        // Static facade over the persistence layer; all logic lives in the
        // persistence implementation returned by getPersistence().
        return getPersistence().fetchByG_S_Last(groupId, status, orderByComparator);
    }
}
public class BaseAuthenticationRequestRiskCalculator {
    /**
     * Calculate score authentication risk score.
     *
     * @param request        the request
     * @param authentication the authentication
     * @param service        the service
     * @param events         the events
     * @return the authentication risk score
     */
    protected BigDecimal calculateScore(final HttpServletRequest request, final Authentication authentication, final RegisteredService service, final Collection<? extends CasEvent> events) {
        // Base implementation is deliberately pessimistic: every request is
        // scored at maximum risk. Subclasses are expected to override this
        // with a real calculation based on the recorded events.
        return HIGHEST_RISK_SCORE;
    }
}
public class GlobalAddressClient { /** * Creates an address resource in the specified project using the data included in the request . * < p > Sample code : * < pre > < code > * try ( GlobalAddressClient globalAddressClient = GlobalAddressClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * Address addressResource = Address . newBuilder ( ) . build ( ) ; * Operation response = globalAddressClient . insertGlobalAddress ( project , addressResource ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ param addressResource A reserved address resource . ( = = resource _ for beta . addresses = = ) ( = = * resource _ for v1 . addresses = = ) ( = = resource _ for beta . globalAddresses = = ) ( = = resource _ for * v1 . globalAddresses = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation insertGlobalAddress ( ProjectName project , Address addressResource ) { } }
InsertGlobalAddressHttpRequest request = InsertGlobalAddressHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . setAddressResource ( addressResource ) . build ( ) ; return insertGlobalAddress ( request ) ;
public class DataTransferServiceClient { /** * Returns user facing log messages for the data transfer run . * < p > Sample code : * < pre > < code > * try ( DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient . create ( ) ) { * RunName parent = ProjectRunName . of ( " [ PROJECT ] " , " [ TRANSFER _ CONFIG ] " , " [ RUN ] " ) ; * for ( TransferMessage element : dataTransferServiceClient . listTransferLogs ( parent . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent Transfer run name in the form : * ` projects / { project _ id } / transferConfigs / { config _ Id } / runs / { run _ id } ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListTransferLogsPagedResponse listTransferLogs ( String parent ) { } }
ListTransferLogsRequest request = ListTransferLogsRequest . newBuilder ( ) . setParent ( parent ) . build ( ) ; return listTransferLogs ( request ) ;
public class BatchKernelImpl { /** * Publish jms topic if batch jms event is available * @ param objectToPublish WSJobInstance * @ param eventToPublish */ private void publishEvent ( WSJobInstance objectToPublish , String eventToPublishTo , String correlationId ) { } }
if ( eventsPublisher != null ) { eventsPublisher . publishJobInstanceEvent ( objectToPublish , eventToPublishTo , correlationId ) ; }
public class Scope {
    /**
     * Exit the inner-most Scope: remove all Keys created since the matching
     * enter call, except for the listed Keys.
     *
     * @param keep keys to preserve (null entries and a null array are tolerated)
     * @return Returns the list of kept keys (the argument, unchanged).
     */
    static public Key[] exit(Key... keep) {
        // Collect the non-null keys to keep, then sort them so membership can
        // be tested with binary search below.
        List<Key> keylist = new ArrayList<>();
        if (keep != null)
            for (Key k : keep)
                if (k != null) keylist.add(k);
        Object[] arrkeep = keylist.toArray();
        Arrays.sort(arrkeep);
        // Pop the inner-most tracking frame and delete every key in it that is
        // not in the keep set. Removals are batched through a Futures object
        // and awaited before returning.
        Stack<HashSet<Key>> keys = _scope.get()._keys;
        if (keys.size() > 0) {
            Futures fs = new Futures();
            for (Key key : keys.pop()) {
                int found = Arrays.binarySearch(arrkeep, key);
                // arrkeep.length == 0 short-circuits the (then meaningless)
                // binary-search result; found < 0 means "not kept".
                if ((arrkeep.length == 0 || found < 0) && key != null) Keyed.remove(key, fs);
            }
            fs.blockForPending();
        }
        return keep;
    }
}
public class WordUtil {
    /**
     * Returns the given text with the first letter in upper case.
     * <h2>Examples:</h2>
     * <pre>
     * capitalize("hi")       == "Hi"
     * capitalize("Hi")       == "Hi"
     * capitalize("hi there") == "Hi there"
     * capitalize("")         == ""
     * capitalize(null)       == null
     * </pre>
     * (The old Javadoc wrongly claimed {@code capitalize("hi there") == "hi there"};
     * the first letter is always upper-cased.)
     *
     * @param text the text to capitalize
     * @return text with the first letter in upper case, or the input itself
     *         when it is {@code null} or empty
     */
    public static String capitalize(String text) {
        if (text == null || text.isEmpty()) {
            return text;
        }
        // Upper-case only the leading character; the remainder of the string
        // is left untouched (no lower-casing of the tail).
        return text.substring(0, 1).toUpperCase().concat(text.substring(1));
    }
}
public class MachinetagsApi { /** * Fetch recently used ( or created ) machine tags values . * < br > * This method does not require authentication . * @ param namespace ( Optional ) A namespace that all values should be restricted to . * @ param predicate ( Optional ) A predicate that all values should be restricted to . * @ param addedSince ( Optional ) Only return machine tags values that have been added since this timestamp , in epoch seconds . * @ param sign if true , the request will be signed . * @ return object containing a list of recently used or created machine tags values . * @ throws JinxException if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . machinetags . getRecentValues . html " > flickr . machinetags . getRecentValues < / a > */ public Values getRecentValues ( String namespace , String predicate , String addedSince , boolean sign ) throws JinxException { } }
Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.machinetags.getRecentValues" ) ; if ( ! JinxUtils . isNullOrEmpty ( namespace ) ) { params . put ( "namespace" , namespace ) ; } if ( ! JinxUtils . isNullOrEmpty ( predicate ) ) { params . put ( "predicate" , predicate ) ; } if ( ! JinxUtils . isNullOrEmpty ( addedSince ) ) { params . put ( "added_since" , addedSince ) ; } // if ( perPage > 0 ) { // params . put ( " per _ page " , Integer . toString ( perPage ) ) ; // if ( page > 0 ) { // params . put ( " page " , Integer . toString ( page ) ) ; return jinx . flickrGet ( params , Values . class , sign ) ;
public class StyleUtilities { /** * Checks if the list of { @ link FeatureTypeStyleWrapper } s supplied contains one with the supplied name . * < p > If the rule is contained it adds an index to the name . * @ param ftsWrapperList the list of featureTypeStyles to check . * @ param ftsName the name of the featureTypeStyle to find . * @ return the new name of the featureTypeStyle . */ public static String checkSameNameFeatureTypeStyle ( List < FeatureTypeStyleWrapper > ftsWrapperList , String ftsName ) { } }
int index = 1 ; String name = ftsName . trim ( ) ; for ( int i = 0 ; i < ftsWrapperList . size ( ) ; i ++ ) { FeatureTypeStyleWrapper ftsWrapper = ftsWrapperList . get ( i ) ; String tmpName = ftsWrapper . getName ( ) ; if ( tmpName == null ) { continue ; } tmpName = tmpName . trim ( ) ; if ( tmpName . equals ( name ) ) { // name exists , change the name of the entering if ( name . endsWith ( ")" ) ) { name = name . trim ( ) . replaceFirst ( "\\([0-9]+\\)$" , "(" + ( index ++ ) + ")" ) ; } else { name = name + " (" + ( index ++ ) + ")" ; } // start again i = 0 ; } if ( index == 1000 ) { // something odd is going on throw new RuntimeException ( ) ; } } return name ;
public class JDBCUtil { /** * Calls < code > stmt . executeUpdate ( ) < / code > on the supplied statement , checking to see that it * returns the expected update count and throwing a persistence exception if it does not . */ public static void checkedUpdate ( PreparedStatement stmt , int expectedCount ) throws SQLException , PersistenceException { } }
int modified = stmt . executeUpdate ( ) ; if ( modified != expectedCount ) { String err = "Statement did not modify expected number of rows [stmt=" + stmt + ", expected=" + expectedCount + ", modified=" + modified + "]" ; throw new PersistenceException ( err ) ; }
public class Evaluation {
    /**
     * Assertion for the result. Depending on the CTX, either {@code as} or
     * {@code to} will read more naturally; both are equivalent.
     *
     * @param assertions consumer that performs assertions against the result
     * @return the context object returned by {@code to}, for chaining
     */
    public CTX as(@Nonnull java.util.function.Consumer<RS> assertions) {
        // Pure alias for to(assertions) -- no behavior of its own.
        return to(assertions);
    }
}
public class MetaDataService { /** * create or replace metric * @ param metric metric * @ return is success */ public boolean createOrReplaceMetric ( Metric metric ) { } }
String metricName = metric . getName ( ) ; checkMetricIsEmpty ( metricName ) ; QueryPart < Metric > queryPart = new Query < Metric > ( "metrics" ) . path ( metricName , true ) ; return httpClientManager . updateMetaData ( queryPart , put ( metric ) ) ;
public class USCImpl {
    /**
     * <!-- begin-user-doc -->
     * Reflective "is this feature explicitly set?" check generated by EMF.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.USC__BYPSIDEN:
                // A feature is "set" when its value differs from the declared
                // default; the null default needs the asymmetric comparison.
                return BYPSIDEN_EDEFAULT == null ? bypsiden != null : !BYPSIDEN_EDEFAULT.equals(bypsiden);
        }
        return super.eIsSet(featureID);
    }
}
public class ZooKeeperMasterModel {
    /**
     * Create a deployment group.
     *
     * <p>If successful, the following ZK nodes will be created:
     * <ul>
     * <li>/config/deployment-groups/[group-name]</li>
     * <li>/status/deployment-groups/[group-name]</li>
     * <li>/status/deployment-groups/[group-name]/hosts</li>
     * </ul>
     * These nodes are guaranteed to exist until the DG is removed.
     *
     * <p>If the operation fails no ZK nodes will be created. If any of the
     * nodes above already exist the operation will fail.
     *
     * @throws DeploymentGroupExistsException If a DG with the same name already exists.
     */
    @Override
    public void addDeploymentGroup(final DeploymentGroup deploymentGroup) throws DeploymentGroupExistsException {
        log.info("adding deployment-group: {}", deploymentGroup);
        final ZooKeeperClient client = provider.get("addDeploymentGroup");
        try {
            try {
                // Parent paths may legitimately already exist.
                client.ensurePath(Paths.configDeploymentGroups());
                client.ensurePath(Paths.statusDeploymentGroups());
                // All group nodes are created in a single ZK transaction so
                // either every node exists afterwards or none do.
                client.transaction(create(Paths.configDeploymentGroup(deploymentGroup.getName()), deploymentGroup), create(Paths.statusDeploymentGroup(deploymentGroup.getName())), create(Paths.statusDeploymentGroupHosts(deploymentGroup.getName()), Json.asBytesUnchecked(emptyList())), create(Paths.statusDeploymentGroupRemovedHosts(deploymentGroup.getName()), Json.asBytesUnchecked(emptyList())));
            } catch (final NodeExistsException e) {
                // Inner catch: a pre-existing group node means the DG exists.
                throw new DeploymentGroupExistsException(deploymentGroup.getName());
            }
        } catch (final KeeperException e) {
            // Any other ZK failure is unexpected and fatal for this call.
            throw new HeliosRuntimeException("adding deployment-group " + deploymentGroup + " failed", e);
        }
    }
}
public class ProcessExecutor { /** * Sets the program and its arguments which are being executed . * @ param command A string array containing the program and its arguments . * @ return This process executor . */ public ProcessExecutor command ( String ... command ) { } }
builder . command ( fixArguments ( Arrays . asList ( command ) ) ) ; return this ;
public class ComponentImpl {
    /**
     * Return the element with the given name that has at least the given
     * access level, or the default value if no matching member exists.
     *
     * @param access       minimum access level required
     * @param name         member name to look up
     * @param defaultValue value returned when no matching member is found
     * @return matching value, or {@code defaultValue}
     */
    protected Object get(int access, String name, Object defaultValue) {
        // Delegate to the Key-based overload; KeyImpl.init builds the
        // normalized collection key used for the lookup.
        return get(access, KeyImpl.init(name), defaultValue);
    }
}
public class RouteUtils { /** * Gets the list of Argument , i . e . formal parameter metadata for the given method . * @ param method the method * @ return the list of arguments */ public static List < ActionParameter > buildActionParameterList ( Method method ) { } }
List < ActionParameter > arguments = new ArrayList < > ( ) ; Annotation [ ] [ ] annotations = method . getParameterAnnotations ( ) ; Class < ? > [ ] typesOfParameters = method . getParameterTypes ( ) ; Type [ ] genericTypeOfParameters = method . getGenericParameterTypes ( ) ; for ( int i = 0 ; i < annotations . length ; i ++ ) { arguments . add ( ActionParameter . from ( method , annotations [ i ] , typesOfParameters [ i ] , genericTypeOfParameters [ i ] ) ) ; } return arguments ;
public class DSLMapLexer {
    /**
     * $ANTLR start "WS"
     *
     * ANTLR-generated lexer rule for whitespace: matches one or more of
     * space, tab, or form-feed and routes the token to the HIDDEN channel.
     * Generated code -- do not hand-edit the logic.
     */
    public final void mWS() throws RecognitionException {
        try {
            int _type = WS;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:274:9: ( ( ' ' | '\\t' | '\\f' )+ )
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:274:17: ( ' ' | '\\t' | '\\f' )+
            {
                // ( ' ' | '\\t' | '\\f' )+ -- loop consuming whitespace chars
                int cnt1 = 0;
                loop1: while (true) {
                    int alt1 = 2;
                    int LA1_0 = input.LA(1);
                    if ((LA1_0 == '\t' || LA1_0 == '\f' || LA1_0 == ' ')) {
                        alt1 = 1;
                    }
                    switch (alt1) {
                        case 1:
                        // another whitespace char: consume it
                        {
                            if (input.LA(1) == '\t' || input.LA(1) == '\f' || input.LA(1) == ' ') {
                                input.consume();
                                state.failed = false;
                            } else {
                                // lookahead said match but consume disagreed
                                if (state.backtracking > 0) {
                                    state.failed = true;
                                    return;
                                }
                                MismatchedSetException mse = new MismatchedSetException(null, input);
                                recover(mse);
                                throw mse;
                            }
                        }
                        break;
                        default:
                            // no more whitespace; the '+' requires at least one match
                            if (cnt1 >= 1) break loop1;
                            if (state.backtracking > 0) {
                                state.failed = true;
                                return;
                            }
                            EarlyExitException eee = new EarlyExitException(1, input);
                            throw eee;
                    }
                    cnt1++;
                }
                // Whitespace tokens are hidden from the parser.
                if (state.backtracking == 0) {
                    _channel = HIDDEN;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Select {
    /**
     * Forward pass: y[i] = x[j], where j = (i1, i2, ..., i(d-1), k, i(d+1), ..., i(n)).
     *
     * Selects the slice at index {@code idx} along dimension {@code dim} of the
     * upstream module's output and caches it in {@code y}.
     */
    @Override
    public Tensor forward() {
        Tensor x = modIn.getOutput();
        // select() drops dimension `dim`, keeping only the idx-th slice.
        y = x.select(dim, idx);
        return y;
    }
}
public class StylesheetHandler {
    /**
     * Do common initialization.
     *
     * @param processor non-null reference to the transformer factory that owns this handler.
     */
    void init(TransformerFactoryImpl processor) {
        m_stylesheetProcessor = processor;
        // Set the initial content handler.
        m_processors.push(m_schema.getElementProcessor());
        this.pushNewNamespaceSupport();
        // Historical code kept for reference (disabled):
        // m_includeStack.push(SystemIDResolver.getAbsoluteURI(this.getBaseIdentifier(), null));
        // initXPath(processor, null);
    }
}
public class NumberType {
    /**
     * Converter from a numeric object to Integer. Input is checked to be
     * within the range represented by the given number type, and an SQL
     * out-of-range error (22003) is raised otherwise.
     *
     * @param session current session (used for truncate-vs-round behavior on doubles)
     * @param a       the numeric value (Integer, Long, BigDecimal, Double or Float)
     * @param type    the target SQL numeric type code
     * @return the value as an Integer
     */
    static Integer convertToInt(SessionInterface session, Object a, int type) {
        int value;
        if (a instanceof Integer) {
            if (type == Types.SQL_INTEGER) {
                // Already the right wrapper for SQL INTEGER; no range check needed.
                return (Integer) a;
            }
            value = ((Integer) a).intValue();
        } else if (a instanceof Long) {
            long temp = ((Long) a).longValue();
            if (Integer.MAX_VALUE < temp || temp < Integer.MIN_VALUE) {
                // numeric value out of range
                throw Error.error(ErrorCode.X_22003);
            }
            value = (int) temp;
        } else if (a instanceof BigDecimal) {
            BigDecimal bd = ((BigDecimal) a);
            if (bd.compareTo(MAX_INT) > 0 || bd.compareTo(MIN_INT) < 0) {
                throw Error.error(ErrorCode.X_22003);
            }
            // intValue() truncates any fractional part.
            value = bd.intValue();
        } else if (a instanceof Double || a instanceof Float) {
            double d = ((Number) a).doubleValue();
            if (session instanceof Session) {
                if (!((Session) session).database.sqlConvertTruncate) {
                    // Round half-even instead of truncating, per session setting.
                    d = java.lang.Math.rint(d);
                }
            }
            // The +1/-1 bounds reject values that would wrap on the (int) cast.
            if (Double.isInfinite(d) || Double.isNaN(d) || d >= (double) Integer.MAX_VALUE + 1 || d <= (double) Integer.MIN_VALUE - 1) {
                throw Error.error(ErrorCode.X_22003);
            }
            value = (int) d;
        } else {
            // incompatible data type in conversion
            throw Error.error(ErrorCode.X_42561);
        }
        // Narrower target types get an extra range check on the int value.
        if (type == Types.TINYINT) {
            if (Byte.MAX_VALUE < value || value < Byte.MIN_VALUE) {
                throw Error.error(ErrorCode.X_22003);
            }
        } else if (type == Types.SQL_SMALLINT) {
            if (Short.MAX_VALUE < value || value < Short.MIN_VALUE) {
                throw Error.error(ErrorCode.X_22003);
            }
        }
        return Integer.valueOf(value);
    }
}
public class DbPro {
    /**
     * Execute a batch of SQL INSERT, UPDATE, or DELETE queries.
     * <pre>
     * Example:
     * String sql = "insert into user(name, cash) values(?, ?)";
     * int[] result = Db.use().batch(sql, new Object[][]{{"James", 888}, {"zhanjin", 888}});
     * </pre>
     *
     * @param sql       The SQL to execute.
     * @param paras     An array of query replacement parameters. Each row in this array is one set of batch replacement values.
     * @param batchSize number of statements to send per JDBC batch round-trip
     * @return The number of rows updated per statement
     */
    public int[] batch(String sql, Object[][] paras, int batchSize) {
        Connection conn = null;
        Boolean autoCommit = null;
        try {
            conn = config.getConnection();
            // Remember the connection's original auto-commit setting so it can
            // be restored in the finally block; run the batch in one transaction.
            autoCommit = conn.getAutoCommit();
            conn.setAutoCommit(false);
            return batch(config, conn, sql, paras, batchSize);
        } catch (Exception e) {
            throw new ActiveRecordException(e);
        } finally {
            // autoCommit != null implies conn was obtained successfully.
            if (autoCommit != null)
                try {
                    conn.setAutoCommit(autoCommit);
                } catch (Exception e) {
                    // Restoration failure is logged, not rethrown, so close still runs.
                    LogKit.error(e.getMessage(), e);
                }
            config.close(conn);
        }
    }
}
public class StoreUtils { /** * Check if the current node is part of routing request based on cluster . xml * or throw an exception . * @ param key The key we are checking * @ param routingStrategy The routing strategy * @ param currentNode Current node */ public static void assertValidMetadata ( ByteArray key , RoutingStrategy routingStrategy , Node currentNode ) { } }
List < Node > nodes = routingStrategy . routeRequest ( key . get ( ) ) ; for ( Node node : nodes ) { if ( node . getId ( ) == currentNode . getId ( ) ) { return ; } } throw new InvalidMetadataException ( "Client accessing key belonging to partitions " + routingStrategy . getPartitionList ( key . get ( ) ) + " not present at " + currentNode ) ;
public class JobsInner {
    /**
     * Stop the job identified by jobId.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param jobId The job id.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> stopAsync(String resourceGroupName, String automationAccountName, UUID jobId, final ServiceCallback<Void> serviceCallback) {
        // Callback-style adapter over the observable-based overload.
        return ServiceFuture.fromResponse(stopWithServiceResponseAsync(resourceGroupName, automationAccountName, jobId), serviceCallback);
    }
}
public class XMLDatabase {
    /**
     * Reinitializes the instance variables for a new build.
     */
    private void init() {
        masterLanguage = null;
        additionalRootAttrs.clear();
        // the collection is not modifiable and therefore cannot be cleared:
        // replace it with a fresh mutable list instead.
        additionalNamespaces = new ArrayList<>();
        textNodeList.clear();
        parseWarnings.clear();
    }
}
public class DescribeReplicationTasksResult { /** * A description of the replication tasks . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setReplicationTasks ( java . util . Collection ) } or { @ link # withReplicationTasks ( java . util . Collection ) } if you * want to override the existing values . * @ param replicationTasks * A description of the replication tasks . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeReplicationTasksResult withReplicationTasks ( ReplicationTask ... replicationTasks ) { } }
if ( this . replicationTasks == null ) { setReplicationTasks ( new java . util . ArrayList < ReplicationTask > ( replicationTasks . length ) ) ; } for ( ReplicationTask ele : replicationTasks ) { this . replicationTasks . add ( ele ) ; } return this ;
public class LastModifiedServlet {
    /**
     * Adds a last modified time (to the nearest second) to a URL if the
     * resource is directly available as a local resource. Only applies to
     * relative URLs (./, ../) or URLs that begin with a slash (/).
     * This implementation assumes anchors (#) are always after the last
     * question mark (?).
     */
    public static String addLastModified(ServletContext servletContext, HttpServletRequest request, String servletPath, String url, AddLastModifiedWhen when) throws MalformedURLException {
        // Never try to add if when == false
        if (when != AddLastModifiedWhen.FALSE) {
            // Get the context-relative path (resolves relative paths)
            String resourcePath = ServletUtil.getAbsolutePath(servletPath, url);
            // Only context-relative resources can have a local last-modified time.
            if (resourcePath.startsWith("/")) {
                // Strip parameters from resourcePath
                {
                    int questionPos = resourcePath.lastIndexOf('?');
                    resourcePath = questionPos == -1 ? resourcePath : resourcePath.substring(0, questionPos);
                }
                String extension = FileUtils.getExtension(resourcePath).toLowerCase(Locale.ROOT);
                final boolean doAdd;
                if (when == AddLastModifiedWhen.TRUE) {
                    // Always try to add
                    doAdd = true;
                } else {
                    // AUTO mode: check for request header disabling auto last modified
                    if ("false".equalsIgnoreCase(request.getHeader(LAST_MODIFIED_HEADER_NAME))) {
                        doAdd = false;
                    } else {
                        // Conditionally try to add based on file extension
                        doAdd = staticExtensions.contains(extension);
                    }
                }
                if (doAdd) {
                    long lastModified = getLastModified(servletContext, request, resourcePath, extension);
                    // 0 means "unknown" -- leave the URL unchanged in that case.
                    if (lastModified != 0) {
                        int questionPos = url.lastIndexOf('?');
                        int anchorStart = url.lastIndexOf('#');
                        if (anchorStart == -1) {
                            // No anchor: append the parameter at the end.
                            url = url + (questionPos == -1 ? '?' : '&') + LAST_MODIFIED_PARAMETER_NAME + "=" + encodeLastModified(lastModified);
                        } else {
                            // With anchor: insert the parameter before the fragment.
                            url = url.substring(0, anchorStart) + (questionPos == -1 ? '?' : '&') + LAST_MODIFIED_PARAMETER_NAME + "=" + encodeLastModified(lastModified) + url.substring(anchorStart);
                        }
                    }
                }
            }
        }
        return url;
    }
}
public class PullerInternal {
    /**
     * in CBL_Puller.m
     * - (void) changeTrackerReceivedSequence: (id)remoteSequenceID
     *                                  docID: (NSString*)docID
     *                                 revIDs: (NSArray*)revIDs
     *                                deleted: (BOOL)deleted
     *
     * Handles one entry from the remote _changes feed: validates the doc id,
     * then queues a PulledRevision for each listed revision.
     */
    protected void processChangeTrackerChange(final Map<String, Object> change) {
        // Process each change from the feed:
        String docID = (String) change.get("id");
        // Ignore malformed entries and invalid/illegal document ids.
        if (docID == null || !Document.isValidDocumentId(docID))
            return;
        String lastSequence = change.get("seq").toString();
        boolean deleted = (change.containsKey("deleted") && change.get("deleted").equals(Boolean.TRUE));
        // NOTE(review): unchecked cast -- assumes the feed's "changes" value is
        // a list of maps as produced by the change tracker.
        List<Map<String, Object>> changes = (List<Map<String, Object>>) change.get("changes");
        for (Map<String, Object> changeDict : changes) {
            String revID = (String) changeDict.get("rev");
            if (revID == null) {
                continue;
            }
            PulledRevision rev = new PulledRevision(docID, revID, deleted);
            // Remember its remote sequence ID (opaque), and make up a numeric sequence
            // based on the order in which it appeared in the _changes feed:
            rev.setRemoteSequenceID(lastSequence);
            // More than one listed revision for the same doc implies a conflict.
            if (changes.size() > 1)
                rev.setConflicted(true);
            Log.d(TAG, "%s: adding rev to inbox %s", this, rev);
            Log.v(TAG, "%s: changeTrackerReceivedChange() incrementing changesCount by 1", this);
            // this is purposefully done slightly different than the ios version
            addToChangesCount(1);
            addToInbox(rev);
        }
        pauseOrResume();
    }
}
public class PickleUtils { /** * read an arbitrary ' long ' number . Returns an int / long / BigInteger as appropriate to hold the number . */ public static Number decode_long ( byte [ ] data ) { } }
if ( data . length == 0 ) return 0L ; // first reverse the byte array because pickle stores it little - endian byte [ ] data2 = new byte [ data . length ] ; for ( int i = 0 ; i < data . length ; ++ i ) data2 [ data . length - i - 1 ] = data [ i ] ; BigInteger bigint = new BigInteger ( data2 ) ; return optimizeBigint ( bigint ) ;
public class CoordinateUtils { /** * Interpolates a z value ( linearly ) between the two coordinates . * @ param firstCoordinate * @ param lastCoordinate * @ param toBeInterpolated * @ return */ public static double interpolate ( Coordinate firstCoordinate , Coordinate lastCoordinate , Coordinate toBeInterpolated ) { } }
if ( Double . isNaN ( firstCoordinate . z ) ) { return Double . NaN ; } if ( Double . isNaN ( lastCoordinate . z ) ) { return Double . NaN ; } return firstCoordinate . z + ( lastCoordinate . z - firstCoordinate . z ) * firstCoordinate . distance ( toBeInterpolated ) / ( firstCoordinate . distance ( toBeInterpolated ) + toBeInterpolated . distance ( lastCoordinate ) ) ;