signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PermissionUtil { /** * Check whether the provided { @ link net . dv8tion . jda . core . entities . Member Member } can use the specified { @ link net . dv8tion . jda . core . entities . Emote Emote } . * < p > If the specified Member is not in the emote ' s guild or the emote provided is fake this will return false . * Otherwise it will check if the emote is restricted to any roles and if that is the case if the Member has one of these . * < p > In the case of an { @ link net . dv8tion . jda . core . entities . Emote # isAnimated ( ) animated } Emote , this will * check if the issuer is the currently logged in account , and then check if the account has * { @ link net . dv8tion . jda . core . entities . SelfUser # isNitro ( ) nitro } , and return false if it doesn ' t . * < br > For other accounts , this method will not take into account whether the emote is animated , as there is * no real way to check if the Member can interact with them . * < br > < b > Note < / b > : This is not checking if the issuer owns the Guild or not . * @ param issuer * The member that tries to interact with the Emote * @ param emote * The emote that is the target interaction * @ throws IllegalArgumentException * if any of the provided parameters is { @ code null } * or the provided entities are not from the same guild * @ return True , if the issuer can interact with the emote */ public static boolean canInteract ( Member issuer , Emote emote ) { } }
Checks . notNull ( issuer , "Issuer Member" ) ; Checks . notNull ( emote , "Target Emote" ) ; if ( ! issuer . getGuild ( ) . equals ( emote . getGuild ( ) ) ) throw new IllegalArgumentException ( "The issuer and target are not in the same Guild" ) ; // We don ' t need to check based on the fact it is animated if it ' s a BOT account // because BOT accounts cannot have nitro , and have access to animated Emotes naturally . if ( emote . isAnimated ( ) && ! issuer . getUser ( ) . isBot ( ) ) { // This is a currently logged in client , meaning we can check if they have nitro or not . // If this isn ' t the currently logged in account , we just check it like a normal emote , // since there is no way to verify if they have nitro or not . if ( issuer . getUser ( ) instanceof SelfUser ) { // If they don ' t have nitro , we immediately return // false , otherwise we proceed with the remaining checks . if ( ! ( ( SelfUser ) issuer . getUser ( ) ) . isNitro ( ) ) return false ; } } return emote . canProvideRoles ( ) && ( emote . getRoles ( ) . isEmpty ( ) // Emote restricted to roles - > check if the issuer has them || CollectionUtils . containsAny ( issuer . getRoles ( ) , emote . getRoles ( ) ) ) ;
public class Set { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > JCYPHER LANGUAGE ELEMENT < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > set to a primitive value like a String or a Number < / i > < / div > * < br / > */ public < E > T to ( E value ) { } }
ModifyExpression mx = ( ModifyExpression ) this . astNode ; mx . setValue ( value ) ; return this . connector ;
public class KNXNetworkLinkIP { /** * { @ inheritDoc } When communicating with a KNX network which uses open medium , * messages are broadcasted within domain ( as opposite to system broadcast ) by * default . Specify < code > dst null < / code > for system broadcast . */ public void sendRequest ( KNXAddress dst , Priority p , byte [ ] nsdu ) throws KNXLinkClosedException , KNXTimeoutException { } }
send ( dst , p , nsdu , false ) ;
public class RtfTable { /** * Imports the rows and settings from the Table into this * RtfTable . * @ param table The source PdfPTable * @ since 2.1.3 */ private void importTable ( PdfPTable table ) { } }
this . rows = new ArrayList ( ) ; this . tableWidthPercent = table . getWidthPercentage ( ) ; // this . tableWidthPercent = table . getWidth ( ) ; this . proportionalWidths = table . getAbsoluteWidths ( ) ; // this . proportionalWidths = table . getProportionalWidths ( ) ; this . cellPadding = ( float ) ( table . spacingAfter ( ) * TWIPS_FACTOR ) ; // this . cellPadding = ( float ) ( table . getPadding ( ) * TWIPS _ FACTOR ) ; this . cellSpacing = ( float ) ( table . spacingAfter ( ) * TWIPS_FACTOR ) ; // this . cellSpacing = ( float ) ( table . getSpacing ( ) * TWIPS _ FACTOR ) ; // this . borders = new RtfBorderGroup ( this . document , RtfBorder . ROW _ BORDER , table . getBorder ( ) , table . getBorderWidth ( ) , table . getBorderColor ( ) ) ; // this . borders = new RtfBorderGroup ( this . document , RtfBorder . ROW _ BORDER , table . getBorder ( ) , table . getBorderWidth ( ) , table . getBorderColor ( ) ) ; this . alignment = table . getHorizontalAlignment ( ) ; // this . alignment = table . getAlignment ( ) ; int i = 0 ; Iterator rowIterator = table . getRows ( ) . iterator ( ) ; // Iterator rowIterator = table . iterator ( ) ; while ( rowIterator . hasNext ( ) ) { this . rows . add ( new RtfRow ( this . document , this , ( PdfPRow ) rowIterator . next ( ) , i ) ) ; i ++ ; } for ( i = 0 ; i < this . rows . size ( ) ; i ++ ) { ( ( RtfRow ) this . rows . get ( i ) ) . handleCellSpanning ( ) ; ( ( RtfRow ) this . rows . get ( i ) ) . cleanRow ( ) ; } this . headerRows = table . getHeaderRows ( ) ; // this . headerRows = table . getLastHeaderRow ( ) ; this . cellsFitToPage = table . getKeepTogether ( ) ; // this . cellsFitToPage = table . isCellsFitPage ( ) ; this . tableFitToPage = table . getKeepTogether ( ) ; // this . tableFitToPage = table . isTableFitsPage ( ) ; // if ( ! Float . isNaN ( table . getOffset ( ) ) ) { // this . offset = ( int ) ( table . getOffset ( ) * 2 ) ; // if ( ! Float . isNaN ( table . getOffset ( ) ) ) { // this . 
offset = ( int ) ( table . getOffset ( ) * 2 ) ;
public class HttpRpcPluginQuery { /** * Return the base route with no plugin prefix in it . This is matched with * values returned by { @ link HttpRpcPlugin # getPath ( ) } . * @ return the base route path ( no query parameters , etc . ) */ @ Override public String getQueryBaseRoute ( ) { } }
final String [ ] parts = explodePath ( ) ; if ( parts . length < 2 ) { // Must be at least something like : / plugin / blah throw new BadRequestException ( "Invalid plugin request path: " + getQueryPath ( ) ) ; } return parts [ 1 ] ;
public class ElasticSearchIndex { /** * Configure ElasticSearchIndex ' s ES client according to semantics introduced in * 0.5.1 . Allows greater flexibility than the previous config semantics . See * { @ link com . thinkaurelius . titan . diskstorage . es . ElasticSearchSetup } for more * information . * This is activated by setting an explicit value for { @ link # INTERFACE } in * the Titan configuration . * @ see # legacyConfiguration ( com . thinkaurelius . titan . diskstorage . configuration . Configuration ) * @ param config a config passed to ElasticSearchIndex ' s constructor * @ return a node and client object open and ready for use */ private ElasticSearchSetup . Connection interfaceConfiguration ( Configuration config ) { } }
ElasticSearchSetup clientMode = ConfigOption . getEnumValue ( config . get ( INTERFACE ) , ElasticSearchSetup . class ) ; try { return clientMode . connect ( config ) ; } catch ( IOException e ) { throw new TitanException ( e ) ; }
public class BitOutputStream { /** * Write . * @ param bits the bits * @ throws IOException the io exception */ public synchronized void write ( final Bits bits ) throws IOException { } }
Bits newRemainder = this . remainder . concatenate ( bits ) ; final int newRemainingBits = newRemainder . bitLength % 8 ; int bitsToWrite = newRemainder . bitLength - newRemainingBits ; if ( bitsToWrite > 0 ) { assert ( 0 == bitsToWrite % 8 ) ; final Bits toWrite = newRemainder . range ( 0 , bitsToWrite ) ; this . inner . write ( toWrite . getBytes ( ) ) ; newRemainder = newRemainder . range ( bitsToWrite ) ; } this . remainder = newRemainder ; this . totalBitsWritten += bits . bitLength ;
public class AddressDivisionGrouping { /** * Returns whether the values of this division grouping contain the prefix block for the given prefix length * @ param prefixLength * @ return */ @ Override public boolean containsPrefixBlock ( int prefixLength ) { } }
checkSubnet ( this , prefixLength ) ; int divisionCount = getDivisionCount ( ) ; int prevBitCount = 0 ; for ( int i = 0 ; i < divisionCount ; i ++ ) { AddressDivision division = getDivision ( i ) ; int bitCount = division . getBitCount ( ) ; int totalBitCount = bitCount + prevBitCount ; if ( prefixLength < totalBitCount ) { int divPrefixLen = Math . max ( 0 , prefixLength - prevBitCount ) ; if ( ! division . isPrefixBlock ( division . getDivisionValue ( ) , division . getUpperDivisionValue ( ) , divPrefixLen ) ) { return false ; } for ( ++ i ; i < divisionCount ; i ++ ) { division = getDivision ( i ) ; if ( ! division . isFullRange ( ) ) { return false ; } } return true ; } prevBitCount = totalBitCount ; } return true ;
public class SCharsStemmer { /** * Removes ending s of words longer than 3 chars */ @ Override public String stem ( String token ) { } }
if ( token != null && token . length ( ) > 3 && token . endsWith ( "s" ) ) { return token . substring ( 0 , token . length ( ) - 1 ) ; } return token ;
public class JCRSQLQueryBuilder { /** * Creates < code > LocationStepQueryNode < / code > s from a < code > path < / code > . * @ param path the path pattern * @ param operation the type of the parent node */ private void createPathQuery ( String path , int operation ) { } }
MergingPathQueryNode pathNode = new MergingPathQueryNode ( operation , factory . createPathQueryNode ( null ) . getValidJcrSystemNodeTypeNames ( ) ) ; pathNode . setAbsolute ( true ) ; if ( path . equals ( "/" ) ) { pathNode . addPathStep ( factory . createLocationStepQueryNode ( pathNode ) ) ; pathConstraints . add ( pathNode ) ; return ; } String [ ] names = path . split ( "/" ) ; for ( int i = 0 ; i < names . length ; i ++ ) { if ( names [ i ] . length ( ) == 0 ) { if ( i == 0 ) { // root pathNode . addPathStep ( factory . createLocationStepQueryNode ( pathNode ) ) ; } else { // descendant ' / / ' - > invalid path // todo throw or ignore ? // we currently do not throw and add location step for an // empty name ( which is basically the root node ) pathNode . addPathStep ( factory . createLocationStepQueryNode ( pathNode ) ) ; } } else { int idx = names [ i ] . indexOf ( '[' ) ; String name ; int index = LocationStepQueryNode . NONE ; if ( idx > - 1 ) { // contains index name = names [ i ] . substring ( 0 , idx ) ; String suffix = names [ i ] . substring ( idx ) ; String indexStr = suffix . substring ( 1 , suffix . length ( ) - 1 ) ; if ( indexStr . equals ( "%" ) ) { // select all same name siblings index = LocationStepQueryNode . NONE ; } else { try { index = Integer . parseInt ( indexStr ) ; } catch ( NumberFormatException e ) { log . warn ( "Unable to parse index for path element: " + names [ i ] ) ; } } if ( name . equals ( "%" ) ) { name = null ; } } else { // no index specified // - index defaults to 1 if there is an explicit name test // - index defaults to NONE if name test is % name = names [ i ] ; if ( name . equals ( "%" ) ) { name = null ; } else { index = 1 ; } } InternalQName qName = null ; if ( name != null ) { try { qName = resolver . parseJCRName ( name ) . 
getInternalName ( ) ; } catch ( NamespaceException e ) { throw new IllegalArgumentException ( "Illegal name: " + name , e ) ; } catch ( RepositoryException e ) { throw new IllegalArgumentException ( "Illegal name: " + name , e ) ; } } // if name test is % this means also search descendants boolean descendant = name == null ; LocationStepQueryNode step = factory . createLocationStepQueryNode ( pathNode ) ; step . setNameTest ( qName ) ; step . setIncludeDescendants ( descendant ) ; if ( index > 0 ) { step . setIndex ( index ) ; } pathNode . addPathStep ( step ) ; } } pathConstraints . add ( pathNode ) ;
public class XMLWikiPrinter { /** * Print the xml element . In the form { @ code < name att1 = " value1 " att2 = " value2 " / > } . * @ param name the xml element to print * @ param attributes the xml attributes of the element to print */ public void printXMLElement ( String name , String [ ] [ ] attributes ) { } }
Element element = new DefaultElement ( name ) ; if ( attributes != null && attributes . length > 0 ) { for ( String [ ] entry : attributes ) { element . addAttribute ( entry [ 0 ] , entry [ 1 ] ) ; } } try { this . xmlWriter . write ( element ) ; } catch ( IOException e ) { // TODO : add error log here }
public class SubItemUtil { /** * retrieves a flat list of the items in the adapter , respecting subitems regardless of there current visibility * @ param adapter the adapter instance * @ param predicate predicate against which each item will be checked before adding it to the result * @ return list of items in the adapter that apply to the predicate */ public static List < IItem > getAllItems ( final IItemAdapter adapter , IPredicate predicate ) { } }
return getAllItems ( adapter . getAdapterItems ( ) , true , false , predicate ) ;
public class ClassUtility { /** * Convert aStringUnderscored into A _ STRING _ UNDESCORED . * @ param camelCaseString the string to convert * @ return the underscored string */ public static String camelCaseToUnderscore ( final String camelCaseString ) { } }
final StringBuilder sb = new StringBuilder ( ) ; for ( final String camelPart : camelCaseString . split ( "(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])" ) ) { if ( sb . length ( ) > 0 ) { sb . append ( CASE_SEPARATOR ) ; } sb . append ( camelPart . toUpperCase ( Locale . getDefault ( ) ) ) ; } return sb . toString ( ) ;
public class ColumnText { /** * Finds the intersection between the < CODE > yLine < / CODE > and the two * column bounds . It will set the < CODE > lineStatus < / CODE > appropriately . * @ return a < CODE > float [ 2 ] < / CODE > with the x coordinates of the intersection */ protected float [ ] findLimitsOneLine ( ) { } }
float x1 = findLimitsPoint ( leftWall ) ; if ( lineStatus == LINE_STATUS_OFFLIMITS || lineStatus == LINE_STATUS_NOLINE ) return null ; float x2 = findLimitsPoint ( rightWall ) ; if ( lineStatus == LINE_STATUS_NOLINE ) return null ; return new float [ ] { x1 , x2 } ;
public class CmsGalleryController { /** * Gets the filtered list of categories . < p > * @ param filter the search string to use for filtering * @ return the filtered category beans */ private List < CmsCategoryBean > getFilteredCategories ( String filter ) { } }
List < CmsCategoryBean > result ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( filter ) ) { result = new ArrayList < CmsCategoryBean > ( ) ; for ( CmsCategoryBean category : getCategoryList ( ) ) { if ( category . matchesFilter ( filter ) ) { result . add ( category ) ; } } } else { result = getCategoryList ( ) ; } return result ;
public class OpenTracingPlugin { /** * Creates a new { @ link OpenTracingPlugin plugin } by < strong > combining < / strong > the { @ link SpanDecorator decorator ( s ) } of * { @ code this } plugin with the supplied ones . * @ param first first decorator * @ param decorators optional , remaining decorators * @ return a new { @ link OpenTracingPlugin } */ @ CheckReturnValue public OpenTracingPlugin withAdditionalSpanDecorators ( final SpanDecorator first , final SpanDecorator ... decorators ) { } }
return withSpanDecorators ( decorator , SpanDecorator . composite ( first , decorators ) ) ;
public class NodeModelBuilder { /** * Generate a { @ link NodeModel } for use in parsing , based on the selectors * and handlers attached to this builder . * This will also trigger a cascade < code > build ( ) < / code > calls on any attached * < code > ElementHandler < / code > instances . * @ return < code > NodeModel < / code > for parsing */ public final NodeModel < T > build ( ) { } }
for ( Map . Entry < NodeState < T > , List < ElementHandler < T > > > e : statesWithHandlers . entrySet ( ) ) { NodeState < T > state = e . getKey ( ) ; // Run a sub - builder rooted here for each handler // Inject into model NodeModel < T > subModel = new NodeModel < T > ( state ) ; NodeModelBuilder < T > subBuilder = new NodeModelBuilder < T > ( subModel ) ; for ( ElementHandler < T > handler : e . getValue ( ) ) { handler . build ( subBuilder ) ; } // A recursive call is necessary to pick up layers of nesting // beyond the first subBuilder . build ( ) ; } return model ;
public class InjectionPointFactory { /** * Build a InjectionPoint directly from the given ASTType * @ param astType required type * @ param context analysis context * @ return Injection Node */ public InjectionNode buildInjectionNode ( ASTType astType , AnalysisContext context ) { } }
return buildInjectionNode ( Collections . EMPTY_LIST , astType , astType , context ) ;
public class AWSMediaConnectClient { /** * Displays a list of all entitlements that have been granted to this account . This request returns 20 results per * page . * @ param listEntitlementsRequest * @ return Result of the ListEntitlements operation returned by the service . * @ throws ServiceUnavailableException * AWS Elemental MediaConnect is currently unavailable . Try again later . * @ throws TooManyRequestsException * You have exceeded the service request rate limit for your AWS Elemental MediaConnect account . * @ throws BadRequestException * The request that you submitted is not valid . * @ throws InternalServerErrorException * AWS Elemental MediaConnect can ' t fulfill your request because it encountered an unexpected condition . * @ sample AWSMediaConnect . ListEntitlements * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / mediaconnect - 2018-11-14 / ListEntitlements " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ListEntitlementsResult listEntitlements ( ListEntitlementsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListEntitlements ( request ) ;
public class CommonModelFactory { /** * Resolve a { @ link String } against the string pool to obtain the canonical * instance of the string . * @ param value { @ link String } the string to obtain canonical reference for * @ return { @ link String } the canonical reference for the string */ public String resolveValue ( final String value ) { } }
if ( value == null ) { return null ; } final String sharedValue = valuePool . get ( value ) ; if ( sharedValue != null ) { return sharedValue ; } valuePool . put ( value , value ) ; return value ;
public class AuditUtils { /** * Creates an audit entry for the ' membership granted ' even . * @ param organizationId the organization id * @ param data the membership data * @ param securityContext the security context * @ return the audit entry */ public static AuditEntryBean membershipGranted ( String organizationId , MembershipData data , ISecurityContext securityContext ) { } }
AuditEntryBean entry = newEntry ( organizationId , AuditEntityType . Organization , securityContext ) ; entry . setEntityId ( null ) ; entry . setEntityVersion ( null ) ; entry . setWhat ( AuditEntryType . Grant ) ; entry . setData ( toJSON ( data ) ) ; return entry ;
public class FilterListSlowStr { /** * convert an address to a string array , where each level in the array * represent a substring between two periods ( " . " ) of the address . * The rightmost substring will be at index 0 , and so on . * @ param newAddress * the address to convert * @ return the String array representing the substrings of the address */ private String [ ] convertToArray ( String newAddress ) { } }
int start = 0 ; String sub ; List < String > addressWords = new ArrayList < String > ( ) ; String addr = newAddress ; while ( true ) { // fill address from back to front start = addr . lastIndexOf ( '.' ) ; if ( start != - 1 ) { sub = addr . substring ( start + 1 ) ; addressWords . add ( sub ) ; } else { addressWords . add ( addr ) ; break ; } addr = addr . substring ( 0 , start ) ; } String [ ] sa = new String [ addressWords . size ( ) ] ; sa = addressWords . toArray ( sa ) ; return ( sa ) ;
public class AbstractRuleImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case XtextPackage . ABSTRACT_RULE__NAME : return NAME_EDEFAULT == null ? name != null : ! NAME_EDEFAULT . equals ( name ) ; case XtextPackage . ABSTRACT_RULE__TYPE : return type != null ; case XtextPackage . ABSTRACT_RULE__ALTERNATIVES : return alternatives != null ; case XtextPackage . ABSTRACT_RULE__ANNOTATIONS : return annotations != null && ! annotations . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class ContentSpecUtilities { /** * Get the prefix to use for level container fixed urls . * @ param level The level to get the prefix for . * @ return The levels prefix to be used in a fixed url . */ public static String getLevelPrefix ( final Level level ) { } }
// Get the pre link string switch ( level . getLevelType ( ) ) { case APPENDIX : return "appe-" ; case SECTION : return "sect-" ; case PROCESS : return "proc-" ; case CHAPTER : return "chap-" ; case PART : return "part-" ; case PREFACE : return "pref-" ; default : return "" ; }
public class Waiter { /** * Waits for a text to be shown . * @ param classToFilterBy the class to filter by * @ param text the text that needs to be shown , specified as a regular expression . * @ param expectedMinimumNumberOfMatches the minimum number of matches of text that must be shown . { @ code 0 } means any number of matches * @ param timeout the amount of time in milliseconds to wait * @ param scroll { @ code true } if scrolling should be performed * @ param onlyVisible { @ code true } if only visible text views should be waited for * @ param hardStoppage { @ code true } if search is to be stopped when timeout expires * @ return { @ code true } if text is found and { @ code false } if it is not found before the timeout */ public < T extends TextView > T waitForText ( Class < T > classToFilterBy , String text , int expectedMinimumNumberOfMatches , long timeout , boolean scroll , boolean onlyVisible , boolean hardStoppage ) { } }
final long endTime = SystemClock . uptimeMillis ( ) + timeout ; while ( true ) { final boolean timedOut = SystemClock . uptimeMillis ( ) > endTime ; if ( timedOut ) { return null ; } sleeper . sleep ( ) ; if ( ! hardStoppage ) timeout = 0 ; final T textViewToReturn = searcher . searchFor ( classToFilterBy , text , expectedMinimumNumberOfMatches , timeout , scroll , onlyVisible ) ; if ( textViewToReturn != null ) { return textViewToReturn ; } }
public class GJChronology { /** * Factory method returns instances of the GJ cutover chronology . Any * cutover date may be specified . * @ param zone the time zone to use , null is default * @ param gregorianCutover the cutover to use , null means default * @ param minDaysInFirstWeek minimum number of days in first week of the year ; default is 4 */ public static GJChronology getInstance ( DateTimeZone zone , ReadableInstant gregorianCutover , int minDaysInFirstWeek ) { } }
zone = DateTimeUtils . getZone ( zone ) ; Instant cutoverInstant ; if ( gregorianCutover == null ) { cutoverInstant = DEFAULT_CUTOVER ; } else { cutoverInstant = gregorianCutover . toInstant ( ) ; LocalDate cutoverDate = new LocalDate ( cutoverInstant . getMillis ( ) , GregorianChronology . getInstance ( zone ) ) ; if ( cutoverDate . getYear ( ) <= 0 ) { throw new IllegalArgumentException ( "Cutover too early. Must be on or after 0001-01-01." ) ; } } GJCacheKey cacheKey = new GJCacheKey ( zone , cutoverInstant , minDaysInFirstWeek ) ; GJChronology chrono = cCache . get ( cacheKey ) ; if ( chrono == null ) { if ( zone == DateTimeZone . UTC ) { chrono = new GJChronology ( JulianChronology . getInstance ( zone , minDaysInFirstWeek ) , GregorianChronology . getInstance ( zone , minDaysInFirstWeek ) , cutoverInstant ) ; } else { chrono = getInstance ( DateTimeZone . UTC , cutoverInstant , minDaysInFirstWeek ) ; chrono = new GJChronology ( ZonedChronology . getInstance ( chrono , zone ) , chrono . iJulianChronology , chrono . iGregorianChronology , chrono . iCutoverInstant ) ; } GJChronology oldChrono = cCache . putIfAbsent ( cacheKey , chrono ) ; if ( oldChrono != null ) { chrono = oldChrono ; } } return chrono ;
public class Template { /** * Expand the template . * @ param variables containing the values for expansion . * @ return a fully qualified URI with the variables expanded . */ public String expand ( Map < String , ? > variables ) { } }
if ( variables == null ) { throw new IllegalArgumentException ( "variable map is required." ) ; } /* resolve all expressions within the template */ StringBuilder resolved = new StringBuilder ( ) ; for ( TemplateChunk chunk : this . templateChunks ) { if ( chunk instanceof Expression ) { String resolvedExpression = this . resolveExpression ( ( Expression ) chunk , variables ) ; if ( resolvedExpression != null ) { resolved . append ( resolvedExpression ) ; } } else { /* chunk is a literal value */ resolved . append ( chunk . getValue ( ) ) ; } } return resolved . toString ( ) ;
public class StringUtils { /** * Parses command line arguments into a Map . Arguments of the form * - flag1 arg1a arg1b . . . arg1m - flag2 - flag3 arg3a . . . arg3n * will be parsed so that the flag is a key in the Map ( including * the hyphen ) and its value will be a { @ link String } [ ] containing * the optional arguments ( if present ) . The non - flag values not * captured as flag arguments are collected into a String [ ] array * and returned as the value of < code > null < / code > in the Map . In * this invocation , the maximum number of arguments for each flag * can be specified as an { @ link Integer } value of the appropriate * flag key in the < code > flagsToNumArgs < / code > { @ link Map } * argument . ( By default , flags cannot take arguments . ) * Example of usage : * < code > * Map flagsToNumArgs = new HashMap ( ) ; * flagsToNumArgs . put ( " - x " , new Integer ( 2 ) ) ; * flagsToNumArgs . put ( " - d " , new Integer ( 1 ) ) ; * Map result = argsToMap ( args , flagsToNumArgs ) ; * < / code > * If a given flag appears more than once , the extra args are appended to * the String [ ] value for that flag . * @ param args the argument array to be parsed * @ param flagsToNumArgs a { @ link Map } of flag names to { @ link * Integer } values specifying the maximum number of * allowed arguments for that flag ( default 0 ) . * @ return a { @ link Map } of flag names to flag argument { @ link * String } arrays . */ public static Map < String , String [ ] > argsToMap ( String [ ] args , Map < String , Integer > flagsToNumArgs ) { } }
Map < String , String [ ] > result = new HashMap < String , String [ ] > ( ) ; List < String > remainingArgs = new ArrayList < String > ( ) ; for ( int i = 0 ; i < args . length ; i ++ ) { String key = args [ i ] ; if ( key . charAt ( 0 ) == '-' ) { // found a flag Integer maxFlagArgs = flagsToNumArgs . get ( key ) ; int max = maxFlagArgs == null ? 0 : maxFlagArgs . intValue ( ) ; List < String > flagArgs = new ArrayList < String > ( ) ; for ( int j = 0 ; j < max && i + 1 < args . length && args [ i + 1 ] . charAt ( 0 ) != '-' ; i ++ , j ++ ) { flagArgs . add ( args [ i + 1 ] ) ; } if ( result . containsKey ( key ) ) { // append the second specification into the args . String [ ] newFlagArg = new String [ result . get ( key ) . length + flagsToNumArgs . get ( key ) ] ; int oldNumArgs = result . get ( key ) . length ; System . arraycopy ( result . get ( key ) , 0 , newFlagArg , 0 , oldNumArgs ) ; for ( int j = 0 ; j < flagArgs . size ( ) ; j ++ ) { newFlagArg [ j + oldNumArgs ] = flagArgs . get ( j ) ; } result . put ( key , newFlagArg ) ; } else { result . put ( key , flagArgs . toArray ( new String [ flagArgs . size ( ) ] ) ) ; } } else { remainingArgs . add ( args [ i ] ) ; } } result . put ( null , remainingArgs . toArray ( new String [ remainingArgs . size ( ) ] ) ) ; return result ;
public class DateTimeUtil { /** * 获取季度已过的天数 * @ param date 时间 ( { @ link Date } ) * @ return 天数 , 如果date is null , 将返回null */ public static Integer getDayOfSeason ( Date date ) { } }
if ( date == null ) return null ; int day = 0 ; Date [ ] seasonDates = getSeasonDate ( date ) ; Calendar c = Calendar . getInstance ( ) ; c . setTime ( date ) ; int month = c . get ( Calendar . MONTH ) ; if ( month == Calendar . JANUARY || month == Calendar . APRIL || month == Calendar . JULY || month == Calendar . OCTOBER ) { // 季度第一个月 day = getDayOfMonth ( seasonDates [ 0 ] ) ; } else if ( month == Calendar . FEBRUARY || month == Calendar . MAY || month == Calendar . AUGUST || month == Calendar . NOVEMBER ) { // 季度第二个月 day = getDayOfMonth ( seasonDates [ 0 ] ) + getDayOfMonth ( seasonDates [ 1 ] ) ; } else if ( month == Calendar . MARCH || month == Calendar . JUNE || month == Calendar . SEPTEMBER || month == Calendar . DECEMBER ) { // 季度第三个月 day = getDayOfMonth ( seasonDates [ 0 ] ) + getDayOfMonth ( seasonDates [ 1 ] ) + getDayOfMonth ( seasonDates [ 2 ] ) ; } return day ;
public class Maestrano { /** * load Properties from a filePath , in the classPath or absolute * @ param filePath * @ return * @ throws MnoConfigurationException */ public static Properties loadProperties ( String filePath ) throws MnoConfigurationException { } }
Properties properties = new Properties ( ) ; InputStream input = getInputStreamFromClassPathOrFile ( filePath ) ; try { properties . load ( input ) ; } catch ( IOException e ) { throw new MnoConfigurationException ( "Could not load properties file: " + filePath , e ) ; } finally { IOUtils . closeQuietly ( input ) ; } return properties ;
public class IndexScanPlanNode { /** * added for reverse scan purpose only */ public boolean isPredicatesOptimizableForAggregate ( ) { } }
// for reverse scan , need to examine " added " predicates List < AbstractExpression > predicates = ExpressionUtil . uncombinePredicate ( m_predicate ) ; // if the size of predicates doesn ' t equal 1 , can ' t be our added artifact predicates if ( predicates . size ( ) != 1 ) { return false ; } // examin the possible " added " predicates : NOT NULL expr . AbstractExpression expr = predicates . get ( 0 ) ; if ( expr . getExpressionType ( ) != ExpressionType . OPERATOR_NOT ) { return false ; } if ( expr . getLeft ( ) . getExpressionType ( ) != ExpressionType . OPERATOR_IS_NULL ) { return false ; } // Not reverse scan . if ( m_lookupType != IndexLookupType . LT && m_lookupType != IndexLookupType . LTE ) { return false ; } return true ;
public class Smb2ReadResponse { /** * { @ inheritDoc } * @ see jcifs . internal . smb2 . ServerMessageBlock2 # readBytesWireFormat ( byte [ ] , int ) */ @ Override protected int readBytesWireFormat ( byte [ ] buffer , int bufferIndex ) throws SMBProtocolDecodingException { } }
int start = bufferIndex ; int structureSize = SMBUtil . readInt2 ( buffer , bufferIndex ) ; if ( structureSize == 9 ) { return this . readErrorResponse ( buffer , bufferIndex ) ; } else if ( structureSize != 17 ) { throw new SMBProtocolDecodingException ( "Expected structureSize = 17" ) ; } short dataOffset = buffer [ bufferIndex + 2 ] ; bufferIndex += 4 ; this . dataLength = SMBUtil . readInt4 ( buffer , bufferIndex ) ; bufferIndex += 4 ; this . dataRemaining = SMBUtil . readInt4 ( buffer , bufferIndex ) ; bufferIndex += 4 ; bufferIndex += 4 ; // Reserved2 int dataStart = getHeaderStart ( ) + dataOffset ; if ( this . dataLength + this . outputBufferOffset > this . outputBuffer . length ) { throw new SMBProtocolDecodingException ( "Buffer to small for read response" ) ; } System . arraycopy ( buffer , dataStart , this . outputBuffer , this . outputBufferOffset , this . dataLength ) ; bufferIndex = Math . max ( bufferIndex , dataStart + this . dataLength ) ; return bufferIndex - start ;
public class DefaultShardManagerBuilder { /** * Sets the range of shards the { @ link DefaultShardManager DefaultShardManager } should contain . * This is useful if you want to split your shards between multiple JVMs or servers . * < p > < b > This does not have any effect if the total shard count is set to { @ code - 1 } ( get recommended shards from discord ) . < / b > * @ param minShardId * The lowest shard id the DefaultShardManager should contain * @ param maxShardId * The highest shard id the DefaultShardManager should contain * @ throws IllegalArgumentException * If either minShardId is negative , maxShardId is lower than shardsTotal or * minShardId is lower than or equal to maxShardId * @ return The DefaultShardManagerBuilder instance . Useful for chaining . */ public DefaultShardManagerBuilder setShards ( final int minShardId , final int maxShardId ) { } }
Checks . notNegative ( minShardId , "minShardId" ) ; Checks . check ( maxShardId < this . shardsTotal , "maxShardId must be lower than shardsTotal" ) ; Checks . check ( minShardId <= maxShardId , "minShardId must be lower than or equal to maxShardId" ) ; List < Integer > shards = new ArrayList < > ( maxShardId - minShardId + 1 ) ; for ( int i = minShardId ; i <= maxShardId ; i ++ ) shards . add ( i ) ; this . shards = shards ; return this ;
public class CoinbaseAccountServiceRaw { /** * Authenticated resource that creates a payment button , page , or iFrame to accept Bitcoin on your * website . This can be used to accept Bitcoin for an individual item or to integrate with your * existing shopping cart solution . For example , you could create a new payment button for each * shopping cart on your website , setting the total and order number in the button at checkout . * @ param button A { @ code CoinbaseButton } containing the desired button configuration for Coinbase * to create . * @ return newly created { @ code CoinbaseButton } . * @ throws IOException * @ see < a * href = " https : / / coinbase . com / api / doc / 1.0 / buttons / create . html " > coinbase . com / api / doc / 1.0 / buttons / create . html < / a > */ public CoinbaseButton createCoinbaseButton ( CoinbaseButton button ) throws IOException { } }
final CoinbaseButton createdButton = coinbase . createButton ( button , exchange . getExchangeSpecification ( ) . getApiKey ( ) , signatureCreator , exchange . getNonceFactory ( ) ) ; return handleResponse ( createdButton ) ;
public class BatchListObjectAttributesResponseMarshaller { /** * Marshall the given parameter object . */ public void marshall ( BatchListObjectAttributesResponse batchListObjectAttributesResponse , ProtocolMarshaller protocolMarshaller ) { } }
if ( batchListObjectAttributesResponse == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchListObjectAttributesResponse . getAttributes ( ) , ATTRIBUTES_BINDING ) ; protocolMarshaller . marshall ( batchListObjectAttributesResponse . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class BeanInfoManager { /** * Returns the BeanInfoProperty for the specified property in the * given class , or null if not found . */ public static BeanInfoProperty getBeanInfoProperty ( Class pClass , String pPropertyName , Logger pLogger ) throws ELException { } }
return getBeanInfoManager ( pClass ) . getProperty ( pPropertyName , pLogger ) ;
public class ApiOvhCloud { /** * Get the detail of a group * REST : GET / cloud / project / { serviceName } / instance / group * @ param region [ required ] Instance region * @ param serviceName [ required ] Project id */ public ArrayList < OvhInstanceGroup > project_serviceName_instance_group_GET ( String serviceName , String region ) throws IOException { } }
String qPath = "/cloud/project/{serviceName}/instance/group" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "region" , region ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t23 ) ;
public class DRL5Expressions {
    /**
     * src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:450:1: shiftOp : ( LESS LESS | GREATER GREATER GREATER | GREATER GREATER ) ;
     */
    // NOTE: ANTLR-generated parser rule — matches <<, >>> or >> using up to three
    // tokens of lookahead; do not hand-edit the decision logic.
    public final void shiftOp ( ) throws RecognitionException {
        try {
            // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:451:5: ( ( LESS LESS | GREATER GREATER GREATER | GREATER GREATER ) )
            // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:451:7: ( LESS LESS | GREATER GREATER GREATER | GREATER GREATER )
            {
            // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:451:7: ( LESS LESS | GREATER GREATER GREATER | GREATER GREATER )
            // alt46: 1 = "<<", 2 = ">>>", 3 = ">>"
            int alt46 = 3;
            int LA46_0 = input . LA ( 1 ) ;
            if ( ( LA46_0 == LESS ) ) {
                alt46 = 1;
            }
            else if ( ( LA46_0 == GREATER ) ) {
                int LA46_2 = input . LA ( 2 ) ;
                if ( ( LA46_2 == GREATER ) ) {
                    int LA46_3 = input . LA ( 3 ) ;
                    if ( ( LA46_3 == GREATER ) ) {
                        alt46 = 2;
                    }
                    // a third token that can legally start an expression means ">>" followed by an operand
                    else if ( ( LA46_3 == EOF || LA46_3 == BOOL || ( LA46_3 >= DECIMAL && LA46_3 <= DECR ) || LA46_3 == FLOAT || LA46_3 == HEX || ( LA46_3 >= ID && LA46_3 <= INCR ) || ( LA46_3 >= LEFT_PAREN && LA46_3 <= LESS ) || LA46_3 == MINUS || LA46_3 == NEGATION || LA46_3 == NULL || LA46_3 == PLUS || ( LA46_3 >= STAR && LA46_3 <= TIME_INTERVAL ) ) ) {
                        alt46 = 3;
                    }
                    else {
                        // no viable alternative at the third lookahead token
                        if ( state . backtracking > 0 ) { state . failed = true ; return ; }
                        int nvaeMark = input . mark ( ) ;
                        try {
                            // consume up to the failing position so the exception reports it accurately
                            for ( int nvaeConsume = 0 ; nvaeConsume < 3 - 1 ; nvaeConsume ++ ) {
                                input . consume ( ) ;
                            }
                            NoViableAltException nvae = new NoViableAltException ( "" , 46 , 3 , input ) ;
                            throw nvae ;
                        } finally {
                            input . rewind ( nvaeMark ) ;
                        }
                    }
                }
                else {
                    // GREATER not followed by GREATER: not a shift operator
                    if ( state . backtracking > 0 ) { state . failed = true ; return ; }
                    int nvaeMark = input . mark ( ) ;
                    try {
                        input . consume ( ) ;
                        NoViableAltException nvae = new NoViableAltException ( "" , 46 , 2 , input ) ;
                        throw nvae ;
                    } finally {
                        input . rewind ( nvaeMark ) ;
                    }
                }
            }
            else {
                // first token is neither LESS nor GREATER: cannot be a shift operator
                if ( state . backtracking > 0 ) { state . failed = true ; return ; }
                NoViableAltException nvae = new NoViableAltException ( "" , 46 , 0 , input ) ;
                throw nvae ;
            }
            switch ( alt46 ) {
                case 1 :
                    // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:451:9: LESS LESS
                    {
                    match ( input , LESS , FOLLOW_LESS_in_shiftOp2100 ) ; if ( state . failed ) return ;
                    match ( input , LESS , FOLLOW_LESS_in_shiftOp2102 ) ; if ( state . failed ) return ;
                    }
                    break ;
                case 2 :
                    // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:452:11: GREATER GREATER GREATER
                    {
                    match ( input , GREATER , FOLLOW_GREATER_in_shiftOp2114 ) ; if ( state . failed ) return ;
                    match ( input , GREATER , FOLLOW_GREATER_in_shiftOp2116 ) ; if ( state . failed ) return ;
                    match ( input , GREATER , FOLLOW_GREATER_in_shiftOp2118 ) ; if ( state . failed ) return ;
                    }
                    break ;
                case 3 :
                    // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:453:11: GREATER GREATER
                    {
                    match ( input , GREATER , FOLLOW_GREATER_in_shiftOp2130 ) ; if ( state . failed ) return ;
                    match ( input , GREATER , FOLLOW_GREATER_in_shiftOp2132 ) ; if ( state . failed ) return ;
                    }
                    break ;
            }
            }
        }
        catch ( RecognitionException re ) {
            throw re ;
        }
        finally {
            // do for sure before leaving
        }
    }
}
public class AccentResources { /** * Get a reference to a resource that is equivalent to the one requested , * but changing the tint from the original red to the given color . */ private InputStream getTintTransformationResourceStream ( int id , TypedValue value , int color ) { } }
Bitmap bitmap = getBitmapFromResource ( id , value ) ; bitmap = BitmapUtils . processTintTransformationMap ( bitmap , color ) ; return getStreamFromBitmap ( bitmap ) ;
public class ConcurrentServiceReferenceMap { /** * Associates the reference with the key . * @ param key Key associated with this reference * @ param reference ServiceReference for the target service * @ return true if this is replacing a previous ( non - null ) service reference */ public boolean putReference ( K key , ServiceReference < V > reference ) { } }
if ( key == null || reference == null ) return false ; ConcurrentServiceReferenceElement < V > element = new ConcurrentServiceReferenceElement < V > ( referenceName , reference ) ; return ( elementMap . put ( key , element ) != null ) ;
public class ClientFactory { /** * Creates a message sender asynchronously to the entity using the client settings . * @ param namespaceName namespace name of entity * @ param entityPath path of entity * @ param clientSettings client settings * @ return a CompletableFuture representing the pending creating of IMessageSender instance */ public static CompletableFuture < IMessageSender > createMessageSenderFromEntityPathAsync ( String namespaceName , String entityPath , ClientSettings clientSettings ) { } }
Utils . assertNonNull ( "namespaceName" , namespaceName ) ; Utils . assertNonNull ( "entityPath" , entityPath ) ; return createMessageSenderFromEntityPathAsync ( Util . convertNamespaceToEndPointURI ( namespaceName ) , entityPath , clientSettings ) ;
public class Slf4jLogger { /** * Log an info message . Calls SLF4J { @ link Logger # info ( String ) } . * @ param message The message to log . */ @ Override public void info ( String message ) { } }
if ( this . logger . isInfoEnabled ( ) ) { this . logger . info ( buildMessage ( message ) ) ; }
public class JsonWriter { /** * Write a list of entities ( feed ) to the JSON stream . * @ param entities The list of entities to fill in the JSON stream . * @ param contextUrl The ' Context URL ' to write . * @ param meta Additional metadata for the writer . * @ return the rendered feed . * @ throws ODataRenderException In case it is not possible to write to the JSON stream . */ public String writeFeed ( List < ? > entities , String contextUrl , Map < String , Object > meta ) throws ODataRenderException { } }
this . contextURL = checkNotNull ( contextUrl ) ; try { return writeJson ( entities , meta ) ; } catch ( IOException | IllegalAccessException | NoSuchFieldException | ODataEdmException | ODataRenderException e ) { LOG . error ( "Not possible to marshall feed stream JSON" ) ; throw new ODataRenderException ( "Not possible to marshall feed stream JSON: " , e ) ; }
public class DefaultClusterManager { /** * Selects a group in the cluster . */ private void selectGroup ( final Object key , final Handler < AsyncResult < String > > doneHandler ) { } }
context . execute ( new Action < String > ( ) { @ Override public String perform ( ) { String address = groupSelectors . get ( key ) ; if ( address != null ) { return address ; } Set < String > groups = DefaultClusterManager . this . groups . keySet ( ) ; int index = new Random ( ) . nextInt ( groups . size ( ) ) ; int i = 0 ; for ( String group : groups ) { if ( i == index ) { groupSelectors . put ( key , group ) ; return group ; } i ++ ; } return null ; } } , doneHandler ) ;
public class MultiVertexGeometryImpl { /** * Checked vs . Jan 11 , 2011 */ @ Override public void setAttribute ( int semantics , int offset , int ordinate , double value ) { } }
if ( offset < 0 || offset >= m_pointCount ) throw new IndexOutOfBoundsException ( ) ; int ncomps = VertexDescription . getComponentCount ( semantics ) ; if ( ordinate >= ncomps ) throw new IndexOutOfBoundsException ( ) ; addAttribute ( semantics ) ; _verifyAllStreams ( ) ; int attributeIndex = m_description . getAttributeIndex ( semantics ) ; notifyModified ( DirtyFlags . DirtyCoordinates ) ; m_vertexAttributes [ attributeIndex ] . writeAsDbl ( offset * ncomps + ordinate , value ) ;
public class NBAcceptChannelSelector {

    /**
     * @see com.ibm.ws.tcpchannel.internal.ChannelSelector#performRequest()
     */
    // Drains the accept selector's ready set: for each ready server socket,
    // accepts a connection, validates it, wraps it in a SocketIOChannel and
    // hands it to the endpoint. Repeated-failure back-off is driven by the
    // running numExceptions counter. Always returns false.
    @Override
    protected boolean performRequest ( ) {
        SocketChannel sc = null ;
        boolean closeOnError = false ;
        // If we were woken up because we have work to do, do it.
        Set < SelectionKey > keySet = this . selector . selectedKeys ( ) ;
        Iterator < SelectionKey > keyIterator = keySet . iterator ( ) ;
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) {
            Tr . debug ( this , tc , "performRequest - processing " + keySet . size ( ) + " items" ) ;
        }
        while ( keyIterator . hasNext ( ) ) {
            closeOnError = false ;
            // If we get exceptions repeatedly, we may need to take a break for a number of reasons
            if ( numExceptions >= 3100 ) {
                // if we don't recover after 290 more seconds, wait for 10 minutes
                if ( pauseAccept ( ) ) {
                    resetExceptions ( ) ; // clear exceptions if we weren't interrupted
                }
                continue ;
            } else if ( numExceptions >= 200 ) {
                // if we get exceptions repeatedly, wait a tenth to give
                // other threads some time. It should take about 10 seconds for
                // us to get up to 200 exceptions (via trace analysis).
                // NOTE(review): this FFDC fires only when the counter is exactly 1500,
                // since the counter increments by one per failure — confirm intended.
                if ( numExceptions == 1500 ) {
                    FFDCFilter . processException ( new Exception ( "TCP channel has received 1500 exceptions in a row on the accept selector" ) , getClass ( ) . getName ( ) , "101" , this ) ;
                }
                try {
                    Thread . sleep ( 100 ) ;
                } catch ( InterruptedException ie ) {
                    // ignore it
                }
            }
            // PK40415 - change audit handling of errors
            SelectionKey key = keyIterator . next ( ) ;
            TCPPort endPoint = ( TCPPort ) key . attachment ( ) ;
            TCPChannel tcpChannel = endPoint . getTCPChannel ( ) ;
            try {
                // safe remove from set while iterating
                keyIterator . remove ( ) ;
                // perform accept
                ServerSocketChannel serverSocketChannel = ( ServerSocketChannel ) key . channel ( ) ;
                try {
                    sc = serverSocketChannel . accept ( ) ;
                    // Configure all inbound channels to be non-blocking
                    if ( sc != null ) {
                        sc . configureBlocking ( false ) ;
                    }
                } catch ( IOException ioe ) {
                    incrementExceptions ( ) ;
                    this . numAcceptIOExceptions ++ ;
                    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) {
                        Tr . event ( this , tc , "TCP Channel: " + tcpChannel . getExternalName ( ) + " caught IOException doing accept: " + ioe + " total=" + numExceptions + " count=" + numAcceptIOExceptions ) ;
                    }
                    // Since we could be monitoring several serverSocketChannels, we need
                    // to continue processing even though we got an IOException on this
                    // serverSocketChannel.
                    continue ;
                }
                // sometimes the accept can return null. This seems to happen when
                // there is a network problem, and we got notified it was ready to
                // accept, but when we went to do the accept we couldn't complete the accept. If
                // this happens, just skip this and go on
                if ( sc == null ) {
                    incrementExceptions ( ) ;
                    this . numAcceptNulls ++ ;
                    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) {
                        Tr . event ( this , tc , "TCP Channel: " + tcpChannel . getExternalName ( ) + " accept() returned null, total=" + numExceptions + " count=" + numAcceptNulls ) ;
                    }
                    continue ;
                }
                Socket socket = sc . socket ( ) ;
                if ( ! testSocket ( sc ) ) {
                    continue ;
                }
                // from this point on an accepted socket exists and must be closed on failure
                closeOnError = true ;
                if ( ! tcpChannel . verifyConnection ( socket ) ) {
                    closeSocketChannel ( sc ) ;
                    // if we made it this far, then the accept worked without exceptions
                    if ( 0 < this . numExceptions ) {
                        resetExceptions ( ) ;
                    }
                    continue ;
                }
                SocketIOChannel ioSocket = null ;
                try {
                    ioSocket = tcpChannel . createInboundSocketIOChannel ( sc ) ;
                } catch ( IOException ioe ) {
                    // no FFDC required
                    incrementExceptions ( ) ;
                    this . numConfigureIOExceptions ++ ;
                    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) {
                        Tr . event ( this , tc , "IOException caught while configuring socket: " + ioe + ", total=" + numExceptions + " count=" + numConfigureIOExceptions ) ;
                    }
                    closeSocketChannel ( sc ) ;
                    continue ;
                }
                // hand the fully configured connection to the endpoint
                endPoint . processNewConnection ( ioSocket ) ;
            } catch ( CancelledKeyException cke ) {
                // no FFDC required
                // Should only get this if the socket was closed. Therefore
                // log error and continue processing
                incrementExceptions ( ) ;
                this . numCancelledKeys ++ ;
                if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) {
                    Tr . event ( this , tc , "Cancelled key exception found, cke=" + cke + " total=" + numExceptions + " count=" + numCancelledKeys ) ;
                }
                continue ;
            } catch ( Throwable t ) {
                // don't leak sockets if we can't proceed after the accept
                if ( closeOnError ) {
                    closeSocketChannel ( sc ) ;
                }
                // the catcher of the RTE will do the FFDC
                throw new RuntimeException ( t ) ;
            }
            // if we made it this far, then the accept worked without exceptions
            if ( 0 < this . numExceptions ) {
                resetExceptions ( ) ;
            }
        } // end - while
        return false ;
    }
}
public class ServiceExtensionLoader { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . api . ExtensionLoader # getArchiveFormatFromExtensionMapping ( java . lang . Class ) */ public < T extends Archive < T > > ArchiveFormat getArchiveFormatFromExtensionMapping ( final Class < T > type ) { } }
ExtensionWrapper extensionWrapper = extensionMappings . get ( type ) ; if ( extensionWrapper == null ) { loadExtensionMapping ( type ) ; } extensionWrapper = extensionMappings . get ( type ) ; if ( extensionWrapper == null ) { throw UnknownExtensionTypeExceptionDelegator . newExceptionInstance ( type ) ; } String archiveFormat = extensionWrapper . getProperty ( "archiveFormat" ) ; return ArchiveFormat . valueOf ( archiveFormat ) ;
public class Request { /** * Creates a new Request configured to post a status update to a user ' s feed . * @ param session * the Session to use , or null ; if non - null , the session must be in an opened state * @ param message * the text of the status update * @ param place * an optional place to associate with the post * @ param tags * an optional list of users to tag in the post * @ param callback * a callback that will be called when the request is completed to handle success or error conditions * @ return a Request that is ready to execute */ public static Request newStatusUpdateRequest ( Session session , String message , GraphPlace place , List < GraphUser > tags , Callback callback ) { } }
List < String > tagIds = null ; if ( tags != null ) { tagIds = new ArrayList < String > ( tags . size ( ) ) ; for ( GraphUser tag : tags ) { tagIds . add ( tag . getId ( ) ) ; } } String placeId = place == null ? null : place . getId ( ) ; return newStatusUpdateRequest ( session , message , placeId , tagIds , callback ) ;
public class FutureStreamUtils {

    /**
     * Perform a forEach operation over the Stream without closing it, capturing any elements and errors in the supplied consumers, but only consuming
     * the specified number of elements from the Stream, at this time. More elements can be consumed later, by calling request on the returned Subscription;
     * when the entire Stream has been processed an onComplete event will be received.
     *
     * @param stream the Stream to consume data from
     * @param x number of elements to consume from the Stream at this time
     * @param consumerElement accepts incoming elements from the Stream
     * @param consumerError accepts incoming processing errors from the Stream
     * @param onComplete run after an onComplete event
     * @return tuple of (future Subscription, runnable that performs the initial request of x elements,
     *         future completed with true once the stream is exhausted)
     */
    public static < T , X extends Throwable > Tuple3 < CompletableFuture < Subscription > , Runnable , CompletableFuture < Boolean > > forEachXEvents ( final Stream < T > stream , final long x , final Consumer < ? super T > consumerElement , final Consumer < ? super Throwable > consumerError , final Runnable onComplete ) {
        // completed with true once the underlying iterator is exhausted
        final CompletableFuture < Boolean > streamCompleted = new CompletableFuture < > ( ) ;
        final Subscription s = new Subscription ( ) {
            // single shared iterator: repeated request(n) calls resume where the last left off
            Iterator < T > it = stream . iterator ( ) ;
            // cancel() flips this to stop an in-flight request loop
            volatile boolean running = true ;

            @ Override
            public void request ( final long n ) {
                for ( int i = 0 ; i < n && running ; i ++ ) {
                    try {
                        if ( it . hasNext ( ) ) {
                            consumerElement . accept ( it . next ( ) ) ;
                        } else {
                            // exhausted: fire onComplete, then (even if it throws)
                            // mark the stream completed and stop the loop
                            try {
                                onComplete . run ( ) ;
                            } finally {
                                streamCompleted . complete ( true ) ;
                                break ;
                            }
                        }
                    } catch ( final Throwable t ) {
                        // per-element failures are reported but do not stop consumption
                        consumerError . accept ( t ) ;
                    }
                }
            }

            @ Override
            public void cancel ( ) {
                running = false ;
            }
        } ;
        final CompletableFuture < Subscription > subscription = CompletableFuture . completedFuture ( s ) ;
        // the Runnable performs the initial batch of x requests when invoked
        return tuple ( subscription , ( ) -> {
            s . request ( x ) ;
        } , streamCompleted ) ;
    }
}
public class FlowTypeCheck { /** * Check that a given variable declaration is not empty . That is , the declared * type is not equivalent to void . This is an important sanity check . * @ param d */ private void checkNonEmpty ( Decl . Variable d , LifetimeRelation lifetimes ) { } }
if ( relaxedSubtypeOperator . isVoid ( d . getType ( ) , lifetimes ) ) { syntaxError ( d . getType ( ) , EMPTY_TYPE ) ; }
public class Grid { /** * Retrieves the grid instance , as defined in the given configuration file . * @ param configFile The name of the configuration file containing the grid definition . * @ param properties A { @ link Properties Properties } object containing the grid ' s properties to be injected into placeholders * in the configuration file . * This parameter is helpful when you want to use the same xml configuration with different properties for different * instances . * @ return The grid instance . */ public static Grid getInstance ( String configFile , Properties properties ) throws InterruptedException { } }
return getInstance ( configFile == null ? null : new FileSystemResource ( configFile ) , ( Object ) properties ) ;
public class Introspector { /** * Method which attempts to instantiate a BeanInfo object of the supplied * classname * @ param theBeanInfoClassName - * the Class Name of the class of which the BeanInfo is an * instance * @ param classLoader * @ return A BeanInfo object which is an instance of the Class named * theBeanInfoClassName null if the Class does not exist or if there * are problems instantiating the instance */ private static BeanInfo loadBeanInfo ( String beanInfoClassName , Class < ? > beanClass ) throws Exception { } }
try { ClassLoader cl = beanClass . getClassLoader ( ) ; if ( cl != null ) { return ( BeanInfo ) Class . forName ( beanInfoClassName , true , beanClass . getClassLoader ( ) ) . newInstance ( ) ; } } catch ( Exception e ) { // fall through } try { return ( BeanInfo ) Class . forName ( beanInfoClassName , true , ClassLoader . getSystemClassLoader ( ) ) . newInstance ( ) ; } catch ( Exception e ) { // fall through } return ( BeanInfo ) Class . forName ( beanInfoClassName , true , Thread . currentThread ( ) . getContextClassLoader ( ) ) . newInstance ( ) ;
public class PyGenerator { /** * Generate the given object . * @ param event the event . * @ param context the context . */ protected void _generate ( SarlEvent event , IExtraLanguageGeneratorContext context ) { } }
final JvmDeclaredType jvmType = getJvmModelAssociations ( ) . getInferredType ( event ) ; final PyAppendable appendable = createAppendable ( jvmType , context ) ; final List < JvmTypeReference > superTypes ; if ( event . getExtends ( ) != null ) { superTypes = Collections . singletonList ( event . getExtends ( ) ) ; } else { superTypes = Collections . singletonList ( getTypeReferences ( ) . getTypeForName ( Event . class , event ) ) ; } if ( generateTypeDeclaration ( this . qualifiedNameProvider . getFullyQualifiedName ( event ) . toString ( ) , event . getName ( ) , event . isAbstract ( ) , superTypes , getTypeBuilder ( ) . getDocumentation ( event ) , true , event . getMembers ( ) , appendable , context , null ) ) { final QualifiedName name = getQualifiedNameProvider ( ) . getFullyQualifiedName ( event ) ; writeFile ( name , appendable , context ) ; }
public class LineReader { /** * Line and column starts at 1 */ public int positionFor ( int line , int column ) { } }
if ( lines . size ( ) < line ) return - 1 ; return lines . get ( line - 1 ) + column - 1 ;
public class DrizzleConnection { /** * Undoes all changes made after the given < code > Savepoint < / code > object was set . * This method should be used only when auto - commit has been disabled . * @ param savepoint the < code > Savepoint < / code > object to roll back to * @ throws java . sql . SQLException if a database access error occurs , this method is called while participating in a * distributed transaction , this method is called on a closed connection , the * < code > Savepoint < / code > object is no longer valid , or this < code > Connection < / code > * object is currently in auto - commit mode * @ throws java . sql . SQLFeatureNotSupportedException * if the JDBC driver does not support this method * @ see java . sql . Savepoint * @ see # rollback * @ since 1.4 */ public void rollback ( final Savepoint savepoint ) throws SQLException { } }
try { protocol . rollback ( savepoint . toString ( ) ) ; } catch ( QueryException e ) { throw SQLExceptionMapper . get ( e ) ; }
public class AbstractX509PrincipalResolver { /** * Get alternate principal if alternate attribute configured . * @ param certificate X509 Certificate of user * @ return principal using alternate attribute or null if none configured */ protected String getAlternatePrincipal ( final X509Certificate certificate ) { } }
if ( alternatePrincipalAttribute == null ) { return null ; } val attributes = extractPersonAttributes ( certificate ) ; val attribute = attributes . get ( alternatePrincipalAttribute ) ; if ( attribute == null ) { LOGGER . debug ( "Attempt to get alternate principal with attribute [{}] was unsuccessful." , alternatePrincipalAttribute ) ; return null ; } val optionalAttribute = CollectionUtils . firstElement ( attribute ) ; if ( optionalAttribute . isEmpty ( ) ) { LOGGER . debug ( "Alternate attribute list for [{}] was empty." , alternatePrincipalAttribute ) ; return null ; } val alternatePrincipal = optionalAttribute . get ( ) . toString ( ) ; if ( StringUtils . isNotEmpty ( alternatePrincipal ) ) { LOGGER . debug ( "Using alternate principal attribute [{}]" , alternatePrincipal ) ; return alternatePrincipal ; } LOGGER . debug ( "Returning null principal id..." ) ; return null ;
public class Page { /** * Checks the dates for consistency . */ private void checkDates ( ) { } }
DateTime created = this . dateCreated ; DateTime published = this . datePublished ; DateTime modified = this . dateModified ; DateTime reviewed = this . dateReviewed ; if ( created != null && published != null && published . compareTo ( created ) < 0 ) throw new IllegalArgumentException ( "published may not be before created" ) ; if ( created != null && modified != null && modified . compareTo ( created ) < 0 ) throw new IllegalArgumentException ( "modified may not be before created" ) ; if ( created != null && reviewed != null && reviewed . compareTo ( created ) < 0 ) throw new IllegalArgumentException ( "reviewed may not be before created" ) ;
public class InstanceGroupManagerClient { /** * Retrieves the list of managed instance groups and groups them by zone . * < p > Sample code : * < pre > < code > * try ( InstanceGroupManagerClient instanceGroupManagerClient = InstanceGroupManagerClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( InstanceGroupManagersScopedList element : instanceGroupManagerClient . aggregatedListInstanceGroupManagers ( project . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListInstanceGroupManagersPagedResponse aggregatedListInstanceGroupManagers ( String project ) { } }
AggregatedListInstanceGroupManagersHttpRequest request = AggregatedListInstanceGroupManagersHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return aggregatedListInstanceGroupManagers ( request ) ;
public class RandomDateUtils { /** * Returns a random { @ link LocalDateTime } that is after the given { @ link LocalDateTime } . * @ param after the value that returned { @ link LocalDateTime } must be after * @ return the random { @ link LocalDateTime } * @ throws IllegalArgumentException if after is null or if after is equal to or after { @ link * RandomDateUtils # MAX _ INSTANT } */ public static LocalDateTime randomLocalDateTimeAfter ( LocalDateTime after ) { } }
checkArgument ( after != null , "After must be non-null" ) ; Instant instant = randomInstantAfter ( after . toInstant ( UTC_OFFSET ) ) ; return LocalDateTime . ofInstant ( instant , UTC ) ;
public class TrainingsImpl { /** * Get images by id for a given project iteration . * This API will return a set of Images for the specified tags and optionally iteration . If no iteration is specified the * current workspace is used . * @ param projectId The project id * @ param imageIds The list of image ids to retrieve . Limited to 256 * @ param iterationId The iteration id . Defaults to workspace * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; Image & gt ; object */ public Observable < ServiceResponse < List < Image > > > getImagesByIdsWithServiceResponseAsync ( UUID projectId , List < String > imageIds , UUID iterationId ) { } }
if ( projectId == null ) { throw new IllegalArgumentException ( "Parameter projectId is required and cannot be null." ) ; } if ( this . client . apiKey ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiKey() is required and cannot be null." ) ; } Validator . validate ( imageIds ) ; String imageIdsConverted = this . client . serializerAdapter ( ) . serializeList ( imageIds , CollectionFormat . CSV ) ; return service . getImagesByIds ( projectId , imageIdsConverted , iterationId , this . client . apiKey ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < Image > > > > ( ) { @ Override public Observable < ServiceResponse < List < Image > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < List < Image > > clientResponse = getImagesByIdsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class ApiOvhDomain { /** * Enable Dnssec * REST : POST / domain / zone / { zoneName } / dnssec * @ param zoneName [ required ] The internal name of your zone */ public void zone_zoneName_dnssec_POST ( String zoneName ) throws IOException { } }
String qPath = "/domain/zone/{zoneName}/dnssec" ; StringBuilder sb = path ( qPath , zoneName ) ; exec ( qPath , "POST" , sb . toString ( ) , null ) ;
public class JdbcExtractor { /** * If input query is null or ' * ' in the select list , consider all columns . * @ return true , to select all colums . else , false . */ private boolean isSelectAllColumns ( ) { } }
String columnProjection = this . getInputColumnProjection ( ) ; if ( columnProjection == null || columnProjection . trim ( ) . equals ( "*" ) || columnProjection . contains ( ".*" ) ) { return true ; } return false ;
public class DistributedAnalysisRunner { /** * Finds a source column which is appropriate for an ORDER BY clause in the * generated paginated queries * @ param jobBuilder * @ return */ private InputColumn < ? > findOrderByColumn ( final AnalysisJobBuilder jobBuilder ) { } }
final Table sourceTable = jobBuilder . getSourceTables ( ) . get ( 0 ) ; // preferred strategy : Use the primary key final List < Column > primaryKeys = sourceTable . getPrimaryKeys ( ) ; if ( primaryKeys . size ( ) == 1 ) { final Column primaryKey = primaryKeys . get ( 0 ) ; final InputColumn < ? > sourceColumn = jobBuilder . getSourceColumnByName ( primaryKey . getName ( ) ) ; if ( sourceColumn == null ) { jobBuilder . addSourceColumn ( primaryKey ) ; logger . info ( "Added PK source column for ORDER BY clause on slave jobs: {}" , sourceColumn ) ; return jobBuilder . getSourceColumnByName ( primaryKey . getName ( ) ) ; } else { logger . info ( "Using existing PK source column for ORDER BY clause on slave jobs: {}" , sourceColumn ) ; return sourceColumn ; } } else { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Found {} primary keys, cannot select a single for ORDER BY clause on slave jobs: {}" , primaryKeys . size ( ) , primaryKeys . size ( ) ) ; } } // secondary strategy : See if there ' s a source column called something // like ' ID ' or so , and use that . final List < MetaModelInputColumn > sourceColumns = jobBuilder . getSourceColumns ( ) ; final String tableName = sourceTable . getName ( ) . toLowerCase ( ) ; for ( final MetaModelInputColumn sourceColumn : sourceColumns ) { String name = sourceColumn . getName ( ) ; if ( name != null ) { name = StringUtils . replaceWhitespaces ( name , "" ) ; name = StringUtils . replaceAll ( name , "_" , "" ) ; name = StringUtils . replaceAll ( name , "-" , "" ) ; name = name . toLowerCase ( ) ; if ( "id" . equals ( name ) || ( tableName + "id" ) . equals ( name ) || ( tableName + "number" ) . equals ( name ) || ( tableName + "key" ) . equals ( name ) ) { logger . 
info ( "Using existing source column for ORDER BY clause on slave jobs: {}" , sourceColumn ) ; return sourceColumn ; } } } // last resort : Pick any source column and sort on that ( might not work // if the column contains a lot of repeated values ) final MetaModelInputColumn sourceColumn = sourceColumns . get ( 0 ) ; logger . warn ( "Couldn't pick a good source column for ORDER BY clause on slave jobs. Picking the first column: {}" , sourceColumn ) ; return sourceColumn ;
public class AmazonWebServiceRequest { /** * Sets the amount of time ( in milliseconds ) to allow the client to complete the execution of * an API call . This timeout covers the entire client execution except for marshalling . This * includes request handler execution , all HTTP request including retries , unmarshalling , etc . * This feature requires buffering the entire response ( for non - streaming APIs ) into memory to * enforce a hard timeout when reading the response . For APIs that return large responses this * could be expensive . * The client execution timeout feature doesn ' t have strict guarantees on how quickly a request * is aborted when the timeout is breached . The typical case aborts the request within a few * milliseconds but there may occasionally be requests that don ' t get aborted until several * seconds after the timer has been breached . Because of this the client execution timeout * feature should not be used when absolute precision is needed . * This may be used together with { @ link AmazonWebServiceRequest # setSdkRequestTimeout ( int ) } to * enforce both a timeout on each individual HTTP request ( i . e . each retry ) and the total time * spent on all requests across retries ( i . e . the ' client execution ' time ) . A non - positive value * disables this feature . * < b > Note : < / b > This feature is not compatible with Java 1.6. * @ param sdkClientExecutionTimeout * The amount of time ( in milliseconds ) to allow the client to complete the execution * of an API call . A non - positive value disables the timeout for this request . * @ return The updated AmazonWebServiceRequest object for method chaining * @ see { @ link AmazonWebServiceRequest # setSdkRequestTimeout ( int ) } to enforce a timeout per HTTP * request */ public < T extends AmazonWebServiceRequest > T withSdkClientExecutionTimeout ( int sdkClientExecutionTimeout ) { } }
setSdkClientExecutionTimeout ( sdkClientExecutionTimeout ) ; @ SuppressWarnings ( "unchecked" ) T t = ( T ) this ; return t ;
public class UpdateElasticsearchDomainConfigRequestMarshaller {

    /**
     * Marshall the given parameter object.
     * Each field of the request is written through the {@link ProtocolMarshaller}
     * using its pre-declared binding constant; any failure is wrapped in an
     * {@code SdkClientException}.
     *
     * @param updateElasticsearchDomainConfigRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that serializes each field to JSON
     */
    public void marshall(UpdateElasticsearchDomainConfigRequest updateElasticsearchDomainConfigRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateElasticsearchDomainConfigRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each request field with its corresponding binding descriptor.
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getDomainName(), DOMAINNAME_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getElasticsearchClusterConfig(), ELASTICSEARCHCLUSTERCONFIG_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getEBSOptions(), EBSOPTIONS_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getSnapshotOptions(), SNAPSHOTOPTIONS_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getVPCOptions(), VPCOPTIONS_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getCognitoOptions(), COGNITOOPTIONS_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getAdvancedOptions(), ADVANCEDOPTIONS_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getAccessPolicies(), ACCESSPOLICIES_BINDING);
            protocolMarshaller.marshall(updateElasticsearchDomainConfigRequest.getLogPublishingOptions(), LOGPUBLISHINGOPTIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Util {

    /**
     * Returns an unmodifiable view of the given collection, while tolerating a
     * {@code null} argument (which is passed through unchanged).
     *
     * @param coll the collection to wrap, may be {@code null}
     * @return an unmodifiable view of {@code coll}, or {@code null} if {@code coll} was {@code null}
     */
    static <T> Collection<T> freeze(Collection<T> coll) {
        if (coll == null) {
            return null;
        }
        return Collections.unmodifiableCollection(coll);
    }
}
public class CharOperation { /** * Answers true if a sub - pattern matches the subpart of the given name , false otherwise . char [ ] pattern matching , * accepting wild - cards ' * ' and ' ? ' . Can match only subset of name / pattern . end positions are non - inclusive . The * subpattern is defined by the patternStart and pattternEnd positions . When not case sensitive , the pattern is * assumed to already be lowercased , the name will be lowercased character per character as comparing . < br > * < br > * For example : * < ol > * < li > * < pre > * pattern = { ' ? ' , ' b ' , ' * ' } * patternStart = 1 * patternEnd = 3 * name = { ' a ' , ' b ' , ' c ' , ' d ' } * nameStart = 1 * nameEnd = 4 * isCaseSensitive = true * result = & gt ; true * < / pre > * < / li > * < li > * < pre > * pattern = { ' ? ' , ' b ' , ' * ' } * patternStart = 1 * patternEnd = 2 * name = { ' a ' , ' b ' , ' c ' , ' d ' } * nameStart = 1 * nameEnd = 2 * isCaseSensitive = true * result = & gt ; false * < / pre > * < / li > * < / ol > * @ param pattern * the given pattern * @ param patternStart * the given pattern start * @ param patternEnd * the given pattern end * @ param name * the given name * @ param nameStart * the given name start * @ param nameEnd * the given name end * @ param isCaseSensitive * flag to know if the matching should be case sensitive * @ return true if a sub - pattern matches the subpart of the given name , false otherwise */ public static final boolean match ( char [ ] pattern , int patternStart , int patternEnd , char [ ] name , int nameStart , int nameEnd , boolean isCaseSensitive ) { } }
if ( name == null ) { return false ; // null name cannot match } if ( pattern == null ) { return true ; // null pattern is equivalent to ' * ' } int iPattern = patternStart ; int iName = nameStart ; if ( patternEnd < 0 ) { patternEnd = pattern . length ; } if ( nameEnd < 0 ) { nameEnd = name . length ; } /* check first segment */ char patternChar = 0 ; while ( iPattern < patternEnd && ( patternChar = pattern [ iPattern ] ) != '*' ) { if ( iName == nameEnd ) { return false ; } if ( patternChar != ( isCaseSensitive ? name [ iName ] : Character . toLowerCase ( name [ iName ] ) ) && patternChar != '?' ) { return false ; } iName ++ ; iPattern ++ ; } /* check sequence of star + segment */ int segmentStart ; if ( patternChar == '*' ) { segmentStart = ++ iPattern ; // skip star } else { segmentStart = 0 ; // force iName check } int prefixStart = iName ; checkSegment : while ( iName < nameEnd ) { if ( iPattern == patternEnd ) { iPattern = segmentStart ; // mismatch - restart current // segment iName = ++ prefixStart ; continue checkSegment ; } /* segment is ending */ if ( ( patternChar = pattern [ iPattern ] ) == '*' ) { segmentStart = ++ iPattern ; // skip start if ( segmentStart == patternEnd ) { return true ; } prefixStart = iName ; continue checkSegment ; } /* check current name character */ if ( ( isCaseSensitive ? name [ iName ] : Character . toLowerCase ( name [ iName ] ) ) != patternChar && patternChar != '?' ) { iPattern = segmentStart ; // mismatch - restart current // segment iName = ++ prefixStart ; continue checkSegment ; } iName ++ ; iPattern ++ ; } return segmentStart == patternEnd || iName == nameEnd && iPattern == patternEnd || iPattern == patternEnd - 1 && pattern [ iPattern ] == '*' ;
public class CssScanner {

    /**
     * Builds a UNICODE_RANGE token.
     * On entry the reader is positioned on the leading 'U'/'u' with '+' as the next
     * character (asserted in debug mode). Consumes the '+' and then as many
     * urange characters as follow, stopping (and unreading the last character)
     * at EOF or the first non-urange character. The token text "U+..." is
     * appended to the builder.
     */
    private void _urange() throws IOException, CssException {
        if (debug) {
            checkArgument((reader.curChar == 'U' || reader.curChar == 'u') && reader.peek() == '+');
        }
        builder.type = Type.URANGE;
        reader.next(); // consume the '+'
        List<Integer> cbuf = Lists.newArrayList();
        // count tracks consecutive hex/wildcard chars within one side of the range;
        // a '-' separator resets it, and more than 6 in a row is illegal.
        int count = 0;
        while (true) {
            // Mark before reading so a non-matching character can be pushed back.
            Mark mark = reader.mark();
            int ch = reader.next();
            if (ch == -1) {
                reader.unread(ch, mark);
                break;
            }
            if (URANGECHAR.matches((char) ch)) {
                count = ch == '-' ? 0 : count + 1;
                if (count == 7) {
                    // Report the overlong side but keep scanning to consume the token.
                    builder.error(CssErrorCode.SCANNER_ILLEGAL_URANGE, reader, "U+" + toString(cbuf) + (char) ch);
                }
                cbuf.add(ch);
            } else {
                reader.unread(ch, mark);
                break;
            }
        }
        builder.append("U+");
        builder.append(Ints.toArray(cbuf));
    }
}
public class LeaderState {

    /**
     * Appends initial entries to the log to take leadership.
     * First appends a no-op initialize entry for the new term (which resets
     * session timeouts and allows entries from prior terms to be committed),
     * then appends a configuration entry propagating the leader's view of the
     * cluster membership.
     */
    private void appendInitialEntries() {
        final long term = context.getTerm();
        // Append a no-op entry to reset session timeouts and commit entries from prior terms.
        // try-with-resources ensures the entry is released after appending.
        try (InitializeEntry entry = context.getLog().create(InitializeEntry.class)) {
            entry.setTerm(term).setTimestamp(appender.time());
            // The initialize entry must be the first entry of the leader's term.
            Assert.state(context.getLog().append(entry) == appender.index(), "Initialize entry not appended at the start of the leader's term");
            LOGGER.trace("{} - Appended {}", context.getCluster().member().address(), entry);
        }
        // Append a configuration entry to propagate the leader's cluster configuration.
        configure(context.getCluster().members());
    }
}
public class hqlLexer {

    /**
     * $ANTLR start "THEN"
     * ANTLR-generated lexer rule: matches the literal keyword 'then' and, on
     * success, records the THEN token type on the default channel. Do not edit
     * by hand — regenerate from hql.g instead.
     */
    public final void mTHEN() throws RecognitionException {
        try {
            int _type = THEN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // hql.g:64:6: ( 'then' )
            // hql.g:64:8: 'then'
            {
                match("then");
                // In backtracking mode a failed match returns silently instead of throwing.
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class SQLPPMapping2DatalogConverter { /** * Returns a new function by renaming variables occurring in the { @ code function } * according to the { @ code attributes } lookup table */ private Function renameVariables ( Function function , ImmutableMap < QualifiedAttributeID , Term > attributes , QuotedIDFactory idfac ) throws AttributeNotFoundException { } }
List < Term > terms = function . getTerms ( ) ; List < Term > newTerms = new ArrayList < > ( terms . size ( ) ) ; for ( Term term : terms ) { Term newTerm ; if ( term instanceof Variable ) { Variable var = ( Variable ) term ; QuotedID attribute = idfac . createAttributeID ( var . getName ( ) ) ; newTerm = attributes . get ( new QualifiedAttributeID ( null , attribute ) ) ; if ( newTerm == null ) { QuotedID quotedAttribute = QuotedID . createIdFromDatabaseRecord ( idfac , var . getName ( ) ) ; newTerm = attributes . get ( new QualifiedAttributeID ( null , quotedAttribute ) ) ; if ( newTerm == null ) throw new AttributeNotFoundException ( "The source query does not provide the attribute " + attribute + " (variable " + var . getName ( ) + ") required by the target atom." ) ; } } else if ( term instanceof Function ) newTerm = renameVariables ( ( Function ) term , attributes , idfac ) ; else if ( term instanceof Constant ) newTerm = term . clone ( ) ; else throw new RuntimeException ( "Unknown term type: " + term ) ; newTerms . add ( newTerm ) ; } return termFactory . getFunction ( function . getFunctionSymbol ( ) , newTerms ) ;
public class GVRDirectLight { /** * Enables or disabled shadow casting for a direct light . * Enabling shadows attaches a GVRShadowMap component to the * GVRSceneObject which owns the light and provides the * component with an orthographic camera for shadow casting . * @ param enableFlag true to enable shadow casting , false to disable */ public void setCastShadow ( boolean enableFlag ) { } }
GVRSceneObject owner = getOwnerObject ( ) ; if ( owner != null ) { GVRShadowMap shadowMap = ( GVRShadowMap ) getComponent ( GVRRenderTarget . getComponentType ( ) ) ; if ( enableFlag ) { if ( shadowMap != null ) { shadowMap . setEnable ( true ) ; } else { GVRCamera shadowCam = GVRShadowMap . makeOrthoShadowCamera ( getGVRContext ( ) . getMainScene ( ) . getMainCameraRig ( ) . getCenterCamera ( ) ) ; shadowMap = new GVRShadowMap ( getGVRContext ( ) , shadowCam ) ; owner . attachComponent ( shadowMap ) ; } } else if ( shadowMap != null ) { shadowMap . setEnable ( false ) ; } } mCastShadow = enableFlag ;
public class AbstractCloseableRegistry { /** * Registers a { @ link Closeable } with the registry . In case the registry is already closed , this method throws an * { @ link IllegalStateException } and closes the passed { @ link Closeable } . * @ param closeable Closeable tor register * @ throws IOException exception when the registry was closed before */ public final void registerCloseable ( C closeable ) throws IOException { } }
if ( null == closeable ) { return ; } synchronized ( getSynchronizationLock ( ) ) { if ( ! closed ) { doRegister ( closeable , closeableToRef ) ; return ; } } IOUtils . closeQuietly ( closeable ) ; throw new IOException ( "Cannot register Closeable, registry is already closed. Closing argument." ) ;
public class KafkaExtractor {

    /**
     * Record the avg time per record for the current partition, then increment this.currentPartitionIdx,
     * and switch metric context to the new partition.
     */
    private void moveToNextPartition() {
        if (this.currentPartitionIdx == INITIAL_PARTITION_IDX) {
            // First invocation: nothing to flush yet, simply start at partition 0.
            LOG.info("Pulling topic " + this.topicName);
            this.currentPartitionIdx = 0;
        } else {
            // Flush statistics for the partition just finished, then reset all
            // per-partition counters before advancing to the next partition.
            updateStatisticsForCurrentPartition();
            this.currentPartitionIdx++;
            this.currentPartitionRecordCount = 0;
            this.currentPartitionTotalSize = 0;
            this.currentPartitionDecodeRecordTime = 0;
            this.currentPartitionFetchMessageBufferTime = 0;
            this.currentPartitionReadRecordTime = 0;
            this.currentPartitionLastSuccessfulRecord = null;
        }
        // Drop the current iterator so a fresh one is created for the new partition.
        this.messageIterator = null;
        if (this.currentPartitionIdx < this.partitions.size()) {
            LOG.info(String.format("Pulling partition %s from offset %d to %d, range=%d", this.getCurrentPartition(),
                    this.nextWatermark.get(this.currentPartitionIdx), this.highWatermark.get(this.currentPartitionIdx),
                    this.highWatermark.get(this.currentPartitionIdx) - this.nextWatermark.get(this.currentPartitionIdx)));
            switchMetricContextToCurrentPartition();
        }
        if (!allPartitionsFinished()) {
            // Record when fetching for the (new) current partition begins.
            this.startFetchEpochTime.put(this.getCurrentPartition(), System.currentTimeMillis());
        }
    }
}
public class AgentManager { /** * Updates state of agent , if the call in a queue was redirect to the next * agent because the ringed agent doesn ' t answer the call . After reset * state , put the next agent in charge . * @ param channelCalling * @ param agent */ private void updateRingingAgents ( String channelCalling , AsteriskAgentImpl agent ) { } }
synchronized ( ringingAgents ) { if ( ringingAgents . containsKey ( channelCalling ) ) { updateAgentState ( ringingAgents . get ( channelCalling ) , AgentState . AGENT_IDLE ) ; } ringingAgents . put ( channelCalling , agent ) ; }
public class CmsGroupsOfUserTable {

    /**
     * Init method.<p>
     * Lazily configures the table (container, columns, icons, selection mode) on the
     * first call, then refills the container with the given groups.
     *
     * @param app the app instance
     * @param cms CmsObject
     * @param user CmsUser
     * @param groups list of groups
     */
    // NOTE(review): the cms and user parameters are currently unused in this method —
    // confirm whether they are kept for interface compatibility.
    public void init(CmsAccountsApp app, CmsObject cms, CmsUser user, List<CmsGroup> groups) {
        m_app = app;
        if (m_container == null) {
            // First call: build the container, declare one property per column
            // and configure the table's visual behavior.
            m_container = new IndexedContainer();
            for (TableProperty prop : TableProperty.values()) {
                m_container.addContainerProperty(prop, prop.getType(), prop.getDefault());
                setColumnHeader(prop, prop.getName());
            }
            m_app.addGroupContainerProperties(m_container);
            setContainerDataSource(m_container);
            setItemIconPropertyId(TableProperty.Icon);
            setRowHeaderMode(RowHeaderMode.ICON_ONLY);
            setColumnWidth(null, 40);
            setSelectable(false);
            setMultiSelect(false);
            setVisibleColumns(TableProperty.Name, TableProperty.OU);
        }
        // Refill the container with the given groups.
        m_container.removeAllItems();
        for (CmsGroup group : groups) {
            Item item = m_container.addItem(group);
            m_app.fillGroupItem(item, group, new ArrayList<CmsGroup>());
        }
    }
}
public class PowerIteration { /** * Returns the largest eigen pair of matrix with the power iteration * under the assumptions A has an eigenvalue that is strictly greater * in magnitude than its other eigenvalues and the starting * vector has a nonzero component in the direction of an eigenvector * associated with the dominant eigenvalue . * @ param A the matrix supporting matrix vector multiplication operation . * @ param v on input , it is the non - zero initial guess of the eigen vector . * On output , it is the eigen vector corresponding largest eigen value . * @ param tol the desired convergence tolerance . * @ param maxIter the maximum number of iterations in case that the algorithm * does not converge . * @ return the largest eigen value . */ public static double eigen ( Matrix A , double [ ] v , double tol , int maxIter ) { } }
return eigen ( A , v , 0.0 , tol , maxIter ) ;
public class TargetSslProxyClient {

    /**
     * Changes SslCertificates for TargetSslProxy.
     * <p>Sample code:
     * <pre><code>
     * try (TargetSslProxyClient targetSslProxyClient = TargetSslProxyClient.create()) {
     *   ProjectGlobalTargetSslProxyName targetSslProxy = ProjectGlobalTargetSslProxyName.of("[PROJECT]", "[TARGET_SSL_PROXY]");
     *   TargetSslProxiesSetSslCertificatesRequest targetSslProxiesSetSslCertificatesRequestResource = TargetSslProxiesSetSslCertificatesRequest.newBuilder().build();
     *   Operation response = targetSslProxyClient.setSslCertificatesTargetSslProxy(targetSslProxy.toString(), targetSslProxiesSetSslCertificatesRequestResource);
     * }
     * </code></pre>
     *
     * @param targetSslProxy Name of the TargetSslProxy resource whose SslCertificate resource is to be set.
     * @param targetSslProxiesSetSslCertificatesRequestResource the certificates to apply
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation setSslCertificatesTargetSslProxy(String targetSslProxy, TargetSslProxiesSetSslCertificatesRequest targetSslProxiesSetSslCertificatesRequestResource) {
        // Wrap the plain parameters in the generated HTTP request object and
        // delegate to the request-based overload of this method.
        SetSslCertificatesTargetSslProxyHttpRequest request = SetSslCertificatesTargetSslProxyHttpRequest.newBuilder()
                .setTargetSslProxy(targetSslProxy)
                .setTargetSslProxiesSetSslCertificatesRequestResource(targetSslProxiesSetSslCertificatesRequestResource)
                .build();
        return setSslCertificatesTargetSslProxy(request);
    }
}
public class JsMessageHandleFactory {

    /**
     * Create the singleton Factory instance.
     * Reflectively instantiates the concrete factory implementation class and
     * stores it in {@code instance}. On failure, the error is recorded via FFDC
     * and trace, and rethrown as a {@link NoClassDefFoundError} carrying the
     * original cause.
     */
    private static void createFactoryInstance() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createFactoryInstance");
        try {
            // Load and instantiate the configured concrete factory implementation.
            Class cls = Class.forName(JS_MESSAGE_HANDLE_FACTORY_CLASS);
            instance = (JsMessageHandleFactory) cls.newInstance();
        } catch (Exception e) {
            // Record the failure for first-failure data capture and serviceability trace.
            FFDCFilter.processException(e, "com.ibm.ws.sib.mfp.JsMessageHandleFactory.createFactoryInstance", "133");
            SibTr.error(tc, "UNABLE_TO_CREATE_HANDLEFACTORY_CWSIF0031", e);
            // Surface the failure as an Error so callers cannot silently proceed
            // without a factory; preserve the original exception as the cause.
            NoClassDefFoundError ncdfe = new NoClassDefFoundError(e.getMessage());
            ncdfe.initCause(e);
            throw ncdfe;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createFactoryInstance");
    }
}
public class DeleteOnExit { /** * Does the actual work . Note we ( a ) first sort the files lexicographically * and then ( b ) delete them in reverse order . This is to make sure files * get deleted before their parent directories . */ @ Override public void run ( ) { } }
Collections . sort ( files ) ; for ( int i = files . size ( ) - 1 ; i >= 0 ; i -- ) { new File ( files . get ( i ) ) . delete ( ) ; }
public class ProposalResponse {

    /**
     * Verifies that a Proposal response is properly signed. The payload is the
     * concatenation of the response payload byte string and the endorsement. The
     * certificate (public key) is gotten from the Endorsement.Endorser.IdBytes field.
     * The result is cached: once computed, subsequent calls return the cached value.
     *
     * @param crypto the CryptoPrimitives instance to be used for signing and verification
     * @return true/false depending on result of signature verification
     */
    boolean verify(CryptoSuite crypto) {
        logger.trace(format("%s verifying transaction: %s endorsement.", peer, getTransactionID()));
        if (hasBeenVerified) { // check if this proposalResponse was already verified by client code
            logger.trace(format("%s transaction: %s was already verified returned %b", peer, getTransactionID(), isVerified));
            return this.isVerified;
        }
        try {
            // An invalid response can never verify successfully.
            if (isInvalid()) {
                this.isVerified = false;
                logger.debug(format("%s for transaction %s returned invalid. Setting verify to false", peer, getTransactionID()));
                return false;
            }
            FabricProposalResponse.Endorsement endorsement = this.proposalResponse.getEndorsement();
            ByteString sig = endorsement.getSignature();
            // NOTE(review): 'endorserCertifcate' is misspelled in the original; kept as-is.
            byte[] endorserCertifcate = null;
            byte[] signature = null;
            byte[] data = null;
            try {
                Identities.SerializedIdentity endorser = Identities.SerializedIdentity.parseFrom(endorsement.getEndorser());
                // The signed data is the response payload concatenated with the serialized endorser.
                ByteString plainText = proposalResponse.getPayload().concat(endorsement.getEndorser());
                if (config.extraLogLevel(10)) {
                    // Optional deep diagnostics: dump the raw bytes to a diagnostic file.
                    if (null != diagnosticFileDumper) {
                        StringBuilder sb = new StringBuilder(10000);
                        sb.append("payload TransactionBuilderbytes in hex: " + DatatypeConverter.printHexBinary(proposalResponse.getPayload().toByteArray()));
                        sb.append("\n");
                        sb.append("endorser bytes in hex: " + DatatypeConverter.printHexBinary(endorsement.getEndorser().toByteArray()));
                        sb.append("\n");
                        sb.append("plainText bytes in hex: " + DatatypeConverter.printHexBinary(plainText.toByteArray()));
                        logger.trace("payload TransactionBuilderbytes: " + diagnosticFileDumper.createDiagnosticFile(sb.toString()));
                    }
                }
                if (sig == null || sig.isEmpty()) { // we shouldn't get here...
                    logger.warn(format("%s %s returned signature is empty verify set to false.", peer, getTransactionID()));
                    this.isVerified = false;
                } else {
                    // Verify the signature over the concatenated payload using the
                    // endorser's certificate extracted from its serialized identity.
                    endorserCertifcate = endorser.getIdBytes().toByteArray();
                    signature = sig.toByteArray();
                    data = plainText.toByteArray();
                    this.isVerified = crypto.verify(endorserCertifcate, config.getSignatureAlgorithm(), signature, data);
                    if (!this.isVerified) {
                        logger.warn(format("%s transaction: %s verify: Failed to verify. Endorsers certificate: %s, "
                                + "signature: %s, signing algorithm: %s, signed data: %s.", peer, getTransactionID(),
                                toHexString(endorserCertifcate), toHexString(signature), config.getSignatureAlgorithm(), toHexString(data)));
                    }
                }
            } catch (InvalidProtocolBufferException | CryptoException e) {
                logger.error(format("%s transaction: %s verify: Failed to verify. Endorsers certificate: %s, "
                        + "signature: %s, signing algorithm: %s, signed data: %s.", peer, getTransactionID(),
                        toHexString(endorserCertifcate), toHexString(signature), config.getSignatureAlgorithm(), toHexString(data)), e);
                logger.error(format("%s transaction: %s verify: Cannot retrieve peer identity from ProposalResponse. Error is: %s", peer, getTransactionID(), e.getMessage()), e);
                this.isVerified = false;
            }
            logger.debug(format("%s finished verify for transaction %s returning %b", peer, getTransactionID(), this.isVerified));
            return this.isVerified;
        } finally {
            // Mark as verified regardless of outcome so the result is cached.
            hasBeenVerified = true;
        }
    }
}
public class ArgsParser { private static String formatValue ( String value ) { } }
if ( value == null ) return "" ; value = value . trim ( ) ; if ( value . startsWith ( "\"" ) || value . startsWith ( "'" ) ) value = value . substring ( 1 ) ; if ( value . endsWith ( "\"" ) || value . endsWith ( "'" ) ) value = value . substring ( 0 , value . length ( ) - 1 ) ; return value ;
public class JobGetOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly . * @ param ocpDate the ocpDate value to set * @ return the JobGetOptions object itself . */ public JobGetOptions withOcpDate ( DateTime ocpDate ) { } }
if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ;
public class MemoryFS {

    /**
     * Moves the data stored under {@code source} to {@code target}, which may live
     * on a different in-memory file system instance (resolved from the target path).
     */
    @Override
    public void move(EightyPath source, EightyPath target) {
        // Resolve the file system owning the target path; it may differ from this one.
        MemoryFS targetFS = (MemoryFS) (target._getFileSystem()).get80();
        // todo different filestore?
        getStore(source).getData().move(source, targetFS.getStore(target).getData(), target);
        // todo tested?
        // NOTE(review): watcher notifications are intentionally disabled below — confirm
        // whether delete/create events should be signalled on move.
        // watcher.signal(source, ENTRY_DELETE);
        // targetFS.watcher.signal(target, ENTRY_CREATE);
    }
}
public class sslpolicylabel_binding { /** * Use this API to fetch sslpolicylabel _ binding resource of given name . */ public static sslpolicylabel_binding get ( nitro_service service , String labelname ) throws Exception { } }
sslpolicylabel_binding obj = new sslpolicylabel_binding ( ) ; obj . set_labelname ( labelname ) ; sslpolicylabel_binding response = ( sslpolicylabel_binding ) obj . get_resource ( service ) ; return response ;
public class AbilityUtils { /** * Fetches the user who caused the update . * @ param update a Telegram { @ link Update } * @ return the originating user * @ throws IllegalStateException if the user could not be found */ public static User getUser ( Update update ) { } }
if ( MESSAGE . test ( update ) ) { return update . getMessage ( ) . getFrom ( ) ; } else if ( CALLBACK_QUERY . test ( update ) ) { return update . getCallbackQuery ( ) . getFrom ( ) ; } else if ( INLINE_QUERY . test ( update ) ) { return update . getInlineQuery ( ) . getFrom ( ) ; } else if ( CHANNEL_POST . test ( update ) ) { return update . getChannelPost ( ) . getFrom ( ) ; } else if ( EDITED_CHANNEL_POST . test ( update ) ) { return update . getEditedChannelPost ( ) . getFrom ( ) ; } else if ( EDITED_MESSAGE . test ( update ) ) { return update . getEditedMessage ( ) . getFrom ( ) ; } else if ( CHOSEN_INLINE_QUERY . test ( update ) ) { return update . getChosenInlineQuery ( ) . getFrom ( ) ; } else { throw new IllegalStateException ( "Could not retrieve originating user from update" ) ; }
public class CmsSolrDocumentXmlContent {

    /**
     * Extracts the content of a single XML content resource.<p>
     *
     * @param cms the cms context
     * @param resource the resource
     * @param index the used index
     * @param forceLocale if set, only the content values for the given locale will be extracted
     * @return the extraction result
     * @throws CmsException in case reading or unmarshalling the content fails
     */
    public static CmsExtractionResult extractXmlContent(CmsObject cms, CmsResource resource, I_CmsSearchIndex index, Locale forceLocale) throws CmsException {
        // un-marshal the content
        CmsFile file = cms.readFile(resource);
        if (file.getLength() <= 0) {
            throw new CmsIndexNoContentException(Messages.get().container(Messages.ERR_NO_CONTENT_1, resource.getRootPath()));
        }
        A_CmsXmlDocument xmlContent = CmsXmlContentFactory.unmarshal(cms, file);
        // initialize some variables
        Map<Locale, LinkedHashMap<String, String>> items = new HashMap<Locale, LinkedHashMap<String, String>>();
        Map<String, String> fieldMappings = new HashMap<String, String>();
        List<Locale> contentLocales = forceLocale != null ? Collections.singletonList(forceLocale) : xmlContent.getLocales();
        Locale resourceLocale = index.getLocaleForResource(cms, resource, contentLocales);
        LinkedHashMap<String, String> localeItems = null;
        GalleryNameChooser galleryNameChooser = null;
        // loop over the locales of the content
        for (Locale locale : contentLocales) {
            galleryNameChooser = new GalleryNameChooser(cms, xmlContent, locale);
            localeItems = new LinkedHashMap<String, String>();
            StringBuffer textContent = new StringBuffer();
            // store the locales of the content as space separated field
            // loop over the available element paths of the current content locale
            List<String> paths = xmlContent.getNames(locale);
            for (String xpath : paths) {
                // try to get the value extraction for the current element path
                String extracted = null;
                I_CmsXmlContentValue value = xmlContent.getValue(xpath, locale);
                try {
                    // the new DatePointField.createField does not support milliseconds,
                    // so date/time values are rendered as ISO-8601 strings
                    if (value instanceof CmsXmlDateTimeValue) {
                        extracted = CmsSearchUtil.getDateAsIso8601(((CmsXmlDateTimeValue) value).getDateTimeValue());
                    } else {
                        extracted = value.getPlainText(cms);
                        if (CmsStringUtil.isEmptyOrWhitespaceOnly(extracted) && value.isSimpleType() && !(value instanceof CmsXmlHtmlValue)) {
                            // no text value for simple type, so take the string value as item
                            // prevent this for elements of type "OpenCmsHtml", since this causes problematic values
                            // being indexed, e.g., <iframe ...></iframe>
                            // TODO: Why is this special handling needed at all???
                            extracted = value.getStringValue(cms);
                        }
                    }
                } catch (Exception e) {
                    // it can happen that an exception is thrown while extracting a single value
                    LOG.warn(Messages.get().container(Messages.LOG_EXTRACT_VALUE_2, xpath, resource), e);
                }
                // put the extraction to the items and to the textual content
                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(extracted)) {
                    localeItems.put(xpath, extracted);
                }
                if (value.getContentDefinition().getContentHandler().isSearchable(value) && CmsStringUtil.isNotEmptyOrWhitespaceOnly(extracted)) {
                    // value is search-able and the extraction is not empty, so added to the textual content
                    textContent.append(extracted);
                    textContent.append('\n');
                }
                List<String> mappings = xmlContent.getHandler().getMappings(value.getPath());
                if (mappings.size() > 0) {
                    // mappings are defined, lets check if we have mappings that interest us
                    for (String mapping : mappings) {
                        if (mapping.startsWith(I_CmsXmlContentHandler.MAPTO_PROPERTY)) {
                            // this is a property mapping
                            String propertyName = mapping.substring(mapping.lastIndexOf(':') + 1);
                            if (CmsPropertyDefinition.PROPERTY_TITLE.equals(propertyName) || CmsPropertyDefinition.PROPERTY_DESCRIPTION.equals(propertyName)) {
                                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(extracted)) {
                                    if (CmsPropertyDefinition.PROPERTY_TITLE.equals(propertyName)) {
                                        galleryNameChooser.setMappedTitleValue(extracted);
                                    } else {
                                        // if field is not title, it must be description
                                        galleryNameChooser.setMappedDescriptionValue(extracted);
                                    }
                                }
                            }
                        } else if (mapping.equals(MAPPING_GALLERY_NAME)) {
                            galleryNameChooser.setMappedGalleryNameValue(value.getPlainText(cms));
                        } else if (mapping.equals(MAPPING_GALLERY_DESCRIPTION)) {
                            galleryNameChooser.setMappedGalleryDescriptionValue(value.getPlainText(cms));
                        }
                    }
                }
                if (value instanceof CmsXmlSerialDateValue) {
                    // serial date values expand into per-event date fields for the index
                    if ((null != extracted) && !extracted.isEmpty()) {
                        I_CmsSerialDateValue serialDateValue = new CmsSerialDateValue(extracted);
                        I_CmsSerialDateBean serialDateBean = CmsSerialDateBeanFactory.createSerialDateBean(serialDateValue);
                        if (null != serialDateBean) {
                            StringBuffer values = new StringBuffer();
                            StringBuffer endValues = new StringBuffer();
                            StringBuffer currentTillValues = new StringBuffer();
                            for (Long eventDate : serialDateBean.getDatesAsLong()) {
                                values.append("\n").append(eventDate.toString());
                                long endDate = null != serialDateBean.getEventDuration() ? eventDate.longValue() + serialDateBean.getEventDuration().longValue() : eventDate.longValue();
                                endValues.append("\n").append(Long.toString(endDate));
                                // Special treatment for events that end at 00:00:
                                // To not show them at the day after they ended, one millisecond is removed from the end time
                                // for the "currenttill"-time
                                currentTillValues.append("\n").append(serialDateValue.isCurrentTillEnd() ? Long.valueOf(serialDateValue.endsAtMidNight() && (endDate > eventDate.longValue()) ? endDate - 1L : endDate) : eventDate);
                            }
                            // substring(1) drops the leading "\n" separator
                            fieldMappings.put(CmsSearchField.FIELD_SERIESDATES, values.substring(1));
                            fieldMappings.put(CmsSearchField.FIELD_SERIESDATES_END, endValues.substring(1));
                            fieldMappings.put(CmsSearchField.FIELD_SERIESDATES_CURRENT_TILL, currentTillValues.substring(1));
                            fieldMappings.put(CmsSearchField.FIELD_SERIESDATES_TYPE, serialDateValue.getDateType().toString());
                        } else {
                            LOG.warn("Serial date value \"" + value.getStringValue(cms) + "\" at element \"" + value.getPath() + "\" is invalid. No dates are indexed for resource \"" + resource.getRootPath() + "\".");
                        }
                    }
                }
            }
            // Also consider schema defaults for elements not present in the content,
            // so gallery name/title fall back to the configured default values.
            Set<String> xpaths = Sets.newHashSet();
            collectSchemaXpathsForSimpleValues(cms, xmlContent.getContentDefinition(), "", xpaths);
            for (String xpath : xpaths) {
                // mappings always are stored with indexes, so we add them to the xpath
                List<String> mappings = xmlContent.getHandler().getMappings(CmsXmlUtils.createXpath(xpath, 1));
                for (String mapping : mappings) {
                    if (mapping.equals(MAPPING_GALLERY_NAME) || mapping.equals(I_CmsXmlContentHandler.MAPTO_PROPERTY + CmsPropertyDefinition.PROPERTY_TITLE)) {
                        String defaultValue = xmlContent.getHandler().getDefault(cms, xmlContent.getFile(), null, xpath, locale);
                        if (mapping.equals(MAPPING_GALLERY_NAME)) {
                            galleryNameChooser.setDefaultGalleryNameValue(defaultValue);
                        } else {
                            galleryNameChooser.setDefaultTitleValue(defaultValue);
                        }
                    }
                }
            }
            // Store the locale-specific gallery title and description as field mappings.
            final String galleryTitleFieldKey = CmsSearchFieldConfiguration.getLocaleExtendedName(CmsSearchField.FIELD_TITLE_UNSTORED, locale) + "_s";
            final String galleryNameValue = galleryNameChooser.getGalleryName();
            fieldMappings.put(galleryTitleFieldKey, galleryNameValue);
            fieldMappings.put(CmsSearchFieldConfiguration.getLocaleExtendedName(CmsSearchField.FIELD_DESCRIPTION, locale) + "_s", galleryNameChooser.getDescription());
            // handle the textual content
            if (textContent.length() > 0) {
                // add the textual content with a localized key to the items
                // String key = CmsSearchFieldConfiguration.getLocaleExtendedName(CmsSearchField.FIELD_CONTENT, locale);
                // items.put(key, textContent.toString());
                // use the default locale of this resource as general text content for the extraction result
                localeItems.put(I_CmsExtractionResult.ITEM_CONTENT, textContent.toString());
            }
            items.put(locale, localeItems);
        }
        // if the content is locale independent, it should have only one content locale, but that should be indexed for all available locales.
        // TODO: One could think of different indexing behavior, i.e., index only for getDefaultLocales(cms, resource)
        // But using getAvailableLocales(cms, resource) does not work, because locale-available is set to "en" for all that content.
        if ((xmlContent instanceof CmsXmlContent) && ((CmsXmlContent) xmlContent).isLocaleIndependent()) {
            if (forceLocale != null) {
                items.put(forceLocale, localeItems);
            } else {
                for (Locale l : OpenCms.getLocaleManager().getAvailableLocales()) {
                    items.put(l, localeItems);
                    if (null != galleryNameChooser) {
                        final String galleryTitleFieldKey = CmsSearchFieldConfiguration.getLocaleExtendedName(CmsSearchField.FIELD_TITLE_UNSTORED, l) + "_s";
                        fieldMappings.put(galleryTitleFieldKey, galleryNameChooser.getGalleryName(l));
                        fieldMappings.put(CmsSearchFieldConfiguration.getLocaleExtendedName(CmsSearchField.FIELD_DESCRIPTION, l) + "_s", galleryNameChooser.getDescription(l));
                    }
                }
            }
        }
        // add the locales that have been indexed for this document as item and return the extraction result
        // fieldMappings.put(CmsSearchField.FIELD_RESOURCE_LOCALES, locales.toString().trim());
        return new CmsExtractionResult(resourceLocale, items, fieldMappings);
    }
}
public class GridGenerator { /** * Method transforms the grid into pmesh format . */ public void writeGridInPmeshFormat ( String outPutFileName ) throws IOException { } }
BufferedWriter writer = new BufferedWriter ( new FileWriter ( outPutFileName + ".pmesh" ) ) ; int numberOfGridPoints = grid . length * grid [ 0 ] . length * grid [ 0 ] [ 0 ] . length ; writer . write ( numberOfGridPoints + "\n" ) ; for ( int z = 0 ; z < grid [ 0 ] [ 0 ] . length ; z ++ ) { for ( int y = 0 ; y < grid [ 0 ] . length ; y ++ ) { for ( int x = 0 ; x < grid . length ; x ++ ) { Point3d coords = getCoordinatesFromGridPoint ( new Point3d ( x , y , z ) ) ; writer . write ( coords . x + "\t" + coords . y + "\t" + coords . z + "\n" ) ; } } } writer . close ( ) ;
public class MarkLogicRepositoryConnection { /** * private utility for merging Resource varargs * @ param o * @ param arr * @ return */ private static Resource [ ] mergeResource ( Resource o , Resource ... arr ) { } }
if ( o != null ) { Resource [ ] newArray = new Resource [ arr . length + 1 ] ; newArray [ 0 ] = o ; System . arraycopy ( arr , 0 , newArray , 1 , arr . length ) ; return newArray ; } else { return arr ; }
public class EJBMDOrchestrator {

    /**
     * F743-25855
     * <p>
     * Scans the declared methods of the enterprise bean class for the session
     * synchronization annotations {@code @AfterBegin}, {@code @BeforeCompletion}
     * and {@code @AfterCompletion}, recording each discovered method on the bean
     * metadata. A method is only looked for if the corresponding metadata slot
     * was still empty on entry (i.e. not already configured, presumably via
     * deployment descriptor — the annotation scan never overrides it). Finding a
     * second annotated method of the same kind is a configuration error.
     *
     * @param bmd the bean metadata to populate with the discovered methods
     * @throws EJBConfigurationException if more than one method carries the same
     *         session synchronization annotation
     */
    private void findAnnotatedSessionSynchMethods(BeanMetaData bmd) throws EJBConfigurationException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();

        // Snapshot which slots are still unset BEFORE scanning: a slot already
        // filled (e.g. from XML config) must not be touched by the annotation scan.
        boolean lookForAfterBegin = (bmd.ivAfterBegin == null);
        boolean lookForBeforeCompletion = (bmd.ivBeforeCompletion == null);
        boolean lookForAfterCompletion = (bmd.ivAfterCompletion == null);

        for (MethodMap.MethodInfo methodInfo : MethodMap.getAllDeclaredMethods(bmd.enterpriseBeanClass)) {
            Method method = methodInfo.getMethod();

            if (lookForAfterBegin && method.isAnnotationPresent(AfterBegin.class)) {
                // Inner null re-check detects a SECOND annotated method found later
                // in this same scan (the lookFor flag stays true after the first hit).
                if (bmd.ivAfterBegin == null) {
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "@AfterBegin = " + method);
                    method.setAccessible(true); // d666251
                    bmd.ivAfterBegin = method;
                } else {
                    Tr.error(tc, "MULTIPLE_SESSION_SYNCH_METHODS_CNTR0326E",
                             new Object[] { "after-begin-method", bmd.j2eeName.getComponent(), bmd.ivAfterBegin, method });
                    throw new EJBConfigurationException("CNTR0326E: Multiple after-begin-method session synchronization"
                                                        + " methods have been configured for the " + bmd.j2eeName.getComponent()
                                                        + " bean. The configured session synchronization methods are : "
                                                        + bmd.ivBeforeCompletion + " and " + method);
                }
            }

            if (lookForBeforeCompletion && method.isAnnotationPresent(BeforeCompletion.class)) {
                if (bmd.ivBeforeCompletion == null) {
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "@BeforeCompletion = " + method);
                    method.setAccessible(true); // d666251
                    bmd.ivBeforeCompletion = method;
                } else {
                    Tr.error(tc, "MULTIPLE_SESSION_SYNCH_METHODS_CNTR0326E",
                             new Object[] { "before-completion-method", bmd.j2eeName.getComponent(), bmd.ivBeforeCompletion, method });
                    throw new EJBConfigurationException("CNTR0326E: Multiple before-completion-method session synchronization"
                                                        + " methods have been configured for the " + bmd.j2eeName.getComponent()
                                                        + " bean. The configured session synchronization methods are : "
                                                        + bmd.ivBeforeCompletion + " and " + method);
                }
            }

            if (lookForAfterCompletion && method.isAnnotationPresent(AfterCompletion.class)) {
                if (bmd.ivAfterCompletion == null) {
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "@AfterCompletion = " + method);
                    method.setAccessible(true); // d666251
                    bmd.ivAfterCompletion = method;
                } else {
                    Tr.error(tc, "MULTIPLE_SESSION_SYNCH_METHODS_CNTR0326E",
                             new Object[] { "after-completion-method", bmd.j2eeName.getComponent(), bmd.ivAfterCompletion, method });
                    throw new EJBConfigurationException("CNTR0326E: Multiple after-completion-method session synchronization"
                                                        + " methods have been configured for the " + bmd.j2eeName.getComponent()
                                                        + " bean. The configured session synchronization methods are : "
                                                        + bmd.ivAfterCompletion + " and " + method);
                }
            }
        }
    }
}
public class KeyChainGroup { /** * Mark the DeterministicKeys as used , if they match the pubkey * See { @ link DeterministicKeyChain # markKeyAsUsed ( DeterministicKey ) } for more info on this . */ public void markPubKeyAsUsed ( byte [ ] pubkey ) { } }
if ( chains != null ) { for ( DeterministicKeyChain chain : chains ) { DeterministicKey key ; if ( ( key = chain . markPubKeyAsUsed ( pubkey ) ) != null ) { maybeMarkCurrentKeyAsUsed ( key ) ; return ; } } }
public class BackupManagerImpl { /** * isCurrentRepositoryBackup . * @ param log * File , the log to backup * @ return boolean return the ' true ' if this log is current backup . */ private boolean isCurrentRepositoryBackup ( File log ) { } }
for ( RepositoryBackupChain chain : currentRepositoryBackups ) { if ( log . getName ( ) . equals ( new File ( chain . getLogFilePath ( ) ) . getName ( ) ) ) { return true ; } } return false ;
public class ControllerLookup { /** * Returns a controller instance with the given ID . * @ param id * The string ID of the controller as returned by * { @ link IdentifiableController # getId ( ) } * @ return * The controller with the given ID that has just been * looked up . * @ throws IllegalArgumentException * thrown if a controller cannot be found with the given ID . */ @ SuppressWarnings ( "unchecked" ) public < T > T lookup ( final String id ) { } }
for ( final IdentifiableController controller : identifiables ) { if ( controller . getId ( ) . equals ( id ) ) { return ( T ) controller ; } } throw new IllegalArgumentException ( "Could not find a controller with the ID '" + id + "'" ) ;
public class BoundingBox { /** * Creates a BoundingBox extended up to coordinates ( but does not cross date line / poles ) . * @ param latitude up to the extension * @ param longitude up to the extension * @ return an extended BoundingBox or this ( if contains coordinates ) */ public BoundingBox extendCoordinates ( double latitude , double longitude ) { } }
if ( contains ( latitude , longitude ) ) { return this ; } double minLat = Math . max ( MercatorProjection . LATITUDE_MIN , Math . min ( this . minLatitude , latitude ) ) ; double minLon = Math . max ( - 180 , Math . min ( this . minLongitude , longitude ) ) ; double maxLat = Math . min ( MercatorProjection . LATITUDE_MAX , Math . max ( this . maxLatitude , latitude ) ) ; double maxLon = Math . min ( 180 , Math . max ( this . maxLongitude , longitude ) ) ; return new BoundingBox ( minLat , minLon , maxLat , maxLon ) ;
public class XmlDataHandler { /** * Transfers all characters of a specific tag to the corresponding builder and resets the string buffer . */ private void transferToSpecificBuilderAndReset ( ) { } }
// version if ( currentTag == Tag . VERSION ) { dataBuilder . setVersion ( buffer . toString ( ) ) ; } // robot browser addToRobotBuilder ( ) ; // build browser addToBrowserBuilder ( ) ; // build operating system addToOperatingSystemBuilder ( ) ; // build browser pattern addToBrowserPatternBuilder ( ) ; // build browser type addToBrowserTypeBuilder ( ) ; // build browser to operating system mapping addToBrowserOsMappingBuilder ( ) ; // build operating system pattern addToOperatingSystemPatternBuilder ( ) ; // build browser addToDeviceBuilder ( ) ; // build browser pattern addToDevicePatternBuilder ( ) ; buffer = new StringBuilder ( ) ;
public class FieldDescriptor { /** * Sets the fieldConversion . * @ param fieldConversionClassName The fieldConversion to set */ public void setFieldConversionClassName ( String fieldConversionClassName ) { } }
try { this . fieldConversion = ( FieldConversion ) ClassHelper . newInstance ( fieldConversionClassName ) ; } catch ( Exception e ) { throw new MetadataException ( "Could not instantiate FieldConversion class using default constructor" , e ) ; }
public class NmeaStreamProcessor { /** * Returns the list of those indexes that can be removed from the buffer * because they have a timestamp more than * MAXIMUM _ ARRIVAL _ TIME _ DIFFERENCE _ MS from the arrival time of the given * index . * @ param index * @ return */ private Set < Integer > findExpiredIndexesBeforeIndex ( int index ) { } }
long indexTime = getLineTime ( index ) ; Set < Integer > removeThese = Sets . newHashSet ( ) ; for ( int i = index - 1 ; i >= 0 ; i -- ) { if ( indexTime - getLineTime ( i ) > MAXIMUM_ARRIVAL_TIME_DIFFERENCE_MS ) { listener . timestampNotFound ( getLine ( i ) , getLineTime ( i ) ) ; removeThese . add ( i ) ; } } return removeThese ;
public class HybridRunbookWorkerGroupsInner {

    /**
     * Retrieve a hybrid runbook worker group.
     * <p>
     * Synchronous wrapper: delegates to the async service call, blocks until the
     * single response arrives, and unwraps its body.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param hybridRunbookWorkerGroupName The hybrid runbook worker group name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the HybridRunbookWorkerGroupInner object if successful.
     */
    public HybridRunbookWorkerGroupInner get(String resourceGroupName, String automationAccountName, String hybridRunbookWorkerGroupName) {
        return getWithServiceResponseAsync(resourceGroupName, automationAccountName, hybridRunbookWorkerGroupName).toBlocking().single().body();
    }
}
public class AAFConHttp { /** * / * ( non - Javadoc ) * @ see com . att . cadi . aaf . v2_0 . AAFCon # basicAuthSS ( java . security . Principal ) */ @ Override public SecuritySetter < HttpURLConnection > basicAuthSS ( BasicPrincipal principal ) throws CadiException { } }
try { return new HBasicAuthSS ( principal , si ) ; } catch ( IOException e ) { throw new CadiException ( "Error creating HBasicAuthSS" , e ) ; }
public class ManagedThreadFactoryService {

    /**
     * DS method to deactivate this component.
     * Best practice: this should be a protected method, not public or private.
     * <p>
     * NOTE(review): statement order looks deliberate — the shutdown flag is raised
     * first, presumably so the factory stops handing out threads before the rest of
     * the teardown runs; confirm before reordering.
     *
     * @param componentContext DeclarativeService defined/populated component context
     */
    protected void deactivate(ComponentContext componentContext) {
        // Mark the factory as shut down before tearing anything else down.
        isShutdown.set(true);
        // Release the context service reference held for this component.
        contextSvcRef.deactivate(componentContext);
        // Inform the tracker that this factory's thread group is going away.
        threadGroupTracker.threadFactoryDestroyed(name, threadGroup);
    }
}