signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AttributeRenderer { /** * This method will add an attribute to the list of inheritted attributes . * @ param list * @ param attr * @ param removes */ private void addAttribute ( int list , TreeHtmlAttributeInfo attr , RemoveInfo removes ) { } }
ArrayList al = _lists [ list ] ; // if the array list is the empty list then we need to allocate a new array list if ( al == empty ) { al = new ArrayList ( ) ; _lists [ list ] = al ; } // check to see if this attribute is already inside the tree . int cnt = al . size ( ) ; for ( int i = 0 ; i < cnt ; i ++ ) { TreeHtmlAttributeInfo a = ( TreeHtmlAttributeInfo ) al . get ( i ) ; assert ( a != null ) ; if ( a . getAttribute ( ) . equals ( attr . getAttribute ( ) ) ) { removes . removes . add ( a ) ; if ( ! attr . isApplyToDescendents ( ) ) { removes . scopeOverrides = true ; } al . remove ( a ) ; break ; } } // add this to the list al . add ( attr ) ;
public class ModClusterContainer { /** * Management command enabling all contexts on the given node . * @ param jvmRoute the jvmRoute * @ return */ public synchronized boolean enableNode ( final String jvmRoute ) { } }
final Node node = nodes . get ( jvmRoute ) ; if ( node != null ) { for ( final Context context : node . getContexts ( ) ) { context . enable ( ) ; } return true ; } return false ;
public class FastAdapterDialog {
    /**
     * Adds an array of items at the given position within the existing items.
     *
     * @param position the global position at which to insert
     * @param items the items to add
     * @return this dialog, for call chaining
     */
    @SafeVarargs
    public final FastAdapterDialog<Item> add(int position, Item... items) {
        // Delegate insertion to the backing item adapter.
        mItemAdapter.add(position, items);
        return this;
    }
}
public class QueryRow { /** * This method was created in VisualAge . * @ param name java . lang . String * @ param val java . lang . String */ protected void put ( String name , String val ) { } }
if ( val == null ) { row . put ( name , "" ) ; } else { row . put ( name , val ) ; }
public class Util {
    /**
     * Locates the server-settings argument among the command-line arguments.
     *
     * @param args the command-line arguments to scan
     * @return the first argument starting with {@code "--server:"}, or
     *         {@code null} when none is present
     */
    public static String findServerSettings(String[] args) {
        for (int i = 0; i < args.length; i++) {
            final String candidate = args[i];
            if (candidate.startsWith("--server:")) {
                return candidate;
            }
        }
        return null;
    }
}
public class SingleSignOnSessionsEndpoint {
    /**
     * Builds the active SSO session report: one attribute map per non-expired
     * ticket-granting ticket.
     *
     * @param option the report option; {@code DIRECT} excludes proxied TGTs
     * @return the collected session attribute maps
     */
    private Collection<Map<String, Object>> getActiveSsoSessions(final SsoSessionReportOptions option) {
        val activeSessions = new ArrayList<Map<String, Object>>();
        val dateFormat = new ISOStandardDateFormat();
        getNonExpiredTicketGrantingTickets().stream()
            .map(TicketGrantingTicket.class::cast)
            // For DIRECT reports, drop tickets created through a proxy.
            .filter(tgt -> !(option == SsoSessionReportOptions.DIRECT && tgt.getProxiedBy() != null))
            .forEach(tgt -> {
                val authentication = tgt.getAuthentication();
                val principal = authentication.getPrincipal();
                // Pre-size the map to the number of known attribute keys.
                val sso = new HashMap<String, Object>(SsoSessionAttributeKeys.values().length);
                sso.put(SsoSessionAttributeKeys.AUTHENTICATED_PRINCIPAL.toString(), principal.getId());
                sso.put(SsoSessionAttributeKeys.AUTHENTICATION_DATE.toString(), authentication.getAuthenticationDate());
                sso.put(SsoSessionAttributeKeys.AUTHENTICATION_DATE_FORMATTED.toString(),
                    dateFormat.format(DateTimeUtils.dateOf(authentication.getAuthenticationDate())));
                sso.put(SsoSessionAttributeKeys.NUMBER_OF_USES.toString(), tgt.getCountOfUses());
                sso.put(SsoSessionAttributeKeys.TICKET_GRANTING_TICKET.toString(), tgt.getId());
                sso.put(SsoSessionAttributeKeys.PRINCIPAL_ATTRIBUTES.toString(), principal.getAttributes());
                sso.put(SsoSessionAttributeKeys.AUTHENTICATION_ATTRIBUTES.toString(), authentication.getAttributes());
                // Proxy details are only reported for non-DIRECT report options.
                if (option != SsoSessionReportOptions.DIRECT) {
                    if (tgt.getProxiedBy() != null) {
                        sso.put(SsoSessionAttributeKeys.IS_PROXIED.toString(), Boolean.TRUE);
                        sso.put(SsoSessionAttributeKeys.PROXIED_BY.toString(), tgt.getProxiedBy().getId());
                    } else {
                        sso.put(SsoSessionAttributeKeys.IS_PROXIED.toString(), Boolean.FALSE);
                    }
                }
                sso.put(SsoSessionAttributeKeys.AUTHENTICATED_SERVICES.toString(), tgt.getServices());
                activeSessions.add(sso);
            });
        return activeSessions;
    }
}
public class RelativeToEasterSundayParser { /** * Returns the easter Sunday for a given year . * @ param nYear * The year to retrieve Easter Sunday date * @ return Easter Sunday . */ public static ChronoLocalDate getEasterSunday ( final int nYear ) { } }
return nYear <= CPDT . LAST_JULIAN_YEAR ? getJulianEasterSunday ( nYear ) : getGregorianEasterSunday ( nYear ) ;
public class AWSServiceCatalogClient {
    /**
     * Lists the specified TagOptions or all TagOptions.
     *
     * @param request the ListTagOptions request
     * @return result of the ListTagOptions operation returned by the service
     * @throws TagOptionNotMigratedException
     *         if the TagOptions migration process has not been performed for
     *         this account; perform the migration in the AWS console before retrying
     * @throws InvalidParametersException
     *         if one or more parameters provided to the operation are not valid
     * @sample AWSServiceCatalog.ListTagOptions
     */
    @Override
    public ListTagOptionsResult listTagOptions(ListTagOptionsRequest request) {
        // Run client-side pre-processing (request handlers) before dispatching.
        request = beforeClientExecution(request);
        return executeListTagOptions(request);
    }
}
public class VirtualNetworkGatewaysInner {
    /**
     * Sets the vpnclient ipsec policy for the P2S client of a virtual network
     * gateway in the specified resource group through the Network resource provider.
     *
     * @param resourceGroupName the name of the resource group
     * @param virtualNetworkGatewayName the name of the virtual network gateway
     * @param vpnclientIpsecParams parameters for the set-vpnclient-ipsec-parameters operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the VpnClientIPsecParametersInner object if successful
     */
    public VpnClientIPsecParametersInner setVpnclientIpsecParameters(String resourceGroupName, String virtualNetworkGatewayName, VpnClientIPsecParametersInner vpnclientIpsecParams) {
        // Blocks on the async long-running operation and unwraps the final response body.
        return setVpnclientIpsecParametersWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayName, vpnclientIpsecParams).toBlocking().last().body();
    }
}
public class AddUser { /** * Find the command - line arg corresponding to the parameter { @ code arg } . * @ param arg * @ return The corresponding arg or null . */ private static CommandLineArgument findCommandLineOption ( String arg ) { } }
for ( CommandLineArgument commandLineArgument : CommandLineArgument . values ( ) ) { if ( commandLineArgument . match ( arg ) ) { return commandLineArgument ; } } return null ;
public class JKDefaultExceptionHandler { /** * ( non - Javadoc ) * @ see com . jk . exceptions . ExceptionHandler # handle ( java . lang . Throwable , boolean ) */ @ Override public void handle ( final T throwable , final boolean throwRuntimeException ) { } }
this . logger . error ( throwable ) ; if ( throwRuntimeException ) { if ( throwable instanceof RuntimeException ) { throw ( RuntimeException ) throwable ; } throw new RuntimeException ( throwable ) ; }
public class WalletTool {
    /**
     * Sets up all objects needed for network communication but does not bring up the peers.
     * Idempotent: returns immediately if the block store is already initialized.
     */
    private static void setup() throws BlockStoreException {
        if (store != null) return;  // Already done.
        // Will create a fresh chain if one doesn't exist or there is an issue with this one.
        boolean reset = !chainFileName.exists();
        if (reset) {
            // No chain, so reset the wallet as we will be downloading from scratch.
            System.out.println("Chain file is missing so resetting the wallet.");
            reset();
        }
        if (mode == ValidationMode.SPV) {
            store = new SPVBlockStore(params, chainFileName);
            if (reset) {
                try {
                    // Skip ahead using bundled checkpoints so we don't sync headers from genesis.
                    CheckpointManager.checkpoint(params, CheckpointManager.openStream(params), store,
                            wallet.getEarliestKeyCreationTime());
                    StoredBlock head = store.getChainHead();
                    System.out.println("Skipped to checkpoint " + head.getHeight() + " at "
                            + Utils.dateTimeFormat(head.getHeader().getTimeSeconds() * 1000));
                } catch (IOException x) {
                    // Checkpointing is an optimization only; proceed without it.
                    System.out.println("Could not load checkpoints: " + x.getMessage());
                }
            }
            chain = new BlockChain(params, wallet, store);
        } else if (mode == ValidationMode.FULL) {
            store = new H2FullPrunedBlockStore(params, chainFileName.getAbsolutePath(), 5000);
            chain = new FullPrunedBlockChain(params, wallet, (FullPrunedBlockStore) store);
        }
        // This will ensure the wallet is saved when it changes.
        wallet.autosaveToFile(walletFile, 5, TimeUnit.SECONDS, null);
        if (peerGroup == null) {
            peerGroup = new PeerGroup(params, chain);
        }
        peerGroup.setUserAgent("WalletTool", "1.0");
        // Regtest networks have few peers, so a single connection suffices for broadcast.
        if (params == RegTestParams.get())
            peerGroup.setMinBroadcastConnections(1);
        peerGroup.addWallet(wallet);
        if (options.has("peers")) {
            // Explicit peer list: comma-separated host names / IP addresses.
            String peersFlag = (String) options.valueOf("peers");
            String[] peerAddrs = peersFlag.split(",");
            for (String peer : peerAddrs) {
                try {
                    peerGroup.addAddress(new PeerAddress(params, InetAddress.getByName(peer)));
                } catch (UnknownHostException e) {
                    System.err.println("Could not understand peer domain name/IP address: " + peer + ": " + e.getMessage());
                    System.exit(1);
                }
            }
        } else {
            peerGroup.setRequiredServices(0);
        }
    }
}
public class ListCreateAccountStatusRequestMarshaller {
    /**
     * Marshalls the given request object's fields into the protocol marshaller.
     *
     * @param listCreateAccountStatusRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(ListCreateAccountStatusRequest listCreateAccountStatusRequest, ProtocolMarshaller protocolMarshaller) {
        if (listCreateAccountStatusRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its protocol marshalling location.
            protocolMarshaller.marshall(listCreateAccountStatusRequest.getStates(), STATES_BINDING);
            protocolMarshaller.marshall(listCreateAccountStatusRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listCreateAccountStatusRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StringHelper {
    /**
     * Gets everything from the string after, and excluding, the last occurrence
     * of the passed search string.
     *
     * @param sStr the source string; may be <code>null</code>
     * @param sSearch the string to search for; may be <code>null</code>
     * @return <code>null</code> if the passed string does not contain the search
     *         string; if the search string is empty, the input string is
     *         returned unmodified
     */
    @Nullable
    public static String getFromLastExcl(@Nullable final String sStr, @Nullable final String sSearch) {
        // 'false' selects the exclusive variant — presumably "do not include the
        // matched search string in the result"; confirm against _getFromLast.
        return _getFromLast(sStr, sSearch, false);
    }
}
public class ClientAliasedDiscoveryConfigUtils { /** * Gets the { @ link AliasedDiscoveryConfig } from { @ code config } by { @ code tag } . */ public static AliasedDiscoveryConfig getConfigByTag ( ClientNetworkConfig config , String tag ) { } }
if ( "aws" . equals ( tag ) ) { return config . getAwsConfig ( ) ; } else if ( "gcp" . equals ( tag ) ) { return config . getGcpConfig ( ) ; } else if ( "azure" . equals ( tag ) ) { return config . getAzureConfig ( ) ; } else if ( "kubernetes" . equals ( tag ) ) { return config . getKubernetesConfig ( ) ; } else if ( "eureka" . equals ( tag ) ) { return config . getEurekaConfig ( ) ; } else { throw new InvalidConfigurationException ( String . format ( "Invalid configuration tag: '%s'" , tag ) ) ; }
public class SqlLineHighlighter {
    /**
     * Marks single/double quoted string positions in a sqlline command.
     *
     * <p>Assumes the input is a sqlline command, not SQL itself.
     *
     * @param line line with the sqlline command to scan
     * @param quoteBitSet BitSet receiving positions inside single-quoted runs
     * @param doubleQuoteBitSet BitSet receiving positions inside double-quoted runs
     *
     * <p>For example,
     * <blockquote><code>handleQuotesInCommands("!set csvDelimiter '"'",
     * quoteBitSet, doubleQuoteBitSet);</code></blockquote>
     * <p>should mark a single-quoted string only, as the double quote is
     * inside the quoted run.
     */
    void handleQuotesInCommands(String line, BitSet quoteBitSet, BitSet doubleQuoteBitSet) {
        int doubleQuoteStart = -1;  // index of the opening '"', or -1 when outside a double-quoted run
        int quoteStart = -1;        // index of the opening '\'', or -1 when outside a single-quoted run
        for (int pos = 0; pos < line.length(); pos++) {
            char ch = line.charAt(pos);
            if (doubleQuoteStart > -1) {
                // Inside a double-quoted run: mark every position; close on the matching quote.
                doubleQuoteBitSet.set(pos);
                if (ch == '"') {
                    doubleQuoteStart = -1;
                }
                continue;
            } else if (quoteStart > -1) {
                // Inside a single-quoted run: mark every position; close on the matching quote.
                quoteBitSet.set(pos);
                if (ch == '\'') {
                    quoteStart = -1;
                }
                continue;
            }
            // Here both doubleQuoteStart and quoteStart are -1 (outside any quoted run).
            if (ch == '"') {
                doubleQuoteBitSet.set(pos);
                doubleQuoteStart = pos;
            }
            // The guard prevents the same character position from also opening a
            // single-quoted run when a double quote was just opened above.
            if (doubleQuoteStart == -1 && ch == '\'') {
                quoteBitSet.set(pos);
                quoteStart = pos;
            }
        }
    }
}
public class backup_file { /** * Use this API to fetch filtered set of backup _ file resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static backup_file [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
backup_file obj = new backup_file ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; backup_file [ ] response = ( backup_file [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class Caster { /** * cast a Object to a DateTime Object * @ param o Object to cast * @ param alsoNumbers define if also numbers will casted to a datetime value * @ param tz * @ return casted DateTime Object * @ throws PageException */ public static DateTime toDate ( Object o , boolean alsoNumbers , TimeZone tz ) throws PageException { } }
return DateCaster . toDateAdvanced ( o , alsoNumbers ? DateCaster . CONVERTING_TYPE_OFFSET : DateCaster . CONVERTING_TYPE_NONE , tz ) ;
public class ConverterRegistry {
    /**
     * Registers the default converters, replacing the current default converter map.
     *
     * @return this registry, for call chaining
     */
    private ConverterRegistry defaultConverter() {
        defaultConverterMap = new ConcurrentHashMap<>();
        // Primitive type converters
        defaultConverterMap.put(int.class, new PrimitiveConverter(int.class));
        defaultConverterMap.put(long.class, new PrimitiveConverter(long.class));
        defaultConverterMap.put(byte.class, new PrimitiveConverter(byte.class));
        defaultConverterMap.put(short.class, new PrimitiveConverter(short.class));
        defaultConverterMap.put(float.class, new PrimitiveConverter(float.class));
        defaultConverterMap.put(double.class, new PrimitiveConverter(double.class));
        defaultConverterMap.put(char.class, new PrimitiveConverter(char.class));
        defaultConverterMap.put(boolean.class, new PrimitiveConverter(boolean.class));
        // Wrapper type converters
        defaultConverterMap.put(Number.class, new NumberConverter());
        defaultConverterMap.put(Integer.class, new NumberConverter(Integer.class));
        defaultConverterMap.put(AtomicInteger.class, new NumberConverter(AtomicInteger.class));  // since 3.0.8
        defaultConverterMap.put(Long.class, new NumberConverter(Long.class));
        defaultConverterMap.put(AtomicLong.class, new NumberConverter(AtomicLong.class));  // since 3.0.8
        defaultConverterMap.put(Byte.class, new NumberConverter(Byte.class));
        defaultConverterMap.put(Short.class, new NumberConverter(Short.class));
        defaultConverterMap.put(Float.class, new NumberConverter(Float.class));
        defaultConverterMap.put(Double.class, new NumberConverter(Double.class));
        defaultConverterMap.put(Character.class, new CharacterConverter());
        defaultConverterMap.put(Boolean.class, new BooleanConverter());
        defaultConverterMap.put(AtomicBoolean.class, new AtomicBooleanConverter());  // since 3.0.8
        defaultConverterMap.put(BigDecimal.class, new NumberConverter(BigDecimal.class));
        defaultConverterMap.put(BigInteger.class, new NumberConverter(BigInteger.class));
        defaultConverterMap.put(CharSequence.class, new StringConverter());
        defaultConverterMap.put(String.class, new StringConverter());
        // URI and URL
        defaultConverterMap.put(URI.class, new URIConverter());
        defaultConverterMap.put(URL.class, new URLConverter());
        // Date and time types
        defaultConverterMap.put(Calendar.class, new CalendarConverter());
        defaultConverterMap.put(java.util.Date.class, new DateConverter(java.util.Date.class));
        defaultConverterMap.put(DateTime.class, new DateConverter(DateTime.class));
        defaultConverterMap.put(java.sql.Date.class, new DateConverter(java.sql.Date.class));
        defaultConverterMap.put(java.sql.Time.class, new DateConverter(java.sql.Time.class));
        defaultConverterMap.put(java.sql.Timestamp.class, new DateConverter(java.sql.Timestamp.class));
        // Reference types
        defaultConverterMap.put(WeakReference.class, new ReferenceConverter(WeakReference.class));  // since 3.0.8
        defaultConverterMap.put(SoftReference.class, new ReferenceConverter(SoftReference.class));  // since 3.0.8
        defaultConverterMap.put(AtomicReference.class, new AtomicReferenceConverter());  // since 3.0.8
        // Other types
        defaultConverterMap.put(Class.class, new ClassConverter());
        defaultConverterMap.put(TimeZone.class, new TimeZoneConverter());
        defaultConverterMap.put(Locale.class, new LocaleConverter());
        defaultConverterMap.put(Charset.class, new CharsetConverter());
        defaultConverterMap.put(Path.class, new PathConverter());
        defaultConverterMap.put(Currency.class, new CurrencyConverter());  // since 3.0.8
        defaultConverterMap.put(UUID.class, new UUIDConverter());  // since 4.0.10
        defaultConverterMap.put(StackTraceElement.class, new StackTraceElementConverter());  // since 4.5.2
        // JDK8+ date/time classes, loaded reflectively so older JDKs still work
        try {
            Class<?> clazz;
            for (String className : Jdk8DateConverter.supportClassNames) {
                clazz = ClassUtil.loadClass(className);
                defaultConverterMap.put(clazz, new Jdk8DateConverter(clazz));  // since 4.5.1
            }
        } catch (Exception e) {
            // ignore: on pre-JDK8 runtimes these converters are silently skipped
        }
        return this;
    }
}
public class RtfField {
    /**
     * Writes the end of the field instruction area.
     *
     * @param result the <code>OutputStream</code> to write to
     * @throws IOException if writing to the stream fails
     */
    private void writeFieldInstEnd(OutputStream result) throws IOException {
        // When an alternative (fallback) representation is configured, emit
        // its delimiter and keyword before closing the instruction group.
        if (fieldAlt) {
            result.write(DELIMITER);
            result.write(FIELD_ALT);
        }
        result.write(CLOSE_GROUP);
    }
}
public class ClassUtils { /** * < p > Gets the abbreviated name of a { @ code Class } . < / p > * @ param cls the class to get the abbreviated name for , may be { @ code null } * @ param len the desired length of the abbreviated name * @ return the abbreviated name or an empty string * @ throws IllegalArgumentException if len & lt ; = 0 * @ see # getAbbreviatedName ( String , int ) * @ since 3.4 */ public static String getAbbreviatedName ( final Class < ? > cls , final int len ) { } }
if ( cls == null ) { return StringUtils . EMPTY ; } return getAbbreviatedName ( cls . getName ( ) , len ) ;
public class AppServiceCertificateOrdersInner {
    /**
     * Gets the certificate associated with a certificate order.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param certificateOrderName name of the certificate order
     * @param name name of the certificate
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AppServiceCertificateResourceInner object
     */
    public Observable<ServiceResponse<AppServiceCertificateResourceInner>> getCertificateWithServiceResponseAsync(String resourceGroupName, String certificateOrderName, String name) {
        // Validate every required parameter before issuing the service call.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (certificateOrderName == null) {
            throw new IllegalArgumentException("Parameter certificateOrderName is required and cannot be null.");
        }
        if (name == null) {
            throw new IllegalArgumentException("Parameter name is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Dispatch the REST call and adapt the raw response into a typed service response,
        // converting any deserialization failure into an error observable.
        return service.getCertificate(resourceGroupName, certificateOrderName, name, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AppServiceCertificateResourceInner>>>() {
                @Override
                public Observable<ServiceResponse<AppServiceCertificateResourceInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AppServiceCertificateResourceInner> clientResponse = getCertificateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class JsTopicInterceptor { /** * The topic receive directly payload in json * @ param method * @ return */ boolean isJsonPayload ( Method method ) { } }
if ( null == method || ! method . isAnnotationPresent ( JsTopic . class ) ) { return false ; } JsTopic jsTopic = method . getAnnotation ( JsTopic . class ) ; return jsTopic . jsonPayload ( ) ;
public class JMElasticsearchSearchAndCount {
    /**
     * Gets a search request builder for the given query and aggregations.
     *
     * @param queryBuilder the query builder
     * @param aggregationBuilders the aggregation builders
     * @param indices the indices to search
     * @return the search request builder
     */
    public SearchRequestBuilder getSearchRequestBuilder(QueryBuilder queryBuilder, AggregationBuilder[] aggregationBuilders, String... indices) {
        // Delegate to the full overload; 'null' presumably stands for "no types
        // filter" — confirm against the overload's parameter list.
        return getSearchRequestBuilder(indices, null, queryBuilder, aggregationBuilders);
    }
}
public class DescribeElasticLoadBalancersResult { /** * A list of < code > ElasticLoadBalancer < / code > objects that describe the specified Elastic Load Balancing instances . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setElasticLoadBalancers ( java . util . Collection ) } or { @ link # withElasticLoadBalancers ( java . util . Collection ) } * if you want to override the existing values . * @ param elasticLoadBalancers * A list of < code > ElasticLoadBalancer < / code > objects that describe the specified Elastic Load Balancing * instances . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeElasticLoadBalancersResult withElasticLoadBalancers ( ElasticLoadBalancer ... elasticLoadBalancers ) { } }
if ( this . elasticLoadBalancers == null ) { setElasticLoadBalancers ( new com . amazonaws . internal . SdkInternalList < ElasticLoadBalancer > ( elasticLoadBalancers . length ) ) ; } for ( ElasticLoadBalancer ele : elasticLoadBalancers ) { this . elasticLoadBalancers . add ( ele ) ; } return this ;
public class JapaneseDate { /** * Checks if the specified field is supported . * This checks if this date can be queried for the specified field . * If false , then calling the { @ link # range ( TemporalField ) range } and * { @ link # get ( TemporalField ) get } methods will throw an exception . * If the field is a { @ link ChronoField } then the query is implemented here . * The supported fields are : * < ul > * < li > { @ code DAY _ OF _ WEEK } * < li > { @ code DAY _ OF _ MONTH } * < li > { @ code DAY _ OF _ YEAR } * < li > { @ code EPOCH _ DAY } * < li > { @ code MONTH _ OF _ YEAR } * < li > { @ code PROLEPTIC _ MONTH } * < li > { @ code YEAR _ OF _ ERA } * < li > { @ code YEAR } * < li > { @ code ERA } * < / ul > * All other { @ code ChronoField } instances will return false . * If the field is not a { @ code ChronoField } , then the result of this method * is obtained by invoking { @ code TemporalField . isSupportedBy ( TemporalAccessor ) } * passing { @ code this } as the argument . * Whether the field is supported is determined by the field . * @ param field the field to check , null returns false * @ return true if the field is supported on this date , false if not */ @ Override public boolean isSupported ( TemporalField field ) { } }
if ( field == ChronoField . ALIGNED_DAY_OF_WEEK_IN_MONTH || field == ChronoField . ALIGNED_DAY_OF_WEEK_IN_YEAR || field == ChronoField . ALIGNED_WEEK_OF_MONTH || field == ChronoField . ALIGNED_WEEK_OF_YEAR ) { return false ; } return super . isSupported ( field ) ;
public class RelationGraph {
    /**
     * Gets the sub-tree nodes reachable from {@code node} by walking incoming
     * relation edges breadth-first.
     *
     * @param node the root node
     * @param childRelations relation names to accept on the first level;
     *        empty/null accepts every relation
     * @return the set of collected child annotations
     */
    public Set<Annotation> getSubTreeNodes(@NonNull Annotation node, String... childRelations) {
        Set<Annotation> children = new HashSet<>();
        // An empty target set means "accept every relation" (see 'keep' below).
        Set<String> targetRel = childRelations == null ? Collections.emptySet() : Sets.asSet(Arrays.asList(childRelations));
        Predicate<RelationEdge> keep = edge -> targetRel.size() == 0 || targetRel.contains(edge.getRelation());
        // Seed the BFS queue with the root's incoming edges, filtered by relation.
        Queue<RelationEdge> queue = new LinkedList<>(getInEdges(node).stream().filter(keep).collect(Collectors.toList()));
        while (!queue.isEmpty()) {
            RelationEdge n = queue.remove();
            // "relcl" / "parataxis" edges terminate the walk along that branch.
            if (!"relcl".equals(n.getRelation()) && !"parataxis".equals(n.getRelation())) {
                children.add(n.getFirstVertex());
                // NOTE(review): only the first level is filtered by 'keep'; deeper
                // levels are filtered solely by the relcl/parataxis check and the
                // visited-set test — confirm this asymmetry is intended.
                queue.addAll(getInEdges(n.getFirstVertex()).stream().filter(e -> !children.contains(e.getFirstVertex())).collect(Collectors.toSet()));
            }
        }
        return children;
    }
}
public class BetterCFGBuilder2 { /** * Inline all JSR subroutines into the top - level subroutine . This produces a * complete CFG for the entire method , in which all JSR subroutines are * inlined . * @ return the CFG for the method */ private CFG inlineAll ( ) throws CFGBuilderException { } }
CFG result = new CFG ( ) ; Context rootContext = new Context ( null , topLevelSubroutine , result ) ; rootContext . mapBlock ( topLevelSubroutine . getEntry ( ) , result . getEntry ( ) ) ; rootContext . mapBlock ( topLevelSubroutine . getExit ( ) , result . getExit ( ) ) ; BasicBlock resultStartBlock = rootContext . getBlock ( topLevelSubroutine . getStartBlock ( ) ) ; result . createEdge ( result . getEntry ( ) , resultStartBlock , START_EDGE ) ; inline ( rootContext ) ; return result ;
public class StaticConfiguration {
    /**
     * Gets the QTaste root directory from the QTASTE_ROOT environment variable,
     * canonicalized to an absolute path. Terminates the process when the
     * variable is undefined or does not resolve to a valid path.
     *
     * @return the QTaste root directory
     */
    private static String getQTasteRoot() {
        String qtasteRoot = System.getenv("QTASTE_ROOT");
        if (qtasteRoot == null) {
            System.err.println("QTASTE_ROOT environment variable is not defined");
            System.exit(1);
        }
        try {
            // Normalize to a canonical path (resolves symlinks, ".." etc.).
            qtasteRoot = new File(qtasteRoot).getCanonicalPath();
        } catch (IOException e) {
            System.err.println("QTASTE_ROOT environment variable is invalid (" + qtasteRoot + ")");
            System.exit(1);
        }
        return qtasteRoot;
    }
}
public class AdminToolLog4j2Util { /** * closes output stream and removes appender from loggers * @ param appenderName * @ throws IOException * @ since 1.1.1 */ public void closeOutputStreamAppender ( String appenderName ) throws IOException { } }
if ( null == appenderName ) { return ; } final LoggerContext ctx = ( LoggerContext ) LogManager . getContext ( false ) ; final Configuration config = ctx . getConfiguration ( ) ; AdminToolLog4j2OutputStream baos = outputStreams . get ( appenderName ) ; if ( null != config && null != config . getAppenders ( ) ) { OutputStreamAppender appender = config . getAppender ( appenderName ) ; if ( null != appender ) { appender . stop ( ) ; Collection < String > parentLoggerNames = getParentLoggerNames ( ) ; for ( String configuredLoggerName : getAllLoggerNames ( ) ) { LoggerConfig loggerConfig = config . getLoggerConfig ( configuredLoggerName ) ; loggerConfig . removeAppender ( appender . getName ( ) ) ; if ( null != baos . getOriginalLevel ( configuredLoggerName ) ) { changeLogger ( configuredLoggerName , baos . getOriginalLevel ( configuredLoggerName ) , parentLoggerNames . contains ( configuredLoggerName ) ) ; } } // unsure about , if removing the appender from logger config if it gets also removed from logger instance too . . . removeAppender ( appender , getParentLoggers ( ) ) ; removeAppender ( appender , getLoggers ( ) ) ; appender . getManager ( ) . getByteBuffer ( ) . clear ( ) ; ctx . updateLoggers ( ) ; } } if ( null != baos ) { try { baos . close ( ) ; baos . clearOriginalLevels ( ) ; } catch ( Exception ignore ) { } finally { outputStreams . remove ( appenderName ) ; } }
public class Mapper1_0 {
    /**
     * Mapping of a drop request to a certificate drop — not supported by this
     * mapper version; always returns a not-implemented error result.
     *
     * @see com.att.authz.certman.mapper.Mapper#toDrop(com.att.authz.env.AuthzTrans, java.lang.Object)
     */
    @Override
    public Result<CertDrop> toDrop(AuthzTrans trans, BaseRequest req) {
        return Result.err(Result.ERR_NotImplemented, "Not Implemented... yet");
    }
}
public class Identity {
    /**
     * OJB can handle only classes that declare at least one primary key
     * attribute; this method checks that condition.
     *
     * @param realObject the real object to check
     * @throws ClassNotPersistenceCapableException if no primary key is specified for the object's class
     */
    protected void checkForPrimaryKeys(final Object realObject) throws ClassNotPersistenceCapableException {
        // Without at least one PK value, OJB cannot manage this class.
        final boolean missingPk = (m_pkValues == null) || (m_pkValues.length == 0);
        if (missingPk) {
            throw createException("OJB needs at least one primary key attribute for class: ", realObject, null);
        }
        // arminw: should never happen
        // if (m_pkValues[0] instanceof ValueContainer)
        //     throw new OJBRuntimeException("Can't handle pk values of type " + ValueContainer.class.getName());
    }
}
public class JaxWsHttpServletRequestAdapter {
    /**
     * Returns the named multipart part of the wrapped request, running the call
     * inside the component's collaborator context.
     *
     * @see javax.servlet.http.HttpServletRequest#getPart(java.lang.String)
     */
    @Override
    public Part getPart(String arg0) throws IOException, ServletException {
        try {
            // Establish the component metadata context before delegating.
            collaborator.preInvoke(componentMetaData);
            return request.getPart(arg0);
        } finally {
            // Always restore the previous context, even when getPart throws.
            collaborator.postInvoke();
        }
    }
}
public class MultipartPostRequest { /** * Adds a file parameter to the request * @ param name * parameter name * @ param filename * the name of the file * @ param is * input stream to read the contents of the file from */ public void setParameter ( final String name , final String filename , final InputStream is ) throws IOException { } }
boundary ( ) ; writeName ( name ) ; write ( "; filename=\"" ) ; write ( filename ) ; write ( '"' ) ; newline ( ) ; write ( "Content-Type: " ) ; String type = URLConnection . guessContentTypeFromName ( filename ) ; if ( type == null ) { type = "application/octet-stream" ; } writeln ( type ) ; newline ( ) ; pipe ( is , os ) ; newline ( ) ;
public class ColumnInfo { /** * Get the value associated with this field from the object parameter either by getting from the field or calling * the get method . */ public T getValue ( Object obj ) throws IllegalAccessException , InvocationTargetException { } }
if ( field == null ) { @ SuppressWarnings ( "unchecked" ) T cast = ( T ) getMethod . invoke ( obj ) ; return cast ; } else { @ SuppressWarnings ( "unchecked" ) T cast = ( T ) field . get ( obj ) ; return cast ; }
public class RRDToolWriter { /** * Get a list of DsNames used to create the datasource . */ private List < String > getDsNames ( DsDef [ ] defs ) { } }
List < String > names = new ArrayList < > ( ) ; for ( DsDef def : defs ) { names . add ( def . getDsName ( ) ) ; } return names ;
public class UpdateRuleRequestMarshaller {
    /**
     * Marshalls the given request into the protocol stream, field by field.
     *
     * @param updateRuleRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-specific marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateRuleRequest updateRuleRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateRuleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Field order matters for the wire format: rule id, change token, updates
            protocolMarshaller.marshall(updateRuleRequest.getRuleId(), RULEID_BINDING);
            protocolMarshaller.marshall(updateRuleRequest.getChangeToken(), CHANGETOKEN_BINDING);
            protocolMarshaller.marshall(updateRuleRequest.getUpdates(), UPDATES_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CmsSecurityManager {
    /**
     * Moves a user to the given organizational unit.<p>
     *
     * @param context the current request context
     * @param orgUnit the organizational unit to add the principal to
     * @param user the user that is to be moved to the organizational unit
     * @throws CmsException if something goes wrong
     * @see org.opencms.security.CmsOrgUnitManager#setUsersOrganizationalUnit(CmsObject, String, String)
     */
    public void setUsersOrganizationalUnit(CmsRequestContext context, CmsOrganizationalUnit orgUnit, CmsUser user) throws CmsException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            // Caller must hold the administrator role for the target OU and be in an offline project
            checkRole(dbc, CmsRole.ADMINISTRATOR.forOrgUnit(orgUnit.getName()));
            checkOfflineProject(dbc);
            m_driverManager.setUsersOrganizationalUnit(dbc, orgUnit, user);
        } catch (Exception e) {
            // dbc.report attaches the localized message container; presumably it rethrows
            // the failure as a CmsException — confirm against CmsDbContext
            dbc.report(null, Messages.get().container(Messages.ERR_SET_USERS_ORGUNIT_2, orgUnit.getName(), user.getName()), e);
        } finally {
            // Always release the database context
            dbc.clear();
        }
    }
}
public class TransactionContext {
    /**
     * Aborts the given transaction, and rolls back all data set changes. If rollback fails,
     * the transaction is invalidated. If an exception is caught during rollback, the exception
     * is rethrown wrapped into a TransactionFailureException, after all remaining TransactionAwares
     * have completed rollback. If an existing exception is passed in, that exception is thrown in
     * either case, whether the rollback is successful or not. In other words, this method always
     * throws the first exception that it encounters.
     *
     * @param cause the original exception that caused the abort, or null
     * @throws TransactionFailureException for any exception that is encountered
     */
    public void abort(TransactionFailureException cause) throws TransactionFailureException {
        if (currentTx == null) {
            // might be called by some generic exception handler even though already aborted/finished - we allow that
            return;
        }
        try {
            boolean success = true;
            // Roll back every participant, even if earlier ones fail
            for (TransactionAware txAware : txAwares) {
                try {
                    if (!txAware.rollbackTx()) {
                        success = false;
                    }
                } catch (Throwable e) {
                    String message = String.format("Unable to roll back changes in transaction-aware '%s' for transaction %d. ", txAware.getTransactionAwareName(), currentTx.getTransactionId());
                    LOG.warn(message, e);
                    // Remember only the FIRST failure as the exception to throw
                    if (cause == null) {
                        cause = new TransactionFailureException(message, e);
                    }
                    success = false;
                }
            }
            if (success) {
                // Clean rollback everywhere: a plain abort suffices
                txClient.abort(currentTx);
            } else {
                // Some participant could not roll back: the transaction must be invalidated
                txClient.invalidate(currentTx.getTransactionId());
            }
            if (cause != null) {
                throw cause;
            }
        } finally {
            // The context is finished with this transaction either way
            currentTx = null;
        }
    }
}
public class GeomajasServiceImpl { /** * Register the given { @ link Map } with applicationId and mapId . * @ param applicationId * the application id . * @ param mapId * the map id . * @ param map * the map to register . */ public void registerMap ( String applicationId , String mapId , Map map ) { } }
HashMap < String , Map > mapMap ; if ( maps . containsKey ( applicationId ) ) { mapMap = maps . get ( applicationId ) ; if ( ! mapMap . containsKey ( mapId ) ) { mapMap . put ( mapId , map ) ; } } else { mapMap = new HashMap < String , Map > ( ) ; mapMap . put ( mapId , map ) ; maps . put ( applicationId , mapMap ) ; }
public class Ix {
    /**
     * Emits the elements of the other sequence if this sequence is empty.
     * The result's Iterator forwards calls of remove() to this' or the other's Iterator.
     *
     * @param other the other Iterable instance, not null
     * @return the new Ix instance
     * @throws NullPointerException if other is null
     * @since 1.0
     */
    public final Ix<T> switchIfEmpty(Iterable<? extends T> other) {
        // nullCheck throws NullPointerException with the given message when other is null
        return new IxSwitchIfEmpty<T>(this, nullCheck(other, "other is null"));
    }
}
public class Ledgers { /** * Closes the given LedgerHandle . * @ param handle The LedgerHandle to close . * @ throws DurableDataLogException If an exception occurred . The causing exception is wrapped inside it . */ static void close ( LedgerHandle handle ) throws DurableDataLogException { } }
try { Exceptions . handleInterrupted ( handle :: close ) ; } catch ( BKException bkEx ) { throw new DurableDataLogException ( String . format ( "Unable to close ledger %d." , handle . getId ( ) ) , bkEx ) ; }
public class RBBITableBuilder { void mergeRuleStatusVals ( ) { } }
// The basic outline of what happens here is this . . . // for each state in this state table // if the status tag list for this state is in the global statuses list // record where and // continue with the next state // else // add the tag list for this state to the global list . int n ; // Pre - load a single tag of { 0 } into the table . // We will need this as a default , for rule sets with no explicit tagging , // or with explicit tagging of { 0 } . if ( fRB . fRuleStatusVals . size ( ) == 0 ) { fRB . fRuleStatusVals . add ( Integer . valueOf ( 1 ) ) ; // Num of statuses in group fRB . fRuleStatusVals . add ( Integer . valueOf ( 0 ) ) ; // and our single status of zero SortedSet < Integer > s0 = new TreeSet < Integer > ( ) ; Integer izero = Integer . valueOf ( 0 ) ; fRB . fStatusSets . put ( s0 , izero ) ; SortedSet < Integer > s1 = new TreeSet < Integer > ( ) ; s1 . add ( izero ) ; fRB . fStatusSets . put ( s0 , izero ) ; } // For each state , check whether the state ' s status tag values are // already entered into the status values array , and add them if not . for ( n = 0 ; n < fDStates . size ( ) ; n ++ ) { RBBIStateDescriptor sd = fDStates . get ( n ) ; Set < Integer > statusVals = sd . fTagVals ; Integer arrayIndexI = fRB . fStatusSets . get ( statusVals ) ; if ( arrayIndexI == null ) { // This is the first encounter of this set of status values . // Add them to the statusSets map , This map associates // the set of status values with an index in the runtime status // values array . arrayIndexI = Integer . valueOf ( fRB . fRuleStatusVals . size ( ) ) ; fRB . fStatusSets . put ( statusVals , arrayIndexI ) ; // Add the new set of status values to the vector of values that // will eventually become the array used by the runtime engine . fRB . fRuleStatusVals . add ( Integer . valueOf ( statusVals . size ( ) ) ) ; fRB . fRuleStatusVals . addAll ( statusVals ) ; } // Save the runtime array index back into the state descriptor . sd . fTagsIdx = arrayIndexI . 
intValue ( ) ; }
public class SendTaskHeartbeatRequestMarshaller {
    /**
     * Marshalls the given request into the protocol stream.
     *
     * @param sendTaskHeartbeatRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-specific marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(SendTaskHeartbeatRequest sendTaskHeartbeatRequest, ProtocolMarshaller protocolMarshaller) {
        if (sendTaskHeartbeatRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the task token travels on the wire for a heartbeat
            protocolMarshaller.marshall(sendTaskHeartbeatRequest.getTaskToken(), TASKTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CssBoxPngRenderer { /** * Renders the URL and prints the result to the specified output stream in * the specified format . * @ param urlstring * the source URL * @ param out * output stream * @ param type * output type * @ return true in case of success , false otherwise * @ throws SAXException */ private boolean renderURL ( URL urlstring , URL baseUrl , OutputStream out ) throws IOException , SAXException { } }
// Open the network connection DocumentSource docSource = new DefaultDocumentSource ( urlstring ) ; // Parse the input document DOMSource parser = new DefaultDOMSource ( docSource ) ; Document doc = parser . parse ( ) ; // create the media specification MediaSpec media = new MediaSpec ( mediaType ) ; media . setDimensions ( windowSize . width , windowSize . height ) ; media . setDeviceDimensions ( windowSize . width , windowSize . height ) ; // Create the CSS analyzer DOMAnalyzer da = new DOMAnalyzer ( doc , baseUrl ) ; da . setMediaSpec ( media ) ; da . attributesToStyles ( ) ; // convert the HTML presentation attributes to // inline styles da . addStyleSheet ( baseUrl , CSSNorm . stdStyleSheet ( ) , DOMAnalyzer . Origin . AGENT ) ; // use the standard style sheet da . addStyleSheet ( null , CSSNorm . userStyleSheet ( ) , DOMAnalyzer . Origin . AGENT ) ; // use the additional style sheet da . addStyleSheet ( null , CSSNorm . formsStyleSheet ( ) , DOMAnalyzer . Origin . AGENT ) ; // render form fields using css da . getStyleSheets ( ) ; // load the author style sheets BrowserCanvas contentCanvas = new BrowserCanvas ( da . getRoot ( ) , da , baseUrl ) ; // contentCanvas . setAutoMediaUpdate ( false ) ; / / we have a correct media // / / specification , do not // / / update contentCanvas . getConfig ( ) . setClipViewport ( cropWindow ) ; contentCanvas . getConfig ( ) . setLoadImages ( loadImages ) ; contentCanvas . getConfig ( ) . setLoadBackgroundImages ( loadBackgroundImages ) ; contentCanvas . setPreferredSize ( new Dimension ( windowSize . width , 10 ) ) ; contentCanvas . setAutoSizeUpdate ( true ) ; setDefaultFonts ( contentCanvas . getConfig ( ) ) ; contentCanvas . createLayout ( windowSize ) ; contentCanvas . validate ( ) ; ImageIO . write ( contentCanvas . getImage ( ) , "png" , out ) ; // Image image = contentCanvas . createImage ( windowSize . width , // windowSize . height ) ; docSource . close ( ) ; return true ;
public class CmsNewResourceBuilder { /** * Creates a resource , but doesn ' t throw any exceptions . < p > * Exceptions will be passed to the onError method of registered callbacks . < p > * @ return the created resource */ public CmsResource safeCreateResource ( ) { } }
try { return createResource ( ) ; } catch ( Exception e ) { for ( I_Callback callback : m_callbacks ) { callback . onError ( e ) ; } return null ; }
public class NonBlockingBufferedOutputStream {
    /**
     * Writes <code>len</code> bytes from the specified byte array starting at
     * offset <code>off</code> to this buffered output stream.
     *
     * Ordinarily this method stores bytes from the given array into this stream's
     * buffer, flushing the buffer to the underlying output stream as needed. If
     * the requested length is at least as large as this stream's buffer, however,
     * then this method will flush the buffer and write the bytes directly to the
     * underlying output stream. Thus redundant <code>BufferedOutputStream</code>s
     * will not copy data unnecessarily.
     *
     * @param aBuf the data
     * @param nOfs the start offset in the data
     * @param nLen the number of bytes to write
     * @exception IOException if an I/O error occurs
     */
    @Override
    public void write(final byte[] aBuf, final int nOfs, final int nLen) throws IOException {
        if (nLen >= m_aBuf.length) {
            /*
             * If the request length exceeds the size of the output buffer, flush the
             * output buffer and then write the data directly. In this way buffered
             * streams will cascade harmlessly.
             */
            _flushBuffer();
            out.write(aBuf, nOfs, nLen);
            return;
        }
        // Flush first if the request does not fit into the remaining buffer space
        if (nLen > m_aBuf.length - m_nCount)
            _flushBuffer();
        // Buffer the data and advance the fill counter
        System.arraycopy(aBuf, nOfs, m_aBuf, m_nCount, nLen);
        m_nCount += nLen;
    }
}
public class ProducerService { /** * Publish a message to the specified producer . Use this only when publishing to a single * producer . * @ param channel The channel on which to publish the message . * @ param message Message to publish . * @ param producer The message producer . * @ param recipients Optional list of targeted recipients . * @ return True if successfully published . */ private boolean publish ( String channel , Message message , IMessageProducer producer , Recipient [ ] recipients ) { } }
if ( producer != null ) { prepare ( channel , message , recipients ) ; return producer . publish ( channel , message ) ; } return false ;
public class BufferInt { /** * Adds a new value . * @ param value new value */ public void addInt ( int value ) { } }
data [ endOffset ] = value ; endOffset ++ ; // Grow the buffer if needed if ( endOffset == data . length ) resize ( ) ;
public class ExampleColorBackend { /** * Creates a simple animation backend that cycles through a list of colors . * @ return the backend to use */ public static AnimationBackend createSampleColorAnimationBackend ( Resources resources ) { } }
// Get the animation duration in ms for each color frame int frameDurationMs = resources . getInteger ( android . R . integer . config_mediumAnimTime ) ; // Create and return the backend return new ExampleColorBackend ( SampleData . COLORS , frameDurationMs ) ;
public class BinaryString {
    /**
     * Reverses each character in the current string.
     *
     * @return a new string whose character order is the reverse of the current string
     */
    public BinaryString reverse() {
        ensureMaterialized();
        if (inFirstSegment()) {
            // Fast path: all bytes live in segment 0, so copy UTF-8 sequences
            // character by character from the front into mirrored positions at
            // the back of the result array.
            byte[] result = new byte[this.sizeInBytes];
            // position in byte
            int byteIdx = 0;
            while (byteIdx < sizeInBytes) {
                // Width in bytes of the UTF-8 character starting at byteIdx
                int charBytes = numBytesForFirstByte(getByteOneSegment(byteIdx));
                // Copy the whole character intact (bytes within a character keep their order)
                segments[0].get(offset + byteIdx, result, result.length - byteIdx - charBytes, charBytes);
                byteIdx += charBytes;
            }
            return BinaryString.fromBytes(result);
        } else {
            // Data spans multiple segments: fall back to the slow path
            return reverseSlow();
        }
    }
}
public class VisualizeImageData { /** * Renders a gray scale image using color values from cold to hot . * @ param disparity Input disparity image * @ param dst Where the image is rendered into . If null a new BufferedImage will be created and return . * @ param minDisparity Minimum disparity that can be computed * @ param maxDisparity Maximum disparity that can be computed * @ param invalidColor RGB value for invalid pixels . Try 0xFF < < 8 for green * @ return Rendered image . */ public static BufferedImage disparity ( ImageGray disparity , BufferedImage dst , int minDisparity , int maxDisparity , int invalidColor ) { } }
if ( dst == null ) dst = new BufferedImage ( disparity . getWidth ( ) , disparity . getHeight ( ) , BufferedImage . TYPE_INT_RGB ) ; if ( disparity . getDataType ( ) . isInteger ( ) ) { return disparity ( ( GrayI ) disparity , dst , minDisparity , maxDisparity , invalidColor ) ; } else if ( disparity instanceof GrayF32 ) { return disparity ( ( GrayF32 ) disparity , dst , minDisparity , maxDisparity , invalidColor ) ; } else { throw new RuntimeException ( "Add support" ) ; }
public class EmbeddedGobblinDistcp {
    /**
     * If {@link #delete()} is used, specifies that newly empty parent directories
     * should also be deleted.
     *
     * @return this builder, for fluent chaining
     */
    @CliObjectOption(description = "If deleting files on target, also delete newly empty parent directories.")
    public EmbeddedGobblinDistcp deleteEmptyParentDirectories() {
        // Flip the dataset-level flag; the value is stored as the string "true"
        this.setConfiguration(RecursiveCopyableDataset.DELETE_EMPTY_DIRECTORIES_KEY, Boolean.toString(true));
        return this;
    }
}
public class CmsUser { /** * Sets the zip code information of this user . < p > * @ param zipcode the zip code information to set */ public void setZipcode ( String zipcode ) { } }
checkZipCode ( zipcode ) ; if ( zipcode != null ) { zipcode = zipcode . toUpperCase ( ) ; } setAdditionalInfo ( CmsUserSettings . ADDITIONAL_INFO_ZIPCODE , zipcode ) ;
public class DefaultYAMLParser {
    /**
     * The generated parser (jay/yacc-style LALR skeleton).
     * Maintains a dynamic state stack and value stack, shifting tokens from the
     * scanner and reducing via the generated tables and the grammar action cases
     * in the switch below. Machine-generated: edit the grammar, not this method.
     *
     * @param yyLex scanner supplying tokens and semantic values.
     * @return result of the last reduction, if any.
     * @throws java.io.IOException if the scanner fails to read input.
     */
    public Object yyparse(yyInput yyLex) throws java.io.IOException {
        if (yyMax <= 0) yyMax = 256; // initial size
        int yyState = 0, yyStates[] = new int[yyMax]; // state stack
        Object yyVal = null, yyVals[] = new Object[yyMax]; // value stack
        int yyToken = -1; // current input
        int yyErrorFlag = 0; // # tokens to shift
        yyLoop: for (int yyTop = 0; ; ++yyTop) {
            if (yyTop >= yyStates.length) { // dynamically increase
                int[] i = new int[yyStates.length + yyMax];
                System.arraycopy(yyStates, 0, i, 0, yyStates.length);
                yyStates = i;
                Object[] o = new Object[yyVals.length + yyMax];
                System.arraycopy(yyVals, 0, o, 0, yyVals.length);
                yyVals = o;
            }
            yyStates[yyTop] = yyState;
            yyVals[yyTop] = yyVal;
            yyDiscarded: for (;;) { // discarding a token does not change stack
                int yyN;
                if ((yyN = yyDefRed[yyState]) == 0) { // else [default] reduce (yyN)
                    if (yyToken < 0) {
                        yyToken = yyLex.advance() ? yyLex.token() : 0;
                    }
                    // Shift if the current token is acceptable in this state
                    if ((yyN = yySindex[yyState]) != 0 && (yyN += yyToken) >= 0 && yyN < yyTable.length && yyCheck[yyN] == yyToken) {
                        yyState = yyTable[yyN]; // shift to yyN
                        yyVal = yyLex.value();
                        yyToken = -1;
                        if (yyErrorFlag > 0) --yyErrorFlag;
                        continue yyLoop;
                    }
                    // Otherwise try a reduction keyed on the current token
                    if ((yyN = yyRindex[yyState]) != 0 && (yyN += yyToken) >= 0 && yyN < yyTable.length && yyCheck[yyN] == yyToken)
                        yyN = yyTable[yyN]; // reduce (yyN)
                    else
                        switch (yyErrorFlag) {
                        case 0: // brand new error; deliberately falls through into recovery
                            yyerror("syntax error");
                        case 1:
                        case 2:
                            yyErrorFlag = 3;
                            // pop states until one accepts the error token
                            do {
                                if ((yyN = yySindex[yyStates[yyTop]]) != 0 && (yyN += yyErrorCode) >= 0 && yyN < yyTable.length && yyCheck[yyN] == yyErrorCode) {
                                    yyState = yyTable[yyN];
                                    yyVal = yyLex.value();
                                    continue yyLoop;
                                }
                            } while (--yyTop >= 0);
                            yyerror("irrecoverable syntax error");
                        case 3:
                            if (yyToken == 0) {
                                yyerror("irrecoverable syntax error at end-of-file");
                            }
                            yyToken = -1;
                            continue yyDiscarded; // leave stack alone
                        }
                }
                // Perform reduction yyN: compute the value-stack base of the rule
                int yyV = yyTop + 1 - yyLen[yyN];
                yyVal = yyDefault(yyV > yyTop ? null : yyVals[yyV]);
                switch (yyN) {
                // ACTIONS_BEGIN (semantic actions generated from the grammar file)
                case 1:
                    // line 18 "src/main/org/yecht/DefaultYAMLParser.y"
                    { parser.root = parser.addNode((Node) yyVals[0 + yyTop]); }
                    break;
                case 2:
                    // line 21 "src/main/org/yecht/DefaultYAMLParser.y"
                    { parser.root = parser.addNode((Node) yyVals[0 + yyTop]); }
                    break;
                case 3:
                    // line 24 "src/main/org/yecht/DefaultYAMLParser.y"
                    { parser.eof = true; }
                    break;
                case 7:
                    // line 34 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 8:
                    // line 38 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 9:
                    // line 42 "src/main/org/yecht/DefaultYAMLParser.y"
                    {
                        /*
                         * _Anchors_: The language binding must keep a separate symbol table
                         * for anchors. The actual ID in the symbol table is returned to the
                         * higher nodes, though.
                         */
                        yyVal = parser.addAnchor((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop]);
                    }
                    break;
                case 10:
                    // line 50 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 13:
                    // line 59 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 14:
                    // line 62 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Node n = NULL_NODE(parser); yyVal = n; }
                    break;
                case 15:
                    // line 67 "src/main/org/yecht/DefaultYAMLParser.y"
                    { if (parser.implicit_typing) { ImplicitScanner2.tryTagImplicit((Node) yyVals[0 + yyTop], parser.taguri_expansion); } yyVal = yyVals[0 + yyTop]; }
                    break;
                case 16:
                    // line 75 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 17:
                    // line 80 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 18:
                    // line 85 "src/main/org/yecht/DefaultYAMLParser.y"
                    {
                        /*
                         * _Anchors_: The language binding must keep a separate symbol table
                         * for anchors. The actual ID in the symbol table is returned to the
                         * higher nodes, though.
                         */
                        yyVal = parser.addAnchor((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop]);
                    }
                    break;
                case 25:
                    // line 118 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 26:
                    // line 123 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 27:
                    // line 128 "src/main/org/yecht/DefaultYAMLParser.y"
                    { if (parser.implicit_typing) { ImplicitScanner2.tryTagImplicit((Node) yyVals[0 + yyTop], parser.taguri_expansion); } yyVal = yyVals[0 + yyTop]; }
                    break;
                case 28:
                    // line 136 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addAnchor((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop]); }
                    break;
                case 29:
                    // line 140 "src/main/org/yecht/DefaultYAMLParser.y"
                    {
                        /*
                         * _Aliases_: The anchor symbol table is scanned for the anchor name.
                         * The anchor's ID in the language's symbol table is returned.
                         */
                        yyVal = parser.getAnchor((String) yyVals[0 + yyTop]);
                    }
                    break;
                case 30:
                    // line 148 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Node n = (Node) yyVals[0 + yyTop]; if (parser.taguri_expansion) { n.type_id = Parser.taguri(YAML.DOMAIN, "str"); } else { n.type_id = "str"; } yyVal = n; }
                    break;
                case 32:
                    // line 159 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 38:
                    // line 179 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 39:
                    // line 183 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 40:
                    // line 189 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addNode((Node) yyVals[0 + yyTop]); }
                    break;
                case 41:
                    // line 195 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 42:
                    // line 200 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 43:
                    // line 205 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 44:
                    // line 210 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 45:
                    // line 215 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addAnchor((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop]); }
                    break;
                case 46:
                    // line 219 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addAnchor((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop]); }
                    break;
                case 47:
                    // line 225 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.newSeq(yyVals[0 + yyTop]); }
                    break;
                case 48:
                    // line 229 "src/main/org/yecht/DefaultYAMLParser.y"
                    { ((Node) yyVals[-2 + yyTop]).seqAdd(yyVals[0 + yyTop]); yyVal = yyVals[-2 + yyTop]; }
                    break;
                case 49:
                    // line 234 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 50:
                    // line 243 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 51:
                    // line 247 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.allocSeq(); }
                    break;
                case 52:
                    // line 253 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.newSeq(parser.addNode((Node) yyVals[0 + yyTop])); }
                    break;
                case 53:
                    // line 257 "src/main/org/yecht/DefaultYAMLParser.y"
                    { ((Node) yyVals[-2 + yyTop]).seqAdd(parser.addNode((Node) yyVals[0 + yyTop])); yyVal = yyVals[-2 + yyTop]; }
                    break;
                case 56:
                    // line 271 "src/main/org/yecht/DefaultYAMLParser.y"
                    { applySeqInMap(parser, (Node) yyVals[-1 + yyTop]); yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 57:
                    // line 276 "src/main/org/yecht/DefaultYAMLParser.y"
                    { applySeqInMap(parser, (Node) yyVals[-1 + yyTop]); yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 58:
                    // line 283 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 59:
                    // line 288 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], parser.taguri_expansion); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 60:
                    // line 293 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 61:
                    // line 298 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Parser.addTransfer((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop], false); yyVal = yyVals[0 + yyTop]; }
                    break;
                case 62:
                    // line 303 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addAnchor((String) yyVals[-2 + yyTop], (Node) yyVals[0 + yyTop]); }
                    break;
                case 63:
                    // line 307 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = parser.addAnchor((String) yyVals[-1 + yyTop], (Node) yyVals[0 + yyTop]); }
                    break;
                case 65:
                    // line 314 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 67:
                    // line 323 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.newMap(parser.addNode((Node) yyVals[-2 + yyTop]), parser.addNode((Node) yyVals[0 + yyTop])); }
                    break;
                case 69:
                    // line 332 "src/main/org/yecht/DefaultYAMLParser.y"
                    {
                        if (((Node) yyVals[-2 + yyTop]).shortcut == null) {
                            ((Node) yyVals[-2 + yyTop]).shortcut = Node.newSeq(yyVals[0 + yyTop]);
                        } else {
                            ((Node) ((Node) yyVals[-2 + yyTop]).shortcut).seqAdd(yyVals[0 + yyTop]);
                        }
                        yyVal = yyVals[-2 + yyTop];
                    }
                    break;
                case 70:
                    // line 344 "src/main/org/yecht/DefaultYAMLParser.y"
                    { applySeqInMap(parser, (Node) yyVals[-2 + yyTop]); ((Node) yyVals[-2 + yyTop]).mapUpdate((Node) yyVals[0 + yyTop]); yyVals[0 + yyTop] = null; yyVal = yyVals[-2 + yyTop]; }
                    break;
                case 71:
                    // line 351 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 72:
                    // line 360 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.newMap(parser.addNode((Node) yyVals[-2 + yyTop]), parser.addNode((Node) yyVals[0 + yyTop])); }
                    break;
                case 73:
                    // line 368 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = yyVals[-1 + yyTop]; }
                    break;
                case 74:
                    // line 372 "src/main/org/yecht/DefaultYAMLParser.y"
                    { yyVal = Node.allocMap(); }
                    break;
                case 76:
                    // line 379 "src/main/org/yecht/DefaultYAMLParser.y"
                    { ((Node) yyVals[-2 + yyTop]).mapUpdate((Node) yyVals[0 + yyTop]); yyVals[0 + yyTop] = null; yyVal = yyVals[-2 + yyTop]; }
                    break;
                case 77:
                    // line 387 "src/main/org/yecht/DefaultYAMLParser.y"
                    { Node n = NULL_NODE(parser); yyVal = Node.newMap(parser.addNode((Node) yyVals[0 + yyTop]), parser.addNode(n)); }
                    break;
                // line 749 "-"
                // ACTIONS_END
                }
                // Pop the rule's right-hand side and go to the state below it
                yyTop -= yyLen[yyN];
                yyState = yyStates[yyTop];
                int yyM = yyLhs[yyN];
                if (yyState == 0 && yyM == 0) {
                    // Reduced the start symbol in the initial state: accept on EOF
                    yyState = yyFinal;
                    if (yyToken < 0) {
                        yyToken = yyLex.advance() ? yyLex.token() : 0;
                    }
                    if (yyToken == 0) {
                        return yyVal;
                    }
                    continue yyLoop;
                }
                // Goto transition on the reduced nonterminal
                if ((yyN = yyGindex[yyM]) != 0 && (yyN += yyState) >= 0 && yyN < yyTable.length && yyCheck[yyN] == yyState)
                    yyState = yyTable[yyN];
                else
                    yyState = yyDgoto[yyM];
                continue yyLoop;
            }
        }
    }
}
public class ClassUtils { /** * Return a list of class setters . Supports inheritance and overriding , that is when a method is found on the * lowest level of inheritance chain , no other method can override it . Supports inheritance * and doesn ' t return synthetic methods . * @ param beanClass class to be searched for * @ return a list of found getters */ public static List < Method > getClassSetters ( Class < ? > beanClass ) { } }
Map < String , Method > result = new HashMap < > ( ) ; Class < ? > currentClass = beanClass ; while ( currentClass != null && currentClass != Object . class ) { for ( Method method : currentClass . getDeclaredMethods ( ) ) { if ( ! method . isSynthetic ( ) && isSetter ( method ) ) { result . putIfAbsent ( method . getName ( ) , method ) ; } } currentClass = currentClass . getSuperclass ( ) ; } return new LinkedList < > ( result . values ( ) ) ;
public class StandardChecker { /** * Checkt to make sure an arg doesn ' t have duplicate modifiers */ private List < SemanticError > check ( DataType dataType , Constructor constructor , Arg arg ) { } }
logger . finest ( "Checking semantic constraints on data type " + dataType . name + ", constructor " + constructor . name ) ; final List < SemanticError > errors = new ArrayList < SemanticError > ( ) ; final Set < ArgModifier > modifiers = new HashSet < ArgModifier > ( ) ; for ( ArgModifier modifier : arg . modifiers ) { if ( modifiers . contains ( modifier ) ) { final String modName = ASTPrinter . print ( modifier ) ; errors . add ( _DuplicateModifier ( dataType . name , constructor . name , arg . name , modName ) ) ; } else { modifiers . add ( modifier ) ; } } return errors ;
public class ImportCatalogActionRelative { /** * get result . */ @ Override public void getResult ( final ContentHandler buf ) throws SAXException { } }
final String templateFilePath = paramTable . get ( FileGenerator . PARAM_TEMPLATE ) ; for ( final Value value : valueSet ) { buf . startElement ( "urn:oasis:names:tc:entity:xmlns:xml:catalog" , "nextCatalog" , "nextCatalog" , new AttributesBuilder ( ) . add ( "catalog" , FileUtils . getRelativeUnixPath ( templateFilePath , value . value ) ) . build ( ) ) ; buf . endElement ( "urn:oasis:names:tc:entity:xmlns:xml:catalog" , "nextCatalog" , "nextCatalog" ) ; }
public class CachedMonitorSource { /** * Get monitor information for given location . * First monitor information is looked up in cache . * Then , when not found , delegate is called . * @ param location Location * @ return Monitor information */ private MonitorInformation getMonitorInformation ( L location ) { } }
final K monitorKey = getLocationKey ( location ) ; MonitorInformation monitorInformation = monitorInformations . get ( monitorKey ) ; if ( monitorInformation == null ) { // Not found , let ' s call delegate if ( delegate . isMonitored ( location ) ) { monitorInformation = new MonitorInformation ( true , delegate . getMonitor ( location ) ) ; } else { monitorInformation = NULL_MONITOR_INFORMATION ; } monitorInformations . put ( monitorKey , monitorInformation ) ; } return monitorInformation ;
public class TableModel {
    /**
     * Adds a new column into the table model as the last column. You can optionally supply values
     * for the existing rows through the {@code newColumnValues}.
     *
     * @param label Label for the header of the new column
     * @param newColumnValues Optional values to assign to the existing rows, where the first element
     *        in the array will be the value of the first row and so on...
     * @return Itself
     */
    public synchronized TableModel<V> addColumn(String label, V[] newColumnValues) {
        // Appending is inserting at index == current column count.
        return insertColumn(getColumnCount(), label, newColumnValues);
    }
}
public class XmlJobDefExporter {
    /**
     * Exports several (given) job defs to a given stream as a JQM XML document.
     * The stream is not closed here.
     *
     * @param os         destination stream; must not be null (left open on return)
     * @param jobDefList job definitions to export; must be non-null and non-empty
     * @param cnx        database connection used to resolve job parameters and class loaders
     * @throws JqmXmlException if the XML cannot be written to the stream
     */
    public static void export(OutputStream os, List<JobDef> jobDefList, DbConn cnx) throws JqmXmlException {
        // Argument tests
        if (os == null) {
            throw new IllegalArgumentException("output stream cannot be null");
        }
        if (jobDefList == null || jobDefList.isEmpty()) {
            throw new IllegalArgumentException("job def list cannot be null or empty");
        }
        if (cnx == null) {
            throw new IllegalArgumentException("database connection cannot be null");
        }
        // Sort by jar path so that all job defs sharing a jar are contiguous; the
        // grouping loop below relies on this to emit one <jar> element per path.
        Collections.sort(jobDefList, new Comparator<JobDef>() {
            @Override
            public int compare(JobDef o1, JobDef o2) {
                return o1.getJarPath().compareTo(o2.getJarPath());
            }
        });
        // Create XML document
        Element root = new Element("jqm");
        Document document = new Document(root);
        Element jobDefinitions = null;
        String currentJarPath = null;
        Set<Cl> cls = new HashSet<>();
        for (JobDef j : jobDefList) {
            // New jar path encountered: open a new <jar> element with its own <jobdefinitions>.
            if (currentJarPath == null || !j.getJarPath().equals(currentJarPath)) {
                currentJarPath = j.getJarPath();
                Element jar = new Element("jar");
                addTextElementToParentElement(jar, "path", currentJarPath);
                addTextElementToParentElement(jar, "pathType", j.getPathType().toString());
                jobDefinitions = new Element("jobdefinitions");
                jar.addContent(jobDefinitions);
                root.addContent(jar);
            }
            Element jobDefinition = getJobDefinitionElement(j, cnx);
            jobDefinitions.addContent(jobDefinition);
            // Collect distinct class loaders for a single trailing <cl> section.
            // NOTE(review): the null check uses getClassLoader(cnx) but the value added is
            // getClassLoader() — confirm both overloads return the same Cl instance.
            if (j.getClassLoader(cnx) != null) {
                cls.add(j.getClassLoader());
            }
        }
        for (Cl cl : cls) {
            root.addContent(getClElement(cl));
        }
        // Done: output XML to stream.
        try {
            XMLOutputter out = new XMLOutputter(Format.getPrettyFormat());
            out.output(document, os);
        } catch (java.io.IOException e) {
            throw new JqmXmlException("Could not output XML to stream", e);
        }
    }
}
public class SiteJarResourceLoader { /** * Things won ' t ever be modified when loaded from the servlet context * because they came from the webapp . war file and if that is reloaded , * everything will be thrown away and started afresh . */ @ Override public boolean isSourceModified ( Resource resource ) { } }
SiteKey skey = new SiteKey ( resource . getName ( ) ) ; // if the resource is for the default site , it is never considered to // be modified if ( skey . siteId == SiteIdentifier . DEFAULT_SITE_ID ) { return false ; } else { // otherwise compare the last modified time of the loaded resource // with that of the associated site - specific jar file try { return ( resource . getLastModified ( ) < _loader . getLastModified ( skey . siteId ) ) ; } catch ( IOException ioe ) { Log . log . warning ( "Failure obtaining last modified time of site-specific jar file" , "siteId" , skey . siteId , "error" , ioe ) ; return false ; } }
public class ProtocolDataUnit {
    /**
     * Reads from the given <code>SocketChannel</code> all the necessary bytes to fill this PDU.
     *
     * @param sChannel <code>SocketChannel</code> to read from.
     * @return The number of bytes, possibly zero, or <code>-1</code> if the channel has reached end-of-stream
     * @throws IOException if an I/O error occurs.
     * @throws InternetSCSIException if any violation of the iSCSI-Standard emerges.
     * @throws DigestException if a mismatch of the digest exists.
     */
    public final int read(final SocketChannel sChannel) throws InternetSCSIException, IOException, DigestException {
        // read Basic Header Segment first to determine the total length of this
        // Protocol Data Unit.
        clear();
        final ByteBuffer bhs = ByteBuffer.allocate(BasicHeaderSegment.BHS_FIXED_SIZE);
        int len = 0;
        // Loop because a non-blocking/short read may deliver fewer bytes than requested.
        while (len < BasicHeaderSegment.BHS_FIXED_SIZE) {
            int lens = sChannel.read(bhs);
            if (lens == -1) {
                // The Channel was closed at the Target (e.g. the Target does
                // not support Multiple Connections)
                // throw new ClosedChannelException();
                return lens;
            }
            len += lens;
            LOGGER.trace("Receiving through SocketChannel: " + len + " of maximal " + BasicHeaderSegment.BHS_FIXED_SIZE);
        }
        bhs.flip();
        deserializeBasicHeaderSegment(bhs);
        // check for further reading: optional Additional Header Segments follow the BHS.
        if (getBasicHeaderSegment().getTotalAHSLength() > 0) {
            final ByteBuffer ahs = ByteBuffer.allocate(basicHeaderSegment.getTotalAHSLength());
            int ahsLength = 0;
            // NOTE(review): unlike the BHS loop, these loops do not handle a -1 (EOF)
            // return from read(); confirm the channel cannot close mid-PDU here.
            while (ahsLength < getBasicHeaderSegment().getTotalAHSLength()) {
                ahsLength += sChannel.read(ahs);
            }
            len += ahsLength;
            ahs.flip();
            deserializeAdditionalHeaderSegments(ahs);
        }
        // Finally the (padded) data segment, if the header announces one.
        if (basicHeaderSegment.getDataSegmentLength() > 0) {
            dataSegment = ByteBuffer.allocate(AbstractDataSegment.getTotalLength(basicHeaderSegment.getDataSegmentLength()));
            int dataSegmentLength = 0;
            while (dataSegmentLength < basicHeaderSegment.getDataSegmentLength()) {
                dataSegmentLength += sChannel.read(dataSegment);
            }
            len += dataSegmentLength;
            dataSegment.flip();
        }
        // print debug informations
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace(basicHeaderSegment.getParser().getShortInfo());
        }
        return len;
    }
}
public class DatanodeID {
    /**
     * {@inheritDoc}
     *
     * Serializes this datanode ID: name, then storage ID, then the info port.
     * The order must match the corresponding readFields/deserialization logic.
     */
    public void write(DataOutput out) throws IOException {
        UTF8.writeStringOpt(out, name);
        UTF8.writeStringOpt(out, storageID);
        // Port written as a short; assumes it fits in 16 bits.
        out.writeShort(infoPort);
    }
}
public class ProductPartitionTree {
    /**
     * Returns a new instance of this class by retrieving the product partitions of the
     * specified ad group. All parameters are required.
     *
     * @param services the AdWords services factory
     * @param session the authenticated AdWords session
     * @param adGroupId ID of the ad group whose partitions are fetched
     * @throws ApiException if the AdWords API rejects the request
     * @throws RemoteException on transport-level failure
     */
    public static ProductPartitionTree createAdGroupTree(AdWordsServicesInterface services, AdWordsSession session, Long adGroupId) throws ApiException, RemoteException {
        // Thin wrapper: the implementation class does the actual retrieval.
        return new ProductPartitionTree(ProductPartitionTreeImpl.createAdGroupTree(services, session, adGroupId));
    }
}
public class CSSWriter {
    /**
     * Write the CSS content to the passed writer. No specific charset is used.
     *
     * @param aCSS
     *        The CSS to write. May not be <code>null</code>.
     * @param aWriter
     *        The writer to write the text to. May not be <code>null</code>. Is
     *        automatically closed after the writing!
     * @throws IOException
     *         In case writing fails.
     * @throws IllegalStateException
     *         In case some elements cannot be written in the version supplied in
     *         the constructor.
     * @see #getCSSAsString(CascadingStyleSheet)
     */
    public void writeCSS(@Nonnull final CascadingStyleSheet aCSS, @Nonnull @WillClose final Writer aWriter) throws IOException {
        ValueEnforcer.notNull(aCSS, "CSS");
        ValueEnforcer.notNull(aWriter, "Writer");
        try {
            final boolean bOptimizedOutput = m_aSettings.isOptimizedOutput();
            final String sNewLineString = m_aSettings.getNewLineString();
            // Write file header (a block comment, one " * " line per header text line)
            if (m_bWriteHeaderText && StringHelper.hasText(m_sHeaderText)) {
                aWriter.write("/*");
                aWriter.write(sNewLineString);
                for (final String sLine : StringHelper.getExploded("\n", m_sHeaderText)) {
                    aWriter.write(" * " + sLine);
                    aWriter.write(sNewLineString);
                }
                aWriter.write(" */");
                aWriter.write(sNewLineString);
            }
            // Charset? Must be the first element before the import
            if (StringHelper.hasText(m_sContentCharset)) {
                aWriter.write("@charset \"" + m_sContentCharset + "\";");
                if (!bOptimizedOutput)
                    aWriter.write(sNewLineString);
            }
            // Import rules — CSS requires @import before all other rules.
            int nRulesEmitted = 0;
            final ICommonsList<CSSImportRule> aImportRules = aCSS.getAllImportRules();
            if (aImportRules.isNotEmpty())
                for (final CSSImportRule aImportRule : aImportRules) {
                    aWriter.write(aImportRule.getAsCSSString(m_aSettings));
                    ++nRulesEmitted;
                }
            // Namespace rules
            final ICommonsList<CSSNamespaceRule> aNamespaceRules = aCSS.getAllNamespaceRules();
            if (aNamespaceRules.isNotEmpty())
                for (final CSSNamespaceRule aNamespaceRule : aNamespaceRules) {
                    aWriter.write(aNamespaceRule.getAsCSSString(m_aSettings));
                    ++nRulesEmitted;
                }
            // Main CSS rules; blank line between rules only in non-optimized output.
            for (final ICSSTopLevelRule aRule : aCSS.getAllRules()) {
                final String sRuleCSS = aRule.getAsCSSString(m_aSettings);
                if (StringHelper.hasText(sRuleCSS)) {
                    if (!bOptimizedOutput && nRulesEmitted > 0)
                        aWriter.write(sNewLineString);
                    aWriter.write(sRuleCSS);
                    ++nRulesEmitted;
                }
            }
            // Write file footer (same block-comment format as the header)
            if (m_bWriteFooterText && StringHelper.hasText(m_sFooterText)) {
                aWriter.write("/*");
                aWriter.write(sNewLineString);
                for (final String sLine : StringHelper.getExploded('\n', m_sFooterText)) {
                    aWriter.write(" * " + sLine);
                    aWriter.write(sNewLineString);
                }
                aWriter.write(" */");
                aWriter.write(sNewLineString);
            }
        } finally {
            // Writer is always closed, as documented (@WillClose).
            StreamHelper.close(aWriter);
        }
    }
}
public class CmsWidgetDialogParameter { /** * Checks if a value for this widget base type with the given id is available . < p > * This should only be used if the base object is a collection . < p > * @ param index the index to check * @ return < code > true < / code > if a value for this widget base type with the given id is available */ public boolean hasValue ( int index ) { } }
if ( m_baseCollection instanceof List ) { return index < ( ( List < ? > ) m_baseCollection ) . size ( ) ; } else if ( m_baseCollection instanceof SortedMap ) { return index < ( ( SortedMap < ? , ? > ) m_baseCollection ) . size ( ) ; } return false ;
public class CcgParse { /** * Gets the lexicon entries for all terminal children of this * parse tree node , in left - to - right order . * @ return */ public List < LexiconEntryInfo > getSpannedLexiconEntries ( ) { } }
if ( isTerminal ( ) ) { return Arrays . asList ( lexiconEntry ) ; } else { List < LexiconEntryInfo > lexiconEntries = Lists . newArrayList ( ) ; lexiconEntries . addAll ( left . getSpannedLexiconEntries ( ) ) ; lexiconEntries . addAll ( right . getSpannedLexiconEntries ( ) ) ; return lexiconEntries ; }
public class MoreThrowables { /** * Throws { code t } if it is an instance of { @ link RuntimeException } or { @ link Error } . * < p > This is intended to mimic Guava ' s method by the same name , but which is unavailable to us * due to compatibility with older Guava versions . */ public static void throwIfUnchecked ( Throwable t ) { } }
Preconditions . checkNotNull ( t ) ; if ( t instanceof RuntimeException ) { throw ( RuntimeException ) t ; } if ( t instanceof Error ) { throw ( Error ) t ; }
public class ConnectionPool { /** * Sets the login timeout for any segments that are configured * with a { @ code DataSource } for connections . * @ param seconds The timeout in seconds . */ void setDataSourceLoginTimeout ( int seconds ) throws SQLException { } }
for ( ConnectionPoolSegment segment : segments ) { if ( segment . dbConnection . datasource != null ) { segment . dbConnection . datasource . setLoginTimeout ( seconds ) ; } }
public class Task {
    /**
     * Update resource information counters (CPU time, physical/virtual memory)
     * from the process resource calculator and the JVM thread tracker.
     */
    void updateResourceCounters() {
        // No calculator configured: nothing to measure.
        if (resourceCalculator == null) {
            return;
        }
        ProcResourceValues res = resourceCalculator.getProcResourceValues();
        long cpuTime = res.getCumulativeCpuTime();
        long pMem = res.getPhysicalMemorySize();
        long vMem = res.getVirtualMemorySize();
        long cpuJvmTime = this.jmxThreadInfoTracker.getCumulativeCPUTime();
        // Remove the CPU time consumed previously by JVM reuse, so counters
        // reflect only this task's consumption.
        cpuTime -= initCpuCumulativeTime;
        cpuJvmTime -= this.initJvmCpuCumulativeTime;
        counters.findCounter(Counter.CPU_MILLISECONDS).setValue(cpuTime);
        counters.findCounter(Counter.PHYSICAL_MEMORY_BYTES).setValue(pMem);
        counters.findCounter(Counter.VIRTUAL_MEMORY_BYTES).setValue(vMem);
        counters.findCounter(Counter.CPU_MILLISECONDS_JVM).setValue(cpuJvmTime);
        // Additionally record CPU under the phase-specific counter.
        if (isMapTask()) {
            // Mapper Task
            counters.findCounter(MapCounter.MAP_CPU_MILLISECONDS).setValue(cpuTime);
        } else {
            counters.findCounter(ReduceCounter.REDUCE_CPU_MILLISECONDS).setValue(cpuTime);
        }
    }
}
public class InstanceClient {
    /**
     * Deletes the specified Instance resource. For more information, see Stopping or Deleting an
     * Instance.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (InstanceClient instanceClient = InstanceClient.create()) {
     *   ProjectZoneInstanceName instance = ProjectZoneInstanceName.of("[PROJECT]", "[ZONE]", "[INSTANCE]");
     *   Operation response = instanceClient.deleteInstance(instance.toString());
     * </code></pre>
     *
     * @param instance Name of the instance resource to delete.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation deleteInstance(String instance) {
        // Wrap the name in a request object and delegate to the request-based overload.
        DeleteInstanceHttpRequest request = DeleteInstanceHttpRequest.newBuilder().setInstance(instance).build();
        return deleteInstance(request);
    }
}
public class NodeOverridesMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param nodeOverrides      the object to marshall; must not be null
     * @param protocolMarshaller target marshaller receiving each bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(NodeOverrides nodeOverrides, ProtocolMarshaller protocolMarshaller) {
        if (nodeOverrides == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written with its pre-computed marshalling binding.
            protocolMarshaller.marshall(nodeOverrides.getNumNodes(), NUMNODES_BINDING);
            protocolMarshaller.marshall(nodeOverrides.getNodePropertyOverrides(), NODEPROPERTYOVERRIDES_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class KeyEncoder {
    /**
     * Encodes the given Double object into exactly 8 bytes for descending order.
     * A non-canonical NaN value is used to represent null.
     *
     * @param value optional Double value to encode
     * @param dst destination for encoded bytes
     * @param dstOffset offset into destination array
     */
    public static void encodeDesc(Double value, byte[] dst, int dstOffset) {
        if (value == null) {
            // ~0x7fffffffffffffffL == 0x8000000000000000L: per the javadoc this bit
            // pattern plays the role of a non-canonical NaN marking null in
            // descending order. Written raw, bypassing doubleToLongBits.
            DataEncoder.encode(~0x7fffffffffffffffL, dst, dstOffset);
        } else {
            // Delegate to the primitive overload for real values.
            encodeDesc(value.doubleValue(), dst, dstOffset);
        }
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcMapConversion, lazily resolved from the registered
     * IFC4 EPackage by its fixed classifier index (354).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcMapConversion() {
        // Lazy lookup; cached after first resolution. Generated code — index must
        // match the package's classifier ordering.
        if (ifcMapConversionEClass == null) {
            ifcMapConversionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(354);
        }
        return ifcMapConversionEClass;
    }
}
public class FactorGraph { /** * Gets the indices of all factors which are adjacent ( that is , * share at least one variable ) with { @ code factorNum } . * @ param factorNum * @ return */ public Set < Integer > getAdjacentFactors ( int factorNum ) { } }
Set < Integer > adjacentFactors = Sets . newHashSet ( ) ; for ( Integer variableNum : factorVariableMap . get ( factorNum ) ) { adjacentFactors . addAll ( variableFactorMap . get ( variableNum ) ) ; } return adjacentFactors ;
public class PredictionsImpl {
    /**
     * Gets predictions for a given utterance, in the form of intents and entities.
     * The current maximum query size is 500 characters.
     *
     * @param appId The LUIS application ID (Guid).
     * @param query The utterance to predict.
     * @param resolveOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the LuisResult object
     */
    public Observable<ServiceResponse<LuisResult>> resolveWithServiceResponseAsync(String appId, String query, ResolveOptionalParameter resolveOptionalParameter) {
        // Required-parameter validation.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (appId == null) {
            throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
        }
        if (query == null) {
            throw new IllegalArgumentException("Parameter query is required and cannot be null.");
        }
        // Unpack optional parameters (each null when the holder object is absent)
        // and delegate to the fully-expanded overload.
        final Double timezoneOffset = resolveOptionalParameter != null ? resolveOptionalParameter.timezoneOffset() : null;
        final Boolean verbose = resolveOptionalParameter != null ? resolveOptionalParameter.verbose() : null;
        final Boolean staging = resolveOptionalParameter != null ? resolveOptionalParameter.staging() : null;
        final Boolean spellCheck = resolveOptionalParameter != null ? resolveOptionalParameter.spellCheck() : null;
        final String bingSpellCheckSubscriptionKey = resolveOptionalParameter != null ? resolveOptionalParameter.bingSpellCheckSubscriptionKey() : null;
        final Boolean log = resolveOptionalParameter != null ? resolveOptionalParameter.log() : null;
        return resolveWithServiceResponseAsync(appId, query, timezoneOffset, verbose, staging, spellCheck, bingSpellCheckSubscriptionKey, log);
    }
}
public class TracingSugar {
    /**
     * Convenience shorthand for <code>obj.trace.appendTemplate('''some template''')</code>.
     *
     * @param obj  model element whose trace node receives the template
     * @param code template content to append
     * @return the composite node with the template appended
     */
    public CompositeGeneratorNode trace(final EObject obj, final StringConcatenationClient code) {
        // Build the trace node for obj, then append the template to it.
        return this.appendTemplate(this.trace(obj), code);
    }
}
public class FileUtils {
    /**
     * Deprecated in favor of the CharSource version to force the user to define their encoding.
     * If you call this, it will use UTF_8 encoding.
     *
     * @deprecated use the CharSource overload and choose an explicit charset
     */
    @Deprecated
    public static ImmutableMultimap<Symbol, Symbol> loadSymbolMultimap(File multimapFile) throws IOException {
        // Delegate to the CharSource overload with a fixed UTF-8 charset.
        return loadSymbolMultimap(Files.asCharSource(multimapFile, Charsets.UTF_8));
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcRelConnectsPathElements, lazily resolved from the
     * registered IFC4 EPackage by its fixed classifier index (535).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcRelConnectsPathElements() {
        // Lazy lookup; cached after first resolution. Generated code — index must
        // match the package's classifier ordering.
        if (ifcRelConnectsPathElementsEClass == null) {
            ifcRelConnectsPathElementsEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(535);
        }
        return ifcRelConnectsPathElementsEClass;
    }
}
public class AnnotationSagaAnalyzer { /** * Checks all methods for saga annotations . */ private SagaHandlersMap determineMessageHandlers ( final Class < ? extends Saga > sagaType ) { } }
SagaHandlersMap handlerMap = new SagaHandlersMap ( sagaType ) ; Method [ ] methods = sagaType . getMethods ( ) ; for ( Method method : methods ) { if ( isHandlerMethod ( method ) ) { // method matches expected handler signature - > add to handler map Class < ? > handlerType = method . getParameterTypes ( ) [ 0 ] ; boolean isSagaStart = hasStartSagaAnnotation ( method ) ; handlerMap . add ( MessageHandler . reflectionInvokedHandler ( handlerType , method , isSagaStart ) ) ; } } return handlerMap ;
public class RemoteMongoCollectionImpl {
    /**
     * Finds a document in the collection and deletes it.
     *
     * @param filter the query filter
     * @return a task containing the resulting document
     */
    public Task<DocumentT> findOneAndDelete(final Bson filter) {
        // Run the proxy call on the dispatcher so the operation executes asynchronously.
        return dispatcher.dispatchTask(new Callable<DocumentT>() {
            @Override
            public DocumentT call() {
                return proxy.findOneAndDelete(filter);
            }
        });
    }
}
public class CommerceWarehouseItemLocalServiceUtil {
    /**
     * Updates the commerce warehouse item in the database or adds it if it does not yet exist.
     * Also notifies the appropriate model listeners.
     *
     * @param commerceWarehouseItem the commerce warehouse item
     * @return the commerce warehouse item that was updated
     */
    public static com.liferay.commerce.model.CommerceWarehouseItem updateCommerceWarehouseItem(com.liferay.commerce.model.CommerceWarehouseItem commerceWarehouseItem) {
        // Static facade: delegate to the looked-up local service instance.
        return getService().updateCommerceWarehouseItem(commerceWarehouseItem);
    }
}
public class Lock {
    /**
     * Releases the exclusive lock.
     *
     * @exception NoExclusiveLockException The caller does not hold the exclusive lock.
     */
    public void releaseExclusiveLock() throws NoExclusiveLockException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "releaseExclusiveLock", this);
        Thread currentThread = Thread.currentThread();
        synchronized (this) {
            // Only the holder may release; anything else is a caller error.
            if ((_threadHoldingExclusiveLock == null) || (!_threadHoldingExclusiveLock.equals(currentThread))) {
                if (tc.isEntryEnabled())
                    Tr.exit(tc, "releaseExclusiveLock", "NoExclusiveLockException");
                throw new NoExclusiveLockException();
            }
            _threadHoldingExclusiveLock = null;
            _threadRequestingExclusiveLock = null;
            // There may be threads waiting for the exclusive lock to be released. Notify them of this event.
            this.notifyAll();
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "releaseExclusiveLock");
    }
}
public class AbstractMessageSelector { /** * Reads message payload as String either from message object directly or from nested Citrus message representation . * @ param message * @ return */ String getPayloadAsString ( Message < ? > message ) { } }
if ( message . getPayload ( ) instanceof com . consol . citrus . message . Message ) { return ( ( com . consol . citrus . message . Message ) message . getPayload ( ) ) . getPayload ( String . class ) ; } else { return message . getPayload ( ) . toString ( ) ; }
public class PingsActivity {
    /**
     * Listeners section.
     *
     * Builds a click listener that opens the match view for the given target user.
     *
     * @param targetUserGeoPoint location of the user whose ping was clicked
     * @return listener launching the match view activity from the pings screen
     */
    public View.OnClickListener getPingDetailListener(final GeoPoint targetUserGeoPoint) {
        return new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // "1" — NOTE(review): meaning of this flag is not visible here; confirm
                // against Lifecycle.runMatchViewActivityFromPings.
                Lifecycle.runMatchViewActivityFromPings(PingsActivity.this, currentUserGeoPoint, targetUserGeoPoint, "1");
            }
        };
    }
}
public class TitlePaneCloseButtonPainter { /** * Create the gradient for the close button . * @ param s the shape to fill . * @ param top the top color . * @ param bottom the bottom color . * @ return the gradient . */ private Paint decodeCloseGradient ( Shape s , Color top , Color bottom ) { } }
Rectangle r = s . getBounds ( ) ; int width = r . width ; int height = r . height ; return createGradient ( r . x + width / 2 , r . y , r . x + width / 2 , r . y + height - 1 , new float [ ] { 0f , 1f } , new Color [ ] { top , bottom } ) ;
public class ElementPlugin {
    /**
     * Sets a value for a registered property.
     *
     * @param propInfo Property info.
     * @param value The value to set.
     * @throws Exception Unspecified exception.
     */
    @Override
    public void setPropertyValue(PropertyInfo propInfo, Object value) throws Exception {
        String propId = propInfo.getId();
        Object obj = registeredProperties == null ? null : registeredProperties.get(propId);
        if (obj == null) {
            // Property not yet registered: hold the value in a proxy and register it.
            obj = new PropertyProxy(propInfo, value);
            registerProperties(obj, propId);
        } else if (obj instanceof PropertyProxy) {
            // Already proxied: just update the stored value.
            ((PropertyProxy) obj).setValue(value);
        } else {
            // Real target object: apply the value directly; the third argument flags
            // whether the target is this plugin itself.
            propInfo.setPropertyValue(obj, value, obj == this);
        }
    }
}
public class DefaultVOMSProxyInfoBehaviour { /** * Returns a formatted list of KeyUsage */ private String getProxyKeyUsages ( ) { } }
StringBuilder usage = new StringBuilder ( ) ; Iterator < String > it = proxyKeyUsageList . iterator ( ) ; if ( it . hasNext ( ) ) usage . append ( it . next ( ) ) ; while ( it . hasNext ( ) ) { usage . append ( ", " + it . next ( ) ) ; } return usage . toString ( ) ;
public class SmartsFragmentExtractor { /** * Select the lowest ring number for use in SMARTS . * @ return ring number * @ throws IllegalStateException all ring numbers are used */ private int chooseRingNumber ( ) { } }
for ( int i = 1 ; i < rnums . length ; i ++ ) { if ( rnums [ i ] == 0 ) { rnums [ i ] = 1 ; return i ; } } throw new IllegalStateException ( "No more ring numbers available!" ) ;
public class Message { /** * Returns timestamp as an { @ link java . time . Instant } . * @ return a java . time . Instant representing the message timestamp */ Instant getInstant ( ) { } }
// long tsSeconds = TimeUnit . NANOSECONDS . toSeconds ( this . timestamp ) ; long tsSeconds = this . timestamp / 1000000000L ; long tsNanos = this . timestamp - ( tsSeconds * 1000000000L ) ; return Instant . ofEpochSecond ( tsSeconds ) . plusNanos ( tsNanos ) ;
public class BlacklistedDirectories {
    /**
     * Adds the parent directory of the file (or the file itself, if it is a directory)
     * to the set of unreadable directories.
     *
     * @return the blacklisted directory or null if nothing has been added to the list.
     */
    public static File maybeMarkUnreadable(File path) {
        File directory = getDirectory(path);
        // Set.add is true only on first insertion, so the warning is logged once
        // per directory and repeat calls return null.
        if (instance.unreadableDirectories.add(directory)) {
            logger.warn("Blacklisting {} for reads", directory);
            return directory;
        }
        return null;
    }
}
public class JsonFluentAssert {
    /**
     * Compares JSON for equality. The expected object is converted to JSON before comparison.
     * Ignores order of sibling nodes and whitespaces.
     *
     * Please note that if you pass a String, it's parsed as JSON which can lead to an unexpected
     * behavior. If you pass in "1" it is parsed as a JSON containing integer 1. If you compare it
     * with a string it fails due to a different type. If you want to pass in a real string you
     * have to quote it "\"1\"" or use {@link #isStringEqualTo(String)}. If the string parameter
     * is not a valid JSON, it is quoted automatically.
     *
     * @param expected expected value, converted to JSON for the comparison
     * @return {@code this} object.
     * @see #isStringEqualTo(String)
     */
    public JsonFluentAssert isEqualTo(Object expected) {
        // Compute the diff under the configured comparison rules, then raise an
        // assertion failure (with the stored description) if anything differs.
        Diff diff = createDiff(expected, configuration);
        diff.failIfDifferent(description);
        return this;
    }
}
public class FieldAccess {
    /**
     * Check that the Variables determined for the field and the value loaded/stored
     * are consistent with previous variable definitions.
     *
     * @param field      Variable representing the field
     * @param value      Variable representing the value loaded/stored
     * @param bindingSet previous definitions
     * @return a MatchResult containing an updated BindingSet if successful, or
     *         null if unsuccessful
     */
    protected MatchResult checkConsistent(Variable field, Variable value, BindingSet bindingSet) {
        // Thread the binding set through both definition checks; a null at any
        // step means the new binding contradicts an earlier one.
        BindingSet updated = addOrCheckDefinition(fieldVarName, field, bindingSet);
        if (updated != null) {
            updated = addOrCheckDefinition(valueVarName, value, updated);
        }
        return updated == null ? null : new MatchResult(this, updated);
    }
}
public class DateTimeFormatter { /** * Parses the default style of the { @ link DateFormat } from context labels . * @ param args The arguments of the macro . * @ return The id of the style . */ private int parseDateFormatStyle ( Arguments args ) { } }
if ( args . has ( SHORT_STYLE ) ) { return DateFormat . SHORT ; } else if ( args . has ( MEDIUM_STYLE ) ) { return DateFormat . MEDIUM ; } else if ( args . has ( LONG_STYLE ) ) { return DateFormat . LONG ; } else if ( args . has ( FULL_STYLE ) ) { return DateFormat . FULL ; } return DateFormat . DEFAULT ;
public class BPR {
    /**
     * The function determines the subbuckets after refining this bucket and recursively
     * calls the refinement function for the subbuckets.
     *
     * @param leftPtr  points to the leftmost suffix of the current bucket.
     * @param rightPtr points to the rightmost suffix of the current bucket.
     * @param offset   is the length of the common prefix of the suffixes (a multiple of q).
     * @param q        is the initial prefix length used for the bucket sort. It also determines
     *                 the increase of offset.
     */
    private void updatePtrAndRefineBuckets_SaBucket(int leftPtr, int rightPtr, int offset, int q) {
        /*
         * for all buckets with resp. pointer > rightPtr determine buckets via setting
         * sufPtrMap
         */
        int leftIntervalPtr = rightPtr;
        int rightIntervalPtr = rightPtr;
        int tmpPtr;
        // Sweep right-to-left, grouping runs of suffixes whose (suffix+offset)
        // pointer is identical into one subbucket each.
        while (leftPtr <= leftIntervalPtr && rightPtr < (tmpPtr = sufPtrMap[suffixArray[leftIntervalPtr] + offset])) {
            do {
                sufPtrMap[suffixArray[leftIntervalPtr]] = rightIntervalPtr;
                leftIntervalPtr--;
            } while (leftPtr <= leftIntervalPtr && sufPtrMap[suffixArray[leftIntervalPtr] + offset] == tmpPtr);
            rightIntervalPtr = leftIntervalPtr;
        }
        /*
         * since the sufPtrMap for the suffixes between leftPtr and rightPtr might change
         * in previous 2 steps
         */
        /*
         * determine the bucket concerning suffixptr + offset between leftPtr and rightPtr
         * separately
         */
        rightIntervalPtr = leftIntervalPtr;
        while (leftPtr <= leftIntervalPtr && leftPtr <= sufPtrMap[suffixArray[leftIntervalPtr] + offset] && sufPtrMap[suffixArray[leftIntervalPtr] + offset] <= rightPtr) {
            sufPtrMap[suffixArray[leftIntervalPtr]] = rightIntervalPtr;
            leftIntervalPtr--;
        }
        /*
         * for all buckets with resp. pointer + offset < leftPtr determine buckets via
         * setting sufPtrMap
         */
        /*
         * start with rightIntervalPtr which indicates leftend - 1 of bucket with resp.
         * pointer + offset between
         */
        /* leftPtr and rightPtr */
        int middleRightPtr = rightIntervalPtr;
        int middleLeftPtr = leftIntervalPtr;
        rightIntervalPtr = leftIntervalPtr;
        while (leftPtr <= leftIntervalPtr) {
            int tmpPtr2 = sufPtrMap[suffixArray[leftIntervalPtr] + offset];
            do {
                sufPtrMap[suffixArray[leftIntervalPtr]] = rightIntervalPtr;
                leftIntervalPtr--;
            } while (leftPtr <= leftIntervalPtr && sufPtrMap[suffixArray[leftIntervalPtr] + offset] == tmpPtr2);
            rightIntervalPtr = leftIntervalPtr;
        }
        // Depth for recursive refinement grows by q; if the bucket did not split
        // at all, skip ahead over the already-equal prefix.
        int newOffset = offset + q;
        if (sufPtrMap[suffixArray[leftPtr]] == rightPtr) {
            newOffset = computeDiffDepthBucket_SaBucket(leftPtr, rightPtr, newOffset, q);
        }
        // Refine every subbucket left of the middle group; size-2 and size-3
        // buckets use specialized sorters, larger ones recurse.
        int leftTmpPtr = leftPtr;
        while (leftTmpPtr < middleLeftPtr) {
            int rightTmpPtr = sufPtrMap[suffixArray[leftTmpPtr]];
            int tmpLong = rightTmpPtr - leftTmpPtr;
            if (tmpLong > 0) {
                if (tmpLong == 1) {
                    computeBucketSize2_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
                    leftTmpPtr = rightTmpPtr + 1;
                    continue;
                }
                if (tmpLong == 2) {
                    computeBucketSize3_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
                    leftTmpPtr = rightTmpPtr + 1;
                    continue;
                }
                insSortUpdateRecurse_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
            }
            leftTmpPtr = rightTmpPtr + 1;
        }
        /* for buckets refering to this bucket, the offset can be doubled */
        if (middleRightPtr > middleLeftPtr + 1) {
            if (middleRightPtr - middleLeftPtr == 2) {
                computeBucketSize2_SaBucket(middleLeftPtr + 1, middleRightPtr, Math.max(2 * offset, newOffset), q);
            } else {
                if (middleRightPtr - middleLeftPtr == 3) {
                    computeBucketSize3_SaBucket(middleLeftPtr + 1, middleRightPtr, Math.max(2 * offset, newOffset), q);
                } else {
                    insSortUpdateRecurse_SaBucket(middleLeftPtr + 1, middleRightPtr, Math.max(2 * offset, newOffset), q);
                }
            }
        }
        // Refine every subbucket right of the middle group, same size-based dispatch.
        leftTmpPtr = middleRightPtr + 1;
        while (leftTmpPtr < rightPtr) {
            int rightTmpPtr = sufPtrMap[suffixArray[leftTmpPtr]];
            int tmpLong = rightTmpPtr - leftTmpPtr;
            if (tmpLong > 0) {
                if (tmpLong == 1) {
                    computeBucketSize2_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
                    leftTmpPtr = rightTmpPtr + 1;
                    continue;
                }
                if (tmpLong == 2) {
                    computeBucketSize3_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
                    leftTmpPtr = rightTmpPtr + 1;
                    continue;
                }
                insSortUpdateRecurse_SaBucket(leftTmpPtr, rightTmpPtr, newOffset, q);
            }
            leftTmpPtr = rightTmpPtr + 1;
        }
    }
}
public class AffectedChecker { /** * Find all non affected classes . */ private static void includeAffected ( Set < String > allClasses , Set < String > affectedClasses , List < File > sortedFiles ) { } }
// Streams the sorted checksum files through per-extension checks, records every
// encountered class in allClasses, and accumulates the classes whose stored
// checksums indicate they are affected into affectedClasses.
Storer storer = Config . createStorer ( ) ;
Hasher hasher = Config . createHasher ( ) ;
// In any debug mode, wrap the class-level check so its comparisons can be traced.
NameBasedCheck classCheck = Config . DEBUG_MODE_V != Config . DebugMode . NONE ? new DebugNameCheck ( storer , hasher , DependencyAnalyzer . CLASS_EXT ) : new NameBasedCheck ( storer , hasher , DependencyAnalyzer . CLASS_EXT ) ;
// Separate checks for coverage files and method-level files.
NameBasedCheck covCheck = new NameBasedCheck ( storer , hasher , DependencyAnalyzer . COV_EXT ) ;
MethodCheck methodCheck = new MethodCheck ( storer , hasher ) ;
String prevClassName = null ;
for ( File file : sortedFiles ) {
String fileName = file . getName ( ) ;
String dirName = file . getParent ( ) ;
String className = null ;
// Directories carry no checksum data; skip them.
if ( file . isDirectory ( ) ) { continue ; }
// Route each file to the check matching its extension; includeAll returns the
// class name the file belongs to (presumably null when not attributable — TODO confirm).
if ( fileName . endsWith ( DependencyAnalyzer . COV_EXT ) ) { className = covCheck . includeAll ( fileName , dirName ) ; }
else if ( fileName . endsWith ( DependencyAnalyzer . CLASS_EXT ) ) { className = classCheck . includeAll ( fileName , dirName ) ; }
else { className = methodCheck . includeAll ( fileName , dirName ) ; }
// Reset after some time to free space: once the class being processed changes,
// flush the method-level results gathered so far and start a fresh MethodCheck
// (relies on sortedFiles grouping a class's files together — NOTE(review): verify).
if ( prevClassName != null && className != null && ! prevClassName . equals ( className ) ) {
methodCheck . includeAffected ( affectedClasses ) ;
methodCheck = new MethodCheck ( Config . createStorer ( ) , Config . createHasher ( ) ) ;
}
if ( className != null ) {
allClasses . add ( className ) ;
prevClassName = className ;
}
}
// Flush whatever each check accumulated (including the final methodCheck batch).
classCheck . includeAffected ( affectedClasses ) ;
covCheck . includeAffected ( affectedClasses ) ;
methodCheck . includeAffected ( affectedClasses ) ;
public class StringValueArrayComparator { /** * Read the length of the next serialized { @ code StringValue } . * @ param source the input view containing the record * @ return the length of the next serialized { @ code StringValue } * @ throws IOException if the input view raised an exception when reading the length */ private static int readStringLength ( DataInputView source ) throws IOException { } }
// Decode the base-128 varint length prefix: each byte contributes 7 payload
// bits (little-endian group order); a set high bit marks a continuation byte.
int len = source . readByte ( ) & 0xFF ;
if ( len >= HIGH_BIT ) {
    // Multi-byte encoding: keep the low 7 bits and pull in continuation bytes.
    len &= 0x7F ;
    int shift = 7 ;
    int b ;
    do {
        b = source . readByte ( ) & 0xFF ;
        len |= ( b & 0x7F ) << shift ;
        shift += 7 ;
    } while ( b >= HIGH_BIT ) ;
}
return len ;
public class DefaultGroovyMethods { /** * Removes the last item from the List . * < pre class = " groovyTestCase " > * def list = [ " a " , false , 2] * assert list . removeLast ( ) = = 2 * assert list = = [ " a " , false ] * < / pre > * Using add ( ) and removeLast ( ) is similar to push and pop on a Stack * where the last item in the list represents the top of the stack . * @ param self a List * @ return the item removed from the List * @ throws NoSuchElementException if the list is empty * @ since 2.5.0 */ public static < T > T removeLast ( List < T > self ) { } }
if ( self . isEmpty ( ) ) { throw new NoSuchElementException ( "Cannot removeLast() an empty List" ) ; } return self . remove ( self . size ( ) - 1 ) ;
public class AssociativeArray2D { /** * Convenience function used to put a value in a particular key positions . * @ param key1 * @ param key2 * @ param value * @ return */ public final Object put2d ( Object key1 , Object key2 , Object value ) { } }
// Ensure the inner AssociativeArray for key1 exists, then store value under key2.
// Fix: keep the fetched/created inner array in a local instead of performing a
// second internalData.get(key1) lookup after insertion.
AssociativeArray inner = internalData . get ( key1 ) ;
if ( inner == null ) {
    inner = new AssociativeArray ( ) ;
    internalData . put ( key1 , inner ) ;
}
// Returns the value previously mapped at (key1, key2), or null if there was none.
return inner . internalData . put ( key2 , value ) ;