signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DateUtils { /** * 第一个值是否小于第二个值 * @ param small 第一个值 * @ param big 第二个值 * @ return { @ link Boolean } * @ since 1.1.0 */ public static boolean lessThan ( Time small , Time big ) { } }
return compare ( small , big , CompareWay . LT ) ;
public class SmsStatusPuller {

    /**
     * Pulls SMS reply messages from the service.
     *
     * @param max maximum number of entries to fetch, at most 100
     * @return the parsed {@link SmsStatusPullReplyResult}
     * @throws HTTPException on an HTTP status error
     * @throws JSONException if the response body cannot be parsed
     * @throws IOException   on a network problem
     */
    public SmsStatusPullReplyResult pullReply(int max) throws HTTPException, JSONException, IOException {
        HTTPResponse response = pull(1, max);   // may throw IOException
        handleError(response);                  // may throw HTTPException
        // Parsing may throw JSONException.
        return new SmsStatusPullReplyResult().parseFromHTTPResponse(response);
    }
}
public class SecureHash {

    /**
     * Computes the hash of the supplied content using the digest identified by the supplied name.
     *
     * @param digestName the name of the hashing function (a {@link MessageDigest} algorithm name)
     * @param content    the content to be hashed; may not be null
     * @return the hash of the contents as a byte array
     * @throws NoSuchAlgorithmException if the supplied algorithm could not be found
     */
    public static byte[] getHash(String digestName, byte[] content) throws NoSuchAlgorithmException {
        final MessageDigest md = MessageDigest.getInstance(digestName);
        // getInstance either returns an implementation or throws; kept as a sanity check.
        assert md != null;
        return md.digest(content);
    }
}
public class ClientBase { /** * Uses the { @ link PayloadStorage } for downloading large payloads to be used by the client . * Gets the uri of the payload fom the server and then downloads from this location . * @ param payloadType the { @ link com . netflix . conductor . common . utils . ExternalPayloadStorage . PayloadType } to be downloaded * @ param path the relative of the payload in external storage * @ return the payload object that is stored in external storage */ @ SuppressWarnings ( "unchecked" ) protected Map < String , Object > downloadFromExternalStorage ( ExternalPayloadStorage . PayloadType payloadType , String path ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( path ) , "uri cannot be blank" ) ; ExternalStorageLocation externalStorageLocation = payloadStorage . getLocation ( ExternalPayloadStorage . Operation . READ , payloadType , path ) ; try ( InputStream inputStream = payloadStorage . download ( externalStorageLocation . getUri ( ) ) ) { return objectMapper . readValue ( inputStream , Map . class ) ; } catch ( IOException e ) { String errorMsg = String . format ( "Unable to download payload from external storage location: %s" , path ) ; logger . error ( errorMsg , e ) ; throw new ConductorClientException ( errorMsg , e ) ; }
public class AmazonElastiCacheClient { /** * Deletes an existing replication group . By default , this operation deletes the entire replication group , including * the primary / primaries and all of the read replicas . If the replication group has only one primary , you can * optionally delete only the read replicas , while retaining the primary by setting * < code > RetainPrimaryCluster = true < / code > . * When you receive a successful response from this operation , Amazon ElastiCache immediately begins deleting the * selected resources ; you cannot cancel or revert this operation . * < note > * This operation is valid for Redis only . * < / note > * @ param deleteReplicationGroupRequest * Represents the input of a < code > DeleteReplicationGroup < / code > operation . * @ return Result of the DeleteReplicationGroup operation returned by the service . * @ throws ReplicationGroupNotFoundException * The specified replication group does not exist . * @ throws InvalidReplicationGroupStateException * The requested replication group is not in the < code > available < / code > state . * @ throws SnapshotAlreadyExistsException * You already have a snapshot with the given name . * @ throws SnapshotFeatureNotSupportedException * You attempted one of the following operations : < / p > * < ul > * < li > * Creating a snapshot of a Redis cluster running on a < code > cache . t1 . micro < / code > cache node . * < / li > * < li > * Creating a snapshot of a cluster that is running Memcached rather than Redis . * < / li > * < / ul > * Neither of these are supported by ElastiCache . * @ throws SnapshotQuotaExceededException * The request cannot be processed because it would exceed the maximum number of snapshots . * @ throws InvalidParameterValueException * The value for a parameter is invalid . * @ throws InvalidParameterCombinationException * Two or more incompatible parameters were specified . * @ sample AmazonElastiCache . 
DeleteReplicationGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticache - 2015-02-02 / DeleteReplicationGroup " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ReplicationGroup deleteReplicationGroup ( DeleteReplicationGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteReplicationGroup ( request ) ;
public class Emitter {

    /**
     * Emits the YAML tag for the current node (port of syck_emit_tag).
     *
     * @param tag    the tag to emit; {@code null} means no tag at all
     * @param ignore an implicit tag that, when matched and explicit typing is off,
     *               suppresses emission
     */
    public void emitTag(String tag, String ignore) {
        if (tag == null) {
            return;
        }
        // Skip tags that match the ignorable implicit tag unless explicit typing is forced.
        if (ignore != null && ImplicitScanner2.tagcmp(tag, ignore) && !this.explicit_typing) {
            return;
        }
        Level lvl = currentLevel();
        if (tag.length() == 0) {
            // Empty tag: emit a bare "! ".
            write(BANG_SPACE, 2);
        } else if (tag.startsWith("tag:")) {
            int taglen = tag.length();
            Pointer ptag = Pointer.create(tag);
            write(BANG, 1);
            if (tag.substring(4).startsWith(YAML.DOMAIN)) {
                // Standard YAML domain: skip "tag:" + domain + ":" and emit the remainder.
                int skip = 4 + YAML.DOMAIN.length() + 1;
                write(ptag.withStart(skip), taglen - skip);
            } else {
                // Scan for the colon that terminates the domain part.
                int subd = 4;
                while (subd < taglen && tag.charAt(subd) != ':') {
                    subd++;
                }
                if (subd < taglen && tag.charAt(subd) == ':') {
                    if (subd > (YAML.DOMAIN.length() + 5)
                            && tag.substring(subd - YAML.DOMAIN.length()).startsWith(YAML.DOMAIN)) {
                        // Domain ends with the default YAML domain suffix: trim it,
                        // then emit "<trimmed-domain>/<rest>".
                        write(ptag.withStart(4), (subd - YAML.DOMAIN.length()) - 5);
                        write(SLASH, 1);
                        write(ptag.withStart(subd + 1), taglen - (subd + 1));
                    } else {
                        // Custom domain: emit "<domain>/<rest>".
                        write(ptag.withStart(4), subd - 4);
                        write(SLASH, 1);
                        write(ptag.withStart(subd + 1), taglen - (subd + 1));
                    }
                } else {
                    /* TODO : Invalid tag ( no colon after domain ) */
                    return;
                }
            }
            write(SPACE, 1);
        } else if (tag.startsWith("x-private:")) {
            // Private tag: emit "!!<name> ".
            write(TWO_BANGS, 2);
            write(Pointer.create(tag.substring(10)), tag.length() - 10);
            write(SPACE, 1);
        }
        // Mark that a tag was emitted at this level.
        lvl.anctag = 1;
    }
}
public class AsyncServer { /** * Equivalent of " main " , but non - static . */ public void run ( String [ ] args ) throws Exception { } }
ServerConfiguration . Builder configBuilder = ServerConfiguration . newBuilder ( ) ; ServerConfiguration config ; try { config = configBuilder . build ( args ) ; } catch ( Exception e ) { System . out . println ( e . getMessage ( ) ) ; configBuilder . printUsage ( ) ; return ; } final Server server = newServer ( config ) ; server . start ( ) ; System . out . println ( "QPS Server started on " + config . address ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ) { @ Override @ SuppressWarnings ( "CatchAndPrintStackTrace" ) public void run ( ) { try { System . out . println ( "QPS Server shutting down" ) ; server . shutdown ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } } ) ; server . awaitTermination ( ) ;
public class DescribeKeyPairsRequest { /** * The key pair names . * Default : Describes all your key pairs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setKeyNames ( java . util . Collection ) } or { @ link # withKeyNames ( java . util . Collection ) } if you want to override * the existing values . * @ param keyNames * The key pair names . < / p > * Default : Describes all your key pairs . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeKeyPairsRequest withKeyNames ( String ... keyNames ) { } }
if ( this . keyNames == null ) { setKeyNames ( new com . amazonaws . internal . SdkInternalList < String > ( keyNames . length ) ) ; } for ( String ele : keyNames ) { this . keyNames . add ( ele ) ; } return this ;
public class TreeQuery { /** * Returns true iff child is a descendant of parent , or if child is equal to parent . */ public static < T > boolean isDescendantOfOrEqualTo ( TreeDef . Parented < T > treeDef , T child , T parent ) { } }
if ( child . equals ( parent ) ) { return true ; } else { return isDescendantOf ( treeDef , child , parent ) ; }
public class Security {

    /**
     * Returns the property (if any) mapping the key for the given provider.
     * Falls back to a case-insensitive search of the provider's keys when
     * no exact match exists.
     */
    private static String getProviderProperty(String key, Provider provider) {
        String value = provider.getProperty(key);
        if (value != null) {
            return value;
        }
        // No exact match: try a case-insensitive comparison against every key.
        for (Enumeration<Object> names = provider.keys(); names.hasMoreElements(); ) {
            String candidate = (String) names.nextElement();
            if (key.equalsIgnoreCase(candidate)) {
                return provider.getProperty(candidate);
            }
        }
        return null;
    }
}
public class FEELCodeMarshaller { /** * Unmarshalls the string into a FEEL value by executing it . * @ param feelType this parameter is ignored by this marshaller and can be set to null * @ param value the FEEL code to execute for unmarshalling * @ return the value resulting from executing the code */ @ Override public Object unmarshall ( Type feelType , String value ) { } }
return feel . evaluate ( value ) ;
public class Binary { /** * Writes data to some existing byte array starting from specific offset * @ param target byte array destination * @ param targetOffset destination offset to start * @ return length of source ( written ) data * @ throws IOException */ public int toByteArray ( byte [ ] target , int targetOffset ) throws IOException { } }
long length = length ( ) ; if ( ( long ) targetOffset + length > Integer . MAX_VALUE ) { throw new IOException ( "Unable to write - too big data" ) ; } if ( target . length < targetOffset + length ) { throw new IOException ( "Insufficient target byte array size" ) ; } if ( length < 0 ) { length = 0 ; int curOffset = targetOffset ; InputStream in = asStream ( ) ; try { int readbyte ; while ( ( readbyte = in . read ( ) ) != EOF ) { target [ curOffset ] = ( byte ) readbyte ; curOffset ++ ; length ++ ; } } finally { in . close ( ) ; } } else { System . arraycopy ( asByteArray ( false ) , 0 , target , targetOffset , ( int ) length ) ; } return ( int ) length ;
public class AwsSesServiceClientImpl { /** * Sends an email . * @ param to the email address to send the email to . * @ param from the email address to send the email from . * @ param subject the subject of the email . * @ param body the body text of the email . * @ return the result of the send . */ public AwsSesSendResult sendEmail ( @ Nonnull final String to , @ Nonnull final String from , @ Nonnull final String subject , @ Nonnull final String body ) { } }
return proxy . sendEmail ( to , from , subject , body ) ;
public class PngtasticInterlaceHandler {

    /**
     * {@inheritDoc}
     *
     * Reassembles the seven Adam7 interlacing passes of a PNG into full-width,
     * de-filtered scanlines. Each returned row keeps a leading filter-type byte
     * (hence the +1 on row lengths).
     */
    @Override
    public List<byte[]> deInterlace(int width, int height, int sampleBitCount, PngByteArrayOutputStream inflatedImageData) {
        log.debug("Deinterlacing");
        final List<byte[]> results = new ArrayList<>();
        // Bytes per sample; at least 1 even for sub-byte bit depths.
        final int sampleSize = Math.max(1, sampleBitCount / 8);
        // One output row per image row; +1 for the leading filter byte.
        final byte[][] rows = new byte[height][Double.valueOf(Math.ceil(width * sampleBitCount / 8D)).intValue() + 1];
        int subImageOffset = 0;
        for (int pass = 0; pass < 7; pass++) {
            // Rows/columns contributed by this pass (ceiling division over the pass grid).
            final int subImageRows = (((height - interlaceRowOffset[pass]) + (interlaceRowFrequency[pass] - 1)) / interlaceRowFrequency[pass]);
            final int subImageColumns = (((width - interlaceColumnOffset[pass]) + (interlaceColumnFrequency[pass] - 1)) / interlaceColumnFrequency[pass]);
            final int rowLength = Double.valueOf(Math.ceil(subImageColumns * sampleBitCount / 8D)).intValue() + 1;
            // Per-pass scatter strides/offsets, in bytes.
            final int cf = interlaceColumnFrequency[pass] * sampleSize;
            final int co = interlaceColumnOffset[pass] * sampleSize;
            final int rf = interlaceRowFrequency[pass];
            final int ro = interlaceRowOffset[pass];
            byte[] previousRow = new byte[rowLength];
            int offset = 0;
            for (int i = 0; i < subImageRows; i++) {
                offset = subImageOffset + i * rowLength;
                final byte[] row = new byte[rowLength];
                System.arraycopy(inflatedImageData.get(), offset, row, 0, rowLength);
                try {
                    // Undo the per-row PNG filter before scattering the samples.
                    pngFilterHandler.deFilter(row, previousRow, sampleBitCount);
                } catch (PngException e) {
                    // NOTE(review): "%s" looks like a printf placeholder; if `log` is an
                    // SLF4J logger the placeholder should be "{}" — confirm the logger API.
                    log.error("Error: %s", e.getMessage());
                }
                final int samples = (row.length - 1) / sampleSize;
                // Scatter each sample of this sub-image row to its final image position.
                for (int sample = 0; sample < samples; sample++) {
                    for (int b = 0; b < sampleSize; b++) {
                        rows[i * rf + ro][sample * cf + co + b + 1] = row[(sample * sampleSize) + b + 1];
                    }
                }
                previousRow = row.clone();
            }
            // Next pass starts right after the last row consumed by this one.
            subImageOffset = offset + rowLength;
        }
        for (int i = 0; i < rows.length; i++) {
            results.add(rows[i]);
        }
        return results;
    }
}
public class DateField { /** * Converts the given date into a string which follows the format used by * date fields . * @ param date * The date value to format , which may be null . * @ return * The formatted date , or null if the provided time was null . */ public static String format ( Date date ) { } }
DateFormat dateFormat = new SimpleDateFormat ( DateField . FORMAT ) ; return date == null ? null : dateFormat . format ( date ) ;
public class PEMReader {

    /**
     * Reads the lines up to (and excluding) the END marker and decodes the
     * accumulated Base64 content into a binary byte array.
     *
     * @param reader    source of PEM lines
     * @param endMarker the marker that terminates the Base64 block
     * @return DER encoded octet stream
     * @throws IOException if the stream ends before the end marker is seen
     */
    private static byte[] readBytes(BufferedReader reader, String endMarker) throws IOException {
        StringBuilder base64 = new StringBuilder();
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            if (line.contains(endMarker)) {
                return Base64.getDecoder().decode(base64.toString());
            }
            // Trim per-line whitespace so the concatenated Base64 is clean.
            base64.append(line.trim());
        }
        throw new IOException("Invalid PEM file: No end marker");
    }
}
public class JsMsgMap {

    /**
     * Returns the {@code changed} flag.
     * NOTE(review): the original javadoc asked "Is the map 'fluffed up' into a
     * HashMap? d317373.1", which does not obviously describe the returned
     * {@code changed} field — confirm the intended contract.
     */
    public boolean isChanged() {
        // Cheap global guard first, then component-level check, before tracing.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "isChanged");
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "isChanged", changed);
        return changed;
    }
}
public class Postconditions { /** * An { @ code int } specialized version of { @ link # checkPostcondition ( Object , * ContractConditionType ) } . * @ param value The value * @ param predicate The predicate * @ param describer The describer for the predicate * @ return value * @ throws PostconditionViolationException If the predicate is false */ public static int checkPostconditionI ( final int value , final IntPredicate predicate , final IntFunction < String > describer ) { } }
final boolean ok ; try { ok = predicate . test ( value ) ; } catch ( final Throwable e ) { throw failed ( e , Integer . valueOf ( value ) , singleViolation ( failedPredicate ( e ) ) ) ; } return innerCheckI ( value , ok , describer ) ;
public class AbstractJaxb { /** * < p > find tag by " name " property returning as List . < / p > * @ param name * @ param tagType * @ param < T > * @ return if not found , returns empty list . */ public < T extends AbstractJaxb > List < T > getByNameAsList ( String name , Class < T > tagType ) { } }
return GetByNameUtil . getByNameAsList ( ( T ) this , new ArrayList < T > ( ) , name , tagType ) ;
public class ExecutionStrategy { /** * Called to fetch a value for a field and resolve it further in terms of the graphql query . This will call * # fetchField followed by # completeField and the completed { @ link ExecutionResult } is returned . * An execution strategy can iterate the fields to be executed and call this method for each one * Graphql fragments mean that for any give logical field can have one or more { @ link Field } values associated with it * in the query , hence the fieldList . However the first entry is representative of the field for most purposes . * @ param executionContext contains the top level execution parameters * @ param parameters contains the parameters holding the fields to be executed and source object * @ return a promise to an { @ link ExecutionResult } * @ throws NonNullableFieldWasNullException in the future if a non null field resolves to a null value */ protected CompletableFuture < ExecutionResult > resolveField ( ExecutionContext executionContext , ExecutionStrategyParameters parameters ) { } }
return resolveFieldWithInfo ( executionContext , parameters ) . thenCompose ( FieldValueInfo :: getFieldValue ) ;
public class UniversalDecompressor { /** * Creates a InputStream where the unpacked data could be read from . * Internal GZip and BZip2 archive formats are supported . The list archive * formats can be increased with settings file decompressor . xml . Thereby * < ul > * < li > key is the archive extension < / li > * < li > value is the external command . The archive file should be specified * with an place holder < code > UniversalDecompressor . FILEPLACEHOLDER < / code > < / li > * < / ul > * External decompression utilities are in preference to the internal . If * there is nether external nor internal possibilities to unpack the file - * the standard < code > FileInputSteam < / code > will be returned * @ see UniversalDecompressor */ @ Override public InputStream getInputStream ( String fileName ) throws IOException { } }
InputStream inputStream = null ; if ( fileExists ( fileName ) ) { String extension = getExtension ( fileName ) ; if ( isExternalSupported ( extension ) ) { inputStream = startExternal ( fileName ) ; } else if ( isInternalSupported ( extension ) ) { inputStream = internalSupport . get ( extension ) . getInputStream ( fileName ) ; } else { inputStream = getDefault ( fileName ) ; } } return inputStream ;
public class GeographyValue { /** * Return the number of bytes in the serialization for this polygon . * Returned value does not include the 4 - byte length prefix that precedes variable - length types . * @ return The number of bytes in the serialization for this polygon . */ public int getLengthInBytes ( ) { } }
long length = polygonOverheadInBytes ( ) ; for ( List < XYZPoint > loop : m_loops ) { length += loopLengthInBytes ( loop . size ( ) ) ; } return ( int ) length ;
public class LocatorSoapServiceImpl {

    /**
     * Builds a W3C endpoint reference for the given service name and address,
     * attaching the endpoint's properties (if any) as metadata.
     *
     * @param serviceName the qualified service name
     * @param adress      the endpoint address
     * @return the built {@link W3CEndpointReference}
     * @throws ServiceLocatorFault       if the locator lookup fails
     * @throws InterruptedExceptionFault if the locator lookup is interrupted
     */
    private W3CEndpointReference buildEndpoint(QName serviceName, String adress) throws ServiceLocatorFault, InterruptedExceptionFault {
        W3CEndpointReferenceBuilder builder = new W3CEndpointReferenceBuilder();
        // builder . serviceName ( serviceName ) ;
        builder.address(adress);
        SLEndpoint endpoint = null;
        try {
            endpoint = locatorClient.getEndpoint(serviceName, adress);
        } catch (ServiceLocatorException e) {
            // Map locator failures to the SOAP fault type.
            // NOTE(review): the concatenation below lacks a space before "throws".
            ServiceLocatorFaultDetail serviceFaultDetail = new ServiceLocatorFaultDetail();
            serviceFaultDetail.setLocatorFaultDetail(serviceName.toString() + "throws ServiceLocatorFault");
            throw new ServiceLocatorFault(e.getMessage(), serviceFaultDetail);
        } catch (InterruptedException e) {
            // NOTE(review): same missing space; also the interrupt flag is not re-set.
            InterruptionFaultDetail interruptionFaultDetail = new InterruptionFaultDetail();
            interruptionFaultDetail.setInterruptionDetail(serviceName.toString() + "throws InterruptionFault");
            throw new InterruptedExceptionFault(e.getMessage(), interruptionFaultDetail);
        }
        if (endpoint != null) {
            SLProperties properties = endpoint.getProperties();
            if (properties != null && !properties.getPropertyNames().isEmpty()) {
                // Serialize endpoint properties to DOM and attach as reference metadata.
                EndpointTransformerImpl transformer = new EndpointTransformerImpl();
                DOMResult result = new DOMResult();
                transformer.writePropertiesTo(properties, result);
                Document docResult = (Document) result.getNode();
                builder.metadata(docResult.getDocumentElement());
            }
        }
        return builder.build();
    }
}
public class ConverterSet {

    /**
     * Returns the closest matching converter for the given type, but not very
     * efficiently: incompatible converters are progressively removed from a working
     * copy of the set, then supertype converters are eliminated, until at most one
     * candidate remains.
     *
     * @throws IllegalStateException if several equally-good converters remain
     */
    private static Converter selectSlow(ConverterSet set, Class<?> type) {
        Converter[] converters = set.iConverters;
        int length = converters.length;
        Converter converter;
        // Pass 1: return an exact match if present; drop converters that cannot apply.
        for (int i = length; --i >= 0; ) {
            converter = converters[i];
            Class<?> supportedType = converter.getSupportedType();
            if (supportedType == type) {
                // Exact match.
                return converter;
            }
            if (supportedType == null || (type != null && !supportedType.isAssignableFrom(type))) {
                // Eliminate the impossible.
                set = set.remove(i, null);
                converters = set.iConverters;
                length = converters.length;
            }
        }
        // Haven't found exact match, so check what remains in the set.
        if (type == null || length == 0) {
            return null;
        }
        if (length == 1) {
            // Found the one best match.
            return converters[0];
        }
        // At this point, there exist multiple potential converters.
        // Pass 2: eliminate supertypes (keep the most specific candidates).
        for (int i = length; --i >= 0; ) {
            converter = converters[i];
            Class<?> supportedType = converter.getSupportedType();
            for (int j = length; --j >= 0; ) {
                if (j != i && converters[j].getSupportedType().isAssignableFrom(supportedType)) {
                    // Eliminate supertype; restart the outer scan over the shrunken set.
                    set = set.remove(j, null);
                    converters = set.iConverters;
                    length = converters.length;
                    i = length - 1;
                }
            }
        }
        // Check what remains in the set.
        if (length == 1) {
            // Found the one best match.
            return converters[0];
        }
        // Class c implements a, b {}
        // Converters exist only for a and b. Which is better? Neither.
        StringBuilder msg = new StringBuilder();
        msg.append("Unable to find best converter for type \"");
        msg.append(type.getName());
        msg.append("\" from remaining set: ");
        for (int i = 0; i < length; i++) {
            converter = converters[i];
            Class<?> supportedType = converter.getSupportedType();
            msg.append(converter.getClass().getName());
            msg.append('[');
            msg.append(supportedType == null ? null : supportedType.getName());
            msg.append("], ");
        }
        throw new IllegalStateException(msg.toString());
    }
}
public class DateUtil { /** * Copied from Apache Commons Lang under Apache License v2. * < br / > * Determines how two calendars compare up to no more than the specified * most significant field . * @ param cal1 the first calendar , not < code > null < / code > * @ param cal2 the second calendar , not < code > null < / code > * @ param field the field from { @ code Calendar } * @ return a negative integer , zero , or a positive integer as the first * calendar is less than , equal to , or greater than the second . * @ throws IllegalArgumentException if any argument is < code > null < / code > * @ see # truncate ( Calendar , int ) * @ see # truncatedCompareTo ( Date , Date , int ) * @ since 3.0 */ public static int truncatedCompareTo ( final Calendar cal1 , final Calendar cal2 , final int field ) { } }
return truncate ( cal1 , field ) . compareTo ( truncate ( cal2 , field ) ) ;
public class ArrayUtil { /** * the smallest value , of all values inside the array , only work when all values are numeric * @ param array * @ return the smallest value * @ throws PageException */ public static double min ( Array array ) throws PageException { } }
if ( array . getDimension ( ) > 1 ) throw new ExpressionException ( "can only get max value from 1 dimensional arrays" ) ; if ( array . size ( ) == 0 ) return 0 ; double rtn = _toDoubleValue ( array , 1 ) ; int len = array . size ( ) ; try { for ( int i = 2 ; i <= len ; i ++ ) { double v = _toDoubleValue ( array , i ) ; if ( rtn > v ) rtn = v ; } } catch ( PageException e ) { throw new ExpressionException ( "exception while execute array operation: " + e . getMessage ( ) ) ; } return rtn ;
public class HttpLogEntry { /** * Mark the time an event occurred . Should include at least the start and end of a request . */ public HttpLogEntry mark ( String name ) { } }
events . add ( new Event ( name , System . currentTimeMillis ( ) ) ) ; return this ;
public class FilterNode {

    /**
     * Builds the SQL filter expression for this node and its child nodes.
     * (Original note: this method is expected to be overridden by subclasses.)
     *
     * @param <T>         the entity generic type
     * @param info        the EntityInfo
     * @param joinTabalis the joined-table aliases keyed by entity class
     * @return the SQL JOIN/WHERE fragment, or {@code null} when nothing applies
     */
    protected <T> CharSequence createSQLExpress(final EntityInfo<T> info, final Map<Class, String> joinTabalis) {
        // Leaf expression for this node's own column; skipped for a missing column,
        // a '#'-prefixed (virtual) column, or missing entity info.
        CharSequence sb0 = this.column == null || this.column.isEmpty() || this.column.charAt(0) == '#' || info == null
                ? null : createElementSQLExpress(info, joinTabalis == null ? null : joinTabalis.get(info.getType()));
        if (this.nodes == null) return sb0;
        final StringBuilder rs = new StringBuilder();
        rs.append('(');
        boolean more = false;
        // Fragments shorter than 3 chars are treated as empty — length thresholds
        // here mirror the shortest meaningful SQL fragment this builder emits.
        if (sb0 != null && sb0.length() > 2) {
            more = true;
            rs.append(sb0);
        }
        // Combine child expressions with OR/AND depending on this node's mode.
        for (FilterNode node : this.nodes) {
            CharSequence f = node.createSQLExpress(info, joinTabalis);
            if (f == null || f.length() < 3) continue;
            if (more) rs.append(or ? " OR " : " AND ");
            rs.append(f);
            more = true;
        }
        rs.append(')');
        // "( )"-only results (length < 5) mean no effective condition.
        if (rs.length() < 5) return null;
        return rs;
    }
}
public class NativeDatagramPacketArray {

    /**
     * Try to add the given {@link DatagramPacket}. Returns {@code true} on success,
     * {@code false} otherwise (array full, or no room left in the iov array).
     */
    boolean add(DatagramPacket packet) {
        if (count == packets.length) {
            // We already filled up to UIO_MAX_IOV messages. This is the max allowed per
            // sendmmsg(...) call, we will try again later.
            return false;
        }
        ByteBuf content = packet.content();
        int len = content.readableBytes();
        if (len == 0) {
            // Nothing to send: report success without consuming a slot.
            return true;
        }
        NativeDatagramPacket p = packets[count];
        InetSocketAddress recipient = packet.recipient();
        // Remember where this packet's iovec entries begin so we can size them below.
        int offset = iovArray.count();
        if (!iovArray.add(content)) {
            // Not enough space to hold the whole content, we will try again later.
            return false;
        }
        // Point the native packet at the iovec range just appended.
        p.init(iovArray.memoryAddress(offset), iovArray.count() - offset, recipient);
        count++;
        return true;
    }
}
public class Packer { /** * Sets he usage of " AES / CBC / PKCS5Padding " with a Random Integer IV for encryption ( default no ) . Instead * of use full - random - IV , use a integer seed ( this allow compact output ) * @ param passphrase shared secret * @ return * @ throws NoSuchAlgorithmException * @ throws NoSuchPaddingException */ public Packer useAESwithRandomIntIV ( final String passphrase ) throws NoSuchAlgorithmException , NoSuchPaddingException { } }
initCipher ( ) ; integerIV = ( rnd . nextInt ( ) & 0x7FFFFFFF ) ; initCipherIV ( generateMdfromInteger ( this . integerIV , aesCipherLen ) , AESTYPEIV . RANDOM_INT_IV ) ; initCipherKey ( passphrase ) ; return this ;
public class ControllerLinkBuilderFactory { /** * ( non - Javadoc ) * @ see org . springframework . hateoas . MethodLinkBuilderFactory # linkTo ( java . lang . Class , java . lang . reflect . Method , java . lang . Object [ ] ) */ @ Override public ControllerLinkBuilder linkTo ( Class < ? > controller , Method method , Object ... parameters ) { } }
return ControllerLinkBuilder . linkTo ( controller , method , parameters ) ;
public class BaseCalendar { /** * Returns the start of the given day as a < code > { @ link Calendar } < / code > . This * calculation will take the < code > BaseCalendar < / code > time zone into account * if it is not < code > null < / code > . * @ param timeInMillis * A time containing the desired date for the start - of - day time * @ return A < code > { @ link Calendar } < / code > set to the start of the given day . */ protected Calendar getStartOfDayJavaCalendar ( final long timeInMillis ) { } }
final Calendar startOfDay = createJavaCalendar ( timeInMillis ) ; startOfDay . set ( Calendar . HOUR_OF_DAY , 0 ) ; startOfDay . set ( Calendar . MINUTE , 0 ) ; startOfDay . set ( Calendar . SECOND , 0 ) ; startOfDay . set ( Calendar . MILLISECOND , 0 ) ; return startOfDay ;
public class ReflectionUtilities { /** * Get the simple name of the class . This varies from the one in Class , in * that it returns " 1 " for Classes like java . lang . String $ 1 In contrast , * String . class . getSimpleName ( ) returns " " , which is not unique if * java . lang . String $ 2 exists , causing all sorts of strange bugs . * @ param name * @ return */ public static String getSimpleName ( final Type name ) { } }
final Class < ? > clazz = getRawClass ( name ) ; final String [ ] nameArray = clazz . getName ( ) . split ( REGEXP ) ; final String ret = nameArray [ nameArray . length - 1 ] ; if ( ret . length ( ) == 0 ) { throw new IllegalArgumentException ( "Class " + name + " has zero-length simple name. Can't happen?!?" ) ; } return ret ;
public class AstBuilder { /** * literal { - - - - - */ @ Override public ConstantExpression visitIntegerLiteralAlt ( IntegerLiteralAltContext ctx ) { } }
String text = ctx . IntegerLiteral ( ) . getText ( ) ; Number num = null ; try { num = Numbers . parseInteger ( null , text ) ; } catch ( Exception e ) { this . numberFormatError = tuple ( ctx , e ) ; } ConstantExpression constantExpression = new ConstantExpression ( num , ! text . startsWith ( SUB_STR ) ) ; constantExpression . putNodeMetaData ( IS_NUMERIC , true ) ; constantExpression . putNodeMetaData ( INTEGER_LITERAL_TEXT , text ) ; return configureAST ( constantExpression , ctx ) ;
public class ZanataInterface {

    /**
     * Gets a Translation from Zanata using the Zanata Document ID and Locale.
     *
     * @param id     the ID of the document in Zanata
     * @param locale the locale of the translation to find
     * @return null if the translation doesn't exist or an error occurred, otherwise the
     *         TranslationsResource containing the Translation Strings (TextFlowTargets)
     * @throws NotModifiedException if the translation has not been modified since it was last retrieved
     */
    public TranslationsResource getTranslations(final String id, final LocaleId locale) throws NotModifiedException {
        ClientResponse<TranslationsResource> response = null;
        try {
            final ITranslatedDocResource client = proxyFactory.getTranslatedDocResource(details.getProject(), details.getVersion());
            response = client.getTranslations(id, locale, null);
            final Status status = Response.Status.fromStatusCode(response.getStatus());
            /* Remove the locale if it is forbidden */
            if (status == Response.Status.FORBIDDEN) {
                localeManager.removeLocale(locale);
            } else if (status == Status.NOT_MODIFIED) {
                // Propagated to the caller via the rethrow in the catch block below.
                throw new NotModifiedException();
            } else if (status == Response.Status.OK) {
                final TranslationsResource retValue = response.getEntity();
                return retValue;
            }
        } catch (final Exception ex) {
            // NotModifiedException must escape; everything else is logged and swallowed.
            if (ex instanceof NotModifiedException) {
                throw (NotModifiedException) ex;
            } else {
                log.error("Failed to retrieve the Zanata Translated Document", ex);
            }
        } finally {
            /*
             * If you are using RESTEasy client framework, and returning a Response from your service method, you will
             * explicitly need to release the connection.
             */
            if (response != null) response.releaseConnection();
            /* Perform a small wait to ensure zanata isn't overloaded */
            performZanataRESTCallWaiting();
        }
        // Any non-OK status (other than NOT_MODIFIED) and any handled error ends up here.
        return null;
    }
}
public class AWSGlueClient { /** * Gets code to perform a specified mapping . * @ param getPlanRequest * @ return Result of the GetPlan operation returned by the service . * @ throws InvalidInputException * The input provided was not valid . * @ throws InternalServiceException * An internal service error occurred . * @ throws OperationTimeoutException * The operation timed out . * @ sample AWSGlue . GetPlan * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / GetPlan " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetPlanResult getPlan ( GetPlanRequest request ) { } }
// Run the SDK's client-side pre-execution hooks, then delegate to the generated executor.
request = beforeClientExecution(request);
return executeGetPlan(request);
public class AbstractControllerServer { /** * { @ inheritDoc } * @ return { @ inheritDoc } */ @ Override public boolean isActive ( ) { } }
// An uninitialized server can never be active.
try {
    validateInitialization();
} catch (InvalidStateException notInitialized) {
    return false;
}
// Active only when both watch dogs report active (short-circuit preserved).
if (!informerWatchDog.isActive()) {
    return false;
}
return serverWatchDog.isActive();
public class LBoolToIntFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LBoolToIntFunction boolToIntFunctionFrom ( Consumer < LBoolToIntFunctionBuilder > buildingFunction ) { } }
// Hand a fresh builder to the caller's configuration function, then materialize it.
final LBoolToIntFunctionBuilder newBuilder = new LBoolToIntFunctionBuilder();
buildingFunction.accept(newBuilder);
return newBuilder.build();
public class ProfileSummaryBuilder { /** * Build the profile documentation . * @ param node the XML element that specifies which components to document * @ param contentTree the content tree to which the documentation will be added */ public void buildProfileDoc ( XMLNode node , Content contentTree ) throws Exception { } }
// NOTE(review): the incoming contentTree argument is immediately replaced by a fresh
// header tree, so the parameter value itself is never used — confirm this is intended.
contentTree = profileWriter.getProfileHeader(profile.name);
// Build the child components declared in the XML node into the header tree.
buildChildren(node, contentTree);
profileWriter.addProfileFooter(contentTree);
profileWriter.printDocument(contentTree);
profileWriter.close();
// Copy supporting doc-files after the writer has been closed.
Util.copyDocFiles(configuration, DocPaths.profileSummary(profile.name));
public class CommerceWishListItemPersistenceImpl { /** * Clears the cache for the commerce wish list item . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( CommerceWishListItem commerceWishListItem ) { } }
// Evict the entity itself from the entity cache by primary key...
entityCache.removeResult(CommerceWishListItemModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListItemImpl.class, commerceWishListItem.getPrimaryKey());
// ...then invalidate every cached finder list, since any of them may contain it.
finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
// The 'true' flag presumably also clears the original (pre-modification) unique
// finder keys — confirm against clearUniqueFindersCache's contract.
clearUniqueFindersCache((CommerceWishListItemModelImpl) commerceWishListItem, true);
public class TypedEntityLinks { /** * Returns a { @ link LinkBuilder } able to create links to the controller managing the given entity . Implementations * will know about the URI structure being used to expose item - resource URIs . * @ param type the entity to point to , must not be { @ literal null } . * @ return the { @ link LinkBuilder } pointing to the item resource identified by the given entity . Will never be * { @ literal null } . * @ throws IllegalArgumentException in case the given type is unknown the entity links infrastructure . */ public LinkBuilder linkForItemResource ( T entity ) { } }
// Delegate to the underlying EntityLinks, deriving the identifier via the configured extractor.
return entityLinks.linkForItemResource(entity.getClass(), identifierExtractor.apply(entity));
public class RateLimitedLog { /** * @ return a LogWithPatternAndLevel object for the supplied @ param message and * @ param level . This can be cached and reused by callers in performance - sensitive * cases to avoid performing two ConcurrentHashMap lookups . * Note that the string is the sole key used , so the same string cannot be reused with differing period * settings ; any periods which differ from the first one used are ignored . * @ throws IllegalStateException if we exceed the limit on number of RateLimitedLogWithPattern objects * in any one period ; if this happens , it ' s probable that an already - interpolated string is * accidentally being used as a log pattern . */ public LogWithPatternAndLevel get ( String pattern , Level level ) { } }
// Two-step lookup: pattern -> RateLimitedLogWithPattern, then level -> LogWithPatternAndLevel.
// Callers in hot paths may cache the result to avoid both ConcurrentHashMap lookups.
return get(pattern).get(level);
public class StandardsSubscriptionRequest { /** * @ param standardsInput * @ return Returns a reference to this object so that method calls can be chained together . */ public StandardsSubscriptionRequest withStandardsInput ( java . util . Map < String , String > standardsInput ) { } }
// Fluent variant of setStandardsInput: mutate, then return this for call chaining.
setStandardsInput(standardsInput);
return this;
public class Polymer { /** * Adds the atom oAtom to a specified Monomer . * @ param oAtom The atom to add * @ param oMonomer The monomer the atom belongs to */ @ Override public void addAtom ( IAtom oAtom , IMonomer oMonomer ) { } }
if ( ! contains ( oAtom ) ) { super . addAtom ( oAtom ) ; if ( oMonomer != null ) { // Not sure what ' s better here . . . throw nullpointer exception ? oMonomer . addAtom ( oAtom ) ; if ( ! monomers . containsKey ( oMonomer . getMonomerName ( ) ) ) { monomers . put ( oMonomer . getMonomerName ( ) , oMonomer ) ; } } } /* * notifyChanged ( ) is called by addAtom in AtomContainer */
public class DecoratorUtils { /** * Finds the decorator lineage of the given object . * If object is not a { @ link org . apache . gobblin . util . Decorator } , this method will return a singleton list with just the object . * If object is a { @ link org . apache . gobblin . util . Decorator } , it will return a list of the underlying object followed by the * decorator lineage up to the input decorator object . * @ param obj an object . * @ return List of the non - decorator underlying object and all decorators on top of it , * starting with underlying object and ending with the input object itself ( inclusive ) . */ public static List < Object > getDecoratorLineage ( Object obj ) { } }
// Walk from the (possibly decorated) input object down to the underlying object,
// collecting each layer outermost-first.
List<Object> outermostFirst = Lists.newArrayList();
Object current = obj;
outermostFirst.add(current);
while (current instanceof Decorator) {
    current = ((Decorator) current).getDecoratedObject();
    outermostFirst.add(current);
}
// Callers expect the underlying object first and the input object last.
return Lists.reverse(outermostFirst);
public class HornSchunckPyramid { /** * See equation 25 . Fast unsafe version */ protected static float A ( int x , int y , GrayF32 flow ) { } }
int center = flow.getIndex(x, y);
// 4-connected neighbours.
float left = flow.data[center - 1];
float right = flow.data[center + 1];
float up = flow.data[center - flow.stride];
float down = flow.data[center + flow.stride];
// Diagonal neighbours.
float upLeft = flow.data[center - 1 - flow.stride];
float upRight = flow.data[center + 1 - flow.stride];
float downLeft = flow.data[center - 1 + flow.stride];
float downRight = flow.data[center + 1 + flow.stride];
// Weighted average (equation 25): 1/6 for axial, 1/12 for diagonal neighbours.
// Summation order matches the original to keep float results bit-identical.
return (1.0f / 6.0f) * (left + right + up + down) + (1.0f / 12.0f) * (upLeft + upRight + downLeft + downRight);
public class Log { /** * Adds a trace to the log . * @ param trace Trace to add . * @ throws ParameterException */ public void addTrace ( LogTrace < E > trace ) throws ParameterException { } }
Validate.notNull(trace);
// Case numbers are 1-based and assigned in insertion order.
trace.setCaseNumber(traces.size() + 1);
traces.add(trace);
summary.addTrace(trace);
// distinctTraces.add() returns false when an equal trace was already logged;
// in that case cross-link all equal traces as similar instances.
if (!distinctTraces.add(trace)) {
    for (LogTrace<E> storedTrace : traces) {
        if (storedTrace.equals(trace)) {
            // NOTE(review): 'traces' already contains 'trace' itself at this point, so an
            // equal storedTrace may be the very same object — confirm addSimilarInstance
            // is expected to record (or tolerates) the self-reference.
            storedTrace.addSimilarInstance(trace.getCaseNumber());
            trace.addSimilarInstance(storedTrace.getCaseNumber());
        }
    }
}
public class Objects {
    /**
     * Concatenates an existing array with additional elements into a new {@code Object[]}.
     *
     * @param <T>      the type of the elements in the source array
     * @param array    the array whose elements come first
     * @param elements the elements appended after the array contents
     * @return a fresh {@code Object[]} containing all elements of {@code array}
     *         followed by {@code elements}, in order
     */
    public static <T> Object[] store(T[] array, Object... elements) {
        final Object[] combined = new Object[array.length + elements.length];
        System.arraycopy(array, 0, combined, 0, array.length);
        System.arraycopy(elements, 0, combined, array.length, elements.length);
        return combined;
    }
}
public class xen_health_monitor_fan_speed { /** * Use this API to fetch filtered set of xen _ health _ monitor _ fan _ speed resources . * set the filter parameter values in filtervalue object . */ public static xen_health_monitor_fan_speed [ ] get_filtered ( nitro_service service , filtervalue [ ] filter ) throws Exception { } }
// Wrap the filter values in an options object and issue the filtered fetch.
final xen_health_monitor_fan_speed resource = new xen_health_monitor_fan_speed();
final options opt = new options();
opt.set_filter(filter);
return (xen_health_monitor_fan_speed[]) resource.getfiltered(service, opt);
public class UpdateApnsSandboxChannelRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateApnsSandboxChannelRequest updateApnsSandboxChannelRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Null request: fail fast with the SDK's standard marshalling error.
if (updateApnsSandboxChannelRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Marshall each bound member in the generated order.
    protocolMarshaller.marshall(updateApnsSandboxChannelRequest.getAPNSSandboxChannelRequest(), APNSSANDBOXCHANNELREQUEST_BINDING);
    protocolMarshaller.marshall(updateApnsSandboxChannelRequest.getApplicationId(), APPLICATIONID_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the original cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class House { /** * Build the house with physics properties and add it to rootNode ( the * world ) . * @ param rootNode * @ param assetManager * @ param physicsSpace */ public void build ( Node rootNode , SimpleApplication app ) { } }
// Load the model and tag it so other systems can identify this house node.
house = (Node) app.getAssetManager().loadModel(urlResource);
house.setUserData("ID", houseId);
house.setUserData("ROLE", "House");
System.out.println("\n\nBuilding " + house.getName()); // fixed typo: was "Buinding"
// Resolve the well-known sub-nodes of the loaded model.
physicalEntities = (Node) house.getChild("PhysicalEntities");
Node physicalStructure = (Node) house.getChild("Structure");
visualStructure = (Node) physicalStructure.getChild("Visual");
physicsStructure = (Node) physicalStructure.getChild("Physics");
logicalEntities = (Node) house.getChild("LogicalEntities");
spatialCoordenates = (Node) logicalEntities.getChild("SpatialCoordenates");
initSpatials();
initLights(app);
// Look up the physics state once instead of repeating the state-manager query.
BulletAppState bulletState = app.getStateManager().getState(BulletAppState.class);
bulletState.getPhysicsSpace().addAll(physicalEntities);
bulletState.getPhysicsSpace().addAll(physicsStructure);
rootNode.attachChild(house);
PhysicsUtils.updateLocationAndRotation(house);
public class NCSARequestLog { /** * Log Extended fields . * This method can be extended by a derived class to add extened fields to * each log entry . It is called by the log method after all standard * fields have been added , but before the line terminator . * Derived implementations should write extra fields to the Writer * provided . * The default implementation writes the referer and user agent . * @ param request The request to log . * @ param response The response to log . * @ param log The writer to write the extra fields to . * @ exception IOException Problem writing log */ protected void logExtended ( HttpRequest request , HttpResponse response , Writer log ) throws IOException { } }
// Write the quoted referer field ("-" when absent); the trailing space
// separates it from the user-agent field that follows.
final String referer = request.getField(HttpFields.__Referer);
log.write(referer == null ? "\"-\" " : "\"" + referer + "\" ");
// Write the quoted user-agent field ("-" when absent); no trailing space,
// this is the last extended field on the line.
final String agent = request.getField(HttpFields.__UserAgent);
log.write(agent == null ? "\"-\"" : "\"" + agent + "\"");
public class Sign { /** * 获取单次签名 , 一次有效 , 针对删除和更新文件目录 * @ param bucketName * bucket名称 * @ param cosPath * 要签名的cos路径 * @ param cred * 用户的身份信息 , 包括appid , secret _ id和secret _ key * @ return base64编码的字符串 * @ throws AbstractCosException */ public static String getOneEffectiveSign ( String bucketName , String cosPath , Credentials cred ) throws AbstractCosException { } }
// Expiration 0 together with the 'true' flag presumably marks a single-use
// signature (per the javadoc) — confirm against appSignatureBase's parameter contract.
return appSignatureBase(cred, bucketName, cosPath, 0, true);
public class FCWsByteBufferImpl { /** * @ see com . ibm . ws . bytebuffer . internal . WsByteBufferImpl # limit ( int ) */ @ Override public WsByteBuffer limit ( int newLimit ) { } }
// When flow control is disabled, plain WsByteBuffer semantics apply.
if (!isFCEnabled()) {
    return super.limit(newLimit);
}
// Validate the requested limit against the flow-controlled capacity bounds.
String errorMsg = null;
if (newLimit > fcSize) {
    errorMsg = "Requested value for the WsByteBuffer limit was greater than the capacity";
} else if (newLimit < 0) {
    errorMsg = "Requested value for the WsByteBuffer limit was less than 0";
}
if (errorMsg != null) {
    IllegalArgumentException iae = new IllegalArgumentException(errorMsg);
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, errorMsg);
    }
    FFDCFilter.processException(iae, CLASS_NAME + ".limit(int)", "625", this);
    throw iae;
}
// set our FC limit
this.fcLimit = newLimit;
return this;
public class AmazonMQClient { /** * Creates a new configuration for the specified configuration name . Amazon MQ uses the default configuration ( the * engine type and version ) . * @ param createConfigurationRequest * Creates a new configuration for the specified configuration name . Amazon MQ uses the default configuration * ( the engine type and version ) . * @ return Result of the CreateConfiguration operation returned by the service . * @ throws BadRequestException * HTTP Status Code 400 : Bad request due to incorrect input . Correct your request and then retry it . * @ throws InternalServerErrorException * HTTP Status Code 500 : Unexpected internal server error . Retrying your request might resolve the issue . * @ throws ConflictException * HTTP Status Code 409 : Conflict . This configuration name already exists . Retry your request with another * configuration name . * @ throws ForbiddenException * HTTP Status Code 403 : Access forbidden . Correct your input and then retry your request . * @ sample AmazonMQ . CreateConfiguration * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / mq - 2017-11-27 / CreateConfiguration " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateConfigurationResult createConfiguration ( CreateConfigurationRequest request ) { } }
// Run the SDK's client-side pre-execution hooks, then delegate to the generated executor.
request = beforeClientExecution(request);
return executeCreateConfiguration(request);
public class AbstractSubCodeBuilderFragment { /** * Replies the first assignement with the given name in the given rule . * @ param rule the rule . * @ param pattern pattern for the name of the terminal . * @ return the assignment or < code > null < / code > . */ protected static Assignment findAssignmentFromTerminalPattern ( EObject rule , String pattern ) { } }
// Scan the rule's assignments in order and return the first whose terminal
// matches the pattern; null when none matches (same contract as findFirst).
for (Assignment assignment : GrammarUtil.containedAssignments(rule)) {
    if (nameMatches(assignment.getTerminal(), pattern)) {
        return assignment;
    }
}
return null;
public class IncrementalMealyDAGBuilder { /** * Update the signature of a state , changing only the successor state of a single transition index . * @ param state * the state which ' s signature to update * @ param idx * the transition index to modify * @ param succ * the new successor state * @ return the resulting state , which can either be the same as the input state ( if the new signature is unique ) , or * the result of merging with another state . */ private State < O > updateSignature ( State < O > state , int idx , State < O > succ ) { } }
StateSignature<O> sig = state.getSignature();
// No change — the transition already points at the requested successor.
if (sig.successors.array[idx] == succ) {
    return state;
}
// The signature is about to change, so remove it from the canonical register first.
register.remove(sig);
// Maintain incoming-reference counts on both the old and the new successor.
if (sig.successors.array[idx] != null) {
    sig.successors.array[idx].decreaseIncoming();
}
sig.successors.array[idx] = succ;
succ.increaseIncoming();
// Hash must be recomputed after mutating the successors array.
sig.updateHashCode();
// Re-register; this may merge the state with an existing equivalent one.
return replaceOrRegister(state);
public class nsmemory_stats { /** * Use this API to fetch the statistics of all nsmemory _ stats resources that are configured on netscaler . */ public static nsmemory_stats [ ] get ( nitro_service service ) throws Exception { } }
// Fetch statistics for all configured nsmemory resources in one call.
final nsmemory_stats resource = new nsmemory_stats();
return (nsmemory_stats[]) resource.stat_resources(service);
public class Client { /** * Returns whether or not the host editor service is available * @ return * @ throws Exception */ public static boolean isAvailable ( ) throws Exception { } }
try {
    Registry registry = LocateRegistry.getRegistry("127.0.0.1", port);
    // The lookup + cast is the availability probe: a missing binding or an
    // unexpected stub type throws, which we translate to "not available".
    com.groupon.odo.proxylib.hostsedit.rmi.Message service =
            (com.groupon.odo.proxylib.hostsedit.rmi.Message) registry.lookup(SERVICE_NAME);
    return true;
} catch (Exception unavailable) {
    return false;
}
public class SynchroReader { /** * Extract resource data . */ private void processResources ( ) throws IOException { } }
// Read the companies table, then process each company's resources by name order.
CompanyReader companyReader = new CompanyReader(m_data.getTableData("Companies"));
companyReader.read();
for (MapRow company : companyReader.getRows()) {
    // TODO: need to sort by type as well as by name!
    for (MapRow resource : sort(company.getRows("RESOURCES"), "NAME")) {
        processResource(resource);
    }
}
public class TreeWriter { /** * Generate the interface hierarchy and class hierarchy . */ public void generateTreeFile ( ) throws IOException { } }
HtmlTree body = getTreeHeader();
Content headContent = getResource("doclet.Hierarchy_For_All_Packages");
Content heading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, false, HtmlStyle.title, headContent);
Content div = HtmlTree.DIV(HtmlStyle.header, heading);
addPackageTreeLinks(div);
// HTML5 vs HTML4: content goes into a <main> element when allowed, else directly into body.
HtmlTree htmlTree = (configuration.allowTag(HtmlTag.MAIN)) ? HtmlTree.MAIN() : body;
htmlTree.addContent(div);
HtmlTree divTree = new HtmlTree(HtmlTag.DIV);
divTree.addStyle(HtmlStyle.contentContainer);
// One hierarchy section per kind: classes, interfaces, annotation types, enums.
addTree(classtree.baseclasses(), "doclet.Class_Hierarchy", divTree);
addTree(classtree.baseinterfaces(), "doclet.Interface_Hierarchy", divTree);
addTree(classtree.baseAnnotationTypes(), "doclet.Annotation_Type_Hierarchy", divTree);
addTree(classtree.baseEnums(), "doclet.Enum_Hierarchy", divTree);
htmlTree.addContent(divTree);
if (configuration.allowTag(HtmlTag.MAIN)) {
    body.addContent(htmlTree);
}
// Footer: reuse htmlTree as either a <footer> element or the body itself.
if (configuration.allowTag(HtmlTag.FOOTER)) {
    htmlTree = HtmlTree.FOOTER();
} else {
    htmlTree = body;
}
addNavLinks(false, htmlTree);
addBottom(htmlTree);
if (configuration.allowTag(HtmlTag.FOOTER)) {
    body.addContent(htmlTree);
}
printHtmlDocument(null, true, body);
public class CharacterDecoder { /** * Decode the text from the InputStream and write the decoded * octets to the OutputStream . This method runs until the stream * is exhausted . * @ exception CEFormatException An error has occured while decoding * @ exception CEStreamExhausted The input stream is unexpectedly out of data */ public void decodeBuffer ( InputStream aStream , OutputStream bStream ) throws IOException { } }
int i;
int totalBytes = 0;
// Pushback stream lets the per-format prefix/suffix hooks peek at the input.
PushbackInputStream ps = new PushbackInputStream(aStream);
decodeBufferPrefix(ps, bStream);
while (true) {
    int length;
    try {
        length = decodeLinePrefix(ps, bStream);
        // Decode every complete atom strictly inside the line...
        for (i = 0; (i + bytesPerAtom()) < length; i += bytesPerAtom()) {
            decodeAtom(ps, bStream, bytesPerAtom());
            totalBytes += bytesPerAtom();
        }
        // ...then the final atom, which may be shorter than a full atom.
        if ((i + bytesPerAtom()) == length) {
            decodeAtom(ps, bStream, bytesPerAtom());
            totalBytes += bytesPerAtom();
        } else {
            decodeAtom(ps, bStream, length - i);
            totalBytes += (length - i);
        }
        decodeLineSuffix(ps, bStream);
    } catch (CEStreamExhausted e) {
        // Normal termination: the input stream ran out of data.
        break;
    }
}
decodeBufferSuffix(ps, bStream);
public class ArtifactUploadState { /** * Clears all temp data collected while uploading files . */ public void clearUploadTempData ( ) { } }
LOG . debug ( "Cleaning up temp data..." ) ; // delete file system zombies for ( final FileUploadProgress fileUploadProgress : getAllFileUploadProgressValuesFromOverallUploadProcessList ( ) ) { if ( ! StringUtils . isBlank ( fileUploadProgress . getFilePath ( ) ) ) { final boolean deleted = FileUtils . deleteQuietly ( new File ( fileUploadProgress . getFilePath ( ) ) ) ; if ( ! deleted ) { LOG . warn ( "TempFile was not deleted: {}" , fileUploadProgress . getFilePath ( ) ) ; } } } clearFileStates ( ) ;
public class Agg { /** * Get a { @ link Collector } that calculates the < code > PERCENT _ RANK ( ) < / code > function given a specific ordering . */ public static < T > Collector < T , ? , Optional < Double > > percentRank ( T value , Comparator < ? super T > comparator ) { } }
// Delegate to percentRankBy using the identity function as the key extractor.
return percentRankBy(value, t -> t, comparator);
public class DateUtils {
    /**
     * Returns a new {@link Date} shifted by the given number of months from the
     * supplied date (negative values shift into the past). Day-of-month is clamped
     * by {@link Calendar#add(int, int)} when the target month is shorter.
     *
     * @param date1    the base date
     * @param addMonth the number of months to add (may be negative)
     * @return the shifted date
     */
    public static Date addMonth(Date date1, int addMonth) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date1);
        calendar.add(Calendar.MONTH, addMonth);
        return calendar.getTime();
    }
}
public class ForkJoinTask { /** * Tries to set SIGNAL status unless already completed . Used by * ForkJoinPool . Other variants are directly incorporated into * externalAwaitDone etc . * @ return true if successful */ final boolean trySetSignal ( ) { } }
// Single CAS attempt: read status once, and only try to OR in the SIGNAL bit
// while the task is still incomplete (status >= 0). Returns false if the task
// already completed or a concurrent update changed status between read and CAS.
int s = status;
return s >= 0 && U.compareAndSwapInt(this, STATUS, s, s | SIGNAL);
public class SocketCache { /** * Get a cached socket to the given address * @ param remote Remote address the socket is connected to . * @ return A socket with unknown state , possibly closed underneath . Or null . */ public Socket get ( SocketAddress remote ) { } }
synchronized (multimap) {
    List<Socket> cached = multimap.get(remote);
    if (cached == null) {
        return null;
    }
    // Pop sockets until one that is still open is found; closed ones are discarded.
    Iterator<Socket> it = cached.iterator();
    while (it.hasNext()) {
        Socket socket = it.next();
        it.remove();
        if (!socket.isClosed()) {
            return socket;
        }
    }
}
return null;
public class MetadataFinder { /** * Attach a metadata cache file to a particular player media slot , so the cache will be used instead of querying * the player for metadata . This supports operation with metadata during shows where DJs are using all four player * numbers and heavily cross - linking between them . * If the media is ejected from that player slot , the cache will be detached . * @ param slot the media slot to which a meta data cache is to be attached * @ param file the metadata cache to be attached * @ throws IOException if there is a problem reading the cache file * @ throws IllegalArgumentException if an invalid player number or slot is supplied * @ throws IllegalStateException if the metadata finder is not running */ public void attachMetadataCache ( SlotReference slot , File file ) throws IOException { } }
// Fails with IllegalStateException when the metadata finder is not running.
ensureRunning();
// The player must be a valid CDJ number (1-4) that has announced itself.
if (slot.player < 1 || slot.player > 4 || DeviceFinder.getInstance().getLatestAnnouncementFrom(slot.player) == null) {
    throw new IllegalArgumentException("unable to attach metadata cache for player " + slot.player);
}
// Caches are only meaningful for physical media slots (USB or SD).
if ((slot.slot != CdjStatus.TrackSourceSlot.USB_SLOT) && (slot.slot != CdjStatus.TrackSourceSlot.SD_SLOT)) {
    throw new IllegalArgumentException("unable to attach metadata cache for slot " + slot.slot);
}
// May throw IOException if the cache file cannot be opened or parsed.
MetadataCache cache = new MetadataCache(file);
final MediaDetails slotDetails = getMediaDetailsFor(slot);
// When both the cache and the slot expose media details, verify they refer to the same media.
if (cache.sourceMedia != null && slotDetails != null) {
    if (!slotDetails.hashKey().equals(cache.sourceMedia.hashKey())) {
        throw new IllegalArgumentException("Cache was created for different media (" + cache.sourceMedia.hashKey() +
                ") than is in the slot (" + slotDetails.hashKey() + ").");
    }
    // Same media, but modified since the cache was built: warn and attach anyway.
    if (slotDetails.hasChanged(cache.sourceMedia)) {
        logger.warn("Media has changed (" + slotDetails + ") since cache was created (" + cache.sourceMedia +
                "). Attaching anyway as instructed.");
    }
}
attachMetadataCacheInternal(slot, cache);
public class Interceptors { /** * Creates an interceptor chain . * @ param < T > the function parameter type * @ param < R > the function result type * @ param innermost the function to be intercepted * @ param interceptors an array of interceptor * @ return the resulting function */ public static < T , R > Function < T , R > intercept ( Function < T , R > innermost , Interceptor < T > ... interceptors ) { } }
return new InterceptorChain < > ( innermost , new ArrayIterator < Interceptor < T > > ( interceptors ) ) ;
public class AmazonRdsDataSourceFactoryBean { /** * Retrieves the { @ link com . amazonaws . services . rds . model . DBInstance } information . * @ param identifier - the database identifier used * @ return - the db instance * @ throws IllegalStateException if the db instance is not found */ protected DBInstance getDbInstance ( String identifier ) throws IllegalStateException { } }
// Ask RDS for the instance description; exactly one result is expected for a
// concrete identifier.
try {
    DescribeDBInstancesResult describeDBInstancesResult = this.amazonRds.describeDBInstances(
            new DescribeDBInstancesRequest().withDBInstanceIdentifier(identifier));
    return describeDBInstancesResult.getDBInstances().get(0);
} catch (DBInstanceNotFoundException e) {
    // Preserve the original exception as the cause (it was previously dropped).
    throw new IllegalStateException(MessageFormat.format(
            "No database instance with id:''{0}'' found. Please specify a valid db instance", identifier), e);
}
public class ChatDirector { /** * Configures a message that will be automatically reported to anyone that sends a tell message * to this client to indicate that we are busy or away from the keyboard . */ public void setAwayMessage ( String message ) { } }
if (message != null) {
    message = filter(message, null, true);
    // If the user's own filter wiped out the entire message, substitute a placeholder.
    if (message == null) {
        message = "...";
    }
}
// pass the buck right on along
_cservice.away(message);
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcThermalLoadTypeEnum ( ) { } }
// @generated EMF accessor: lazily resolves the EEnum from the registered package
// on first access. The classifier index (915) is maintained by the code generator.
if (ifcThermalLoadTypeEnumEEnum == null) {
    ifcThermalLoadTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(915);
}
return ifcThermalLoadTypeEnumEEnum;
public class CodingAnnotationStudy { /** * Shorthand for invoking { @ link # addItem ( Object . . . ) } with the same * parameters multiple times . This method is useful for modeling * annotation data based on a contingency table . */ public void addMultipleItems ( int times , final Object ... values ) { } }
for ( int i = 0 ; i < times ; i ++ ) { addItemAsArray ( values ) ; }
public class AbstractLStar { /** * Incorporates the information provided by a counterexample into the observation data structure . * @ param ce * the query which contradicts the hypothesis * @ return the rows ( equivalence classes ) which became unclosed by adding the information . */ protected List < List < Row < I > > > incorporateCounterExample ( DefaultQuery < I , D > ce ) { } }
// Delegate counterexample processing to the classic L* handler; the returned
// rows are the equivalence classes left unclosed by the new information.
return ObservationTableCEXHandlers.handleClassicLStar(ce, table, oracle);
public class ExpressionParser { /** * Add mapper to mapper list . * @ param map * @ return */ public ExpressionParser addMapper ( Map < String , String > map ) { } }
// Wrap the map as a lookup function. The lambda captures 'map' lazily, so entries
// added to the map after this call remain visible to the registered mapper.
mapperList.add((s) -> map.get(s));
return this;
public class MongodbQueue { /** * Initialize collection : * < ul > * < li > Check if collection exists . < / li > * < li > If collection does not exist , create collection and indexes . < / li > * < / ul > */ protected void initCollection ( ) { } }
boolean collectionExists = MongoUtils.collectionExists(getDatabase(), getCollectionName());
if (!collectionExists) {
    LOGGER.info("Creating collection [" + getCollectionName() + "]...");
    getDatabase().createCollection(getCollectionName());
    MongoCollection<?> collection = getCollection();
    // The id index is unique; the remaining indexes are plain (non-unique).
    LOGGER.info("Creating index for field [" + getCollectionName() + "." + MongodbQueue.COLLECTION_FIELD_ID + "]...");
    collection.createIndex(new Document().append(MongodbQueue.COLLECTION_FIELD_ID, 1), new IndexOptions().unique(true));
    LOGGER.info("Creating index for field [" + getCollectionName() + "." + MongodbQueue.COLLECTION_FIELD_EPHEMERAL_KEY + "]...");
    collection.createIndex(new Document().append(MongodbQueue.COLLECTION_FIELD_EPHEMERAL_KEY, 1), new IndexOptions());
    LOGGER.info("Creating index for field [" + getCollectionName() + "." + MongodbQueue.COLLECTION_FIELD_QUEUE_TIME + "]...");
    collection.createIndex(new Document().append(MongodbQueue.COLLECTION_FIELD_QUEUE_TIME, 1), new IndexOptions());
    LOGGER.info("Creating index for field [" + getCollectionName() + "." + MongodbQueue.COLLECTION_FIELD_TIME + "]...");
    collection.createIndex(new Document().append(MongodbQueue.COLLECTION_FIELD_TIME, 1), new IndexOptions());
}
public class DataContextUtils { /** * Replace the embedded properties of the form ' $ { key . name } ' in the input Strings with the value from the data * context * @ param args argument string array * @ param data data context * @ param converter converter * @ param failIfUnexpanded true to fail if property is not found * @ return string array with replaced embedded properties */ public static String [ ] replaceDataReferencesInArray ( final String [ ] args , final Map < String , Map < String , String > > data , Converter < String , String > converter , boolean failIfUnexpanded ) { } }
// Delegate to the full overload; the trailing 'false' disables that overload's
// extra option — see its signature for the exact semantics of the final flag.
return replaceDataReferencesInArray(args, data, converter, failIfUnexpanded, false);
public class DebugLogWrapper { /** * Opens a ledger for reading purposes ( does not fence it ) . * @ param ledgerMetadata LedgerMetadata for the ledger to open . * @ return A BookKeeper LedgerHandle representing the ledger . * @ throws DurableDataLogException If an exception occurred . */ public LedgerHandle openLedgerNoFencing ( LedgerMetadata ledgerMetadata ) throws DurableDataLogException { } }
// Open the ledger read-only without fencing, so any active writer is not disturbed.
return Ledgers.openRead(ledgerMetadata.getLedgerId(), this.bkClient, this.config);
public class sslfipskey { /** * Use this API to create sslfipskey . */ public static base_response create ( nitro_service client , sslfipskey resource ) throws Exception { } }
// Copy the client-supplied fields into a fresh request object and submit it.
sslfipskey request = new sslfipskey();
request.fipskeyname = resource.fipskeyname;
request.modulus = resource.modulus;
request.exponent = resource.exponent;
return request.perform_operation(client, "create");
public class ResourceClaim { /** * Relinquish a claimed resource . * @ param zookeeper ZooKeeper connection to use . * @ param poolNode Path to the znode representing the resource pool . * @ param resource The resource . */ private void relinquishResource ( ZooKeeper zookeeper , String poolNode , int resource ) { } }
logger . debug ( "Relinquishing claimed resource {}." , resource ) ; try { zookeeper . delete ( poolNode + "/" + Integer . toString ( resource ) , - 1 ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } catch ( KeeperException e ) { logger . error ( "Failed to remove resource claim node {}/{}" , poolNode , resource ) ; }
public class UpdateInstanceRequest { /** * Replaces the labels associated with the instance . * @ see < a href = " https : / / cloud . google . com / bigtable / docs / creating - managing - labels " > For more * details < / a > */ @ SuppressWarnings ( "WeakerAccess" ) public UpdateInstanceRequest setAllLabels ( @ Nonnull Map < String , String > labels ) { } }
Preconditions . checkNotNull ( labels , "labels can't be null" ) ; builder . getInstanceBuilder ( ) . clearLabels ( ) ; builder . getInstanceBuilder ( ) . putAllLabels ( labels ) ; updateFieldMask ( Instance . LABELS_FIELD_NUMBER ) ; return this ;
public class Agent {

    /**
     * Reloads user data.
     * <p>
     * Behavior depends on the configured {@code parameters} value:
     * empty/whitespace logs a warning and does nothing; when
     * {@code overrideProperties} is false, user data are ignored; the reset
     * keyword triggers a reset request on the message processor; otherwise the
     * platform-specific user-data helper is invoked and the agent is
     * reconfigured from the retrieved properties.
     */
    void reloadUserData() {
        if (Utils.isEmptyOrWhitespaces(this.parameters)) {
            // Nothing to retrieve without a configured parameters value.
            this.logger.warning("No parameters were specified in the agent configuration. No user data will be retrieved.");
        } else if (!this.overrideProperties) {
            // Explicitly disabled: user data must not override the agent's configuration.
            this.logger.fine("User data are NOT supposed to be used.");
        } else if (Constants.AGENT_RESET.equalsIgnoreCase(this.parameters)) {
            // Reset request: forward to the message processor if messaging is up.
            if (getMessagingClient() != null && getMessagingClient().getMessageProcessor() != null)
                ((AgentMessageProcessor) getMessagingClient().getMessageProcessor()).resetRequest();
        } else {
            // Retrieve the agent's configuration
            AgentProperties props = null;
            this.logger.fine("User data are supposed to be used. Retrieving in progress...");
            // Dispatch on the target platform to pick the right retrieval strategy;
            // anything unrecognized is treated as a URL to fetch parameters from.
            if (AgentConstants.PLATFORM_EC2.equalsIgnoreCase(this.parameters) || AgentConstants.PLATFORM_OPENSTACK.equalsIgnoreCase(this.parameters))
                props = this.userDataHelper.findParametersForAmazonOrOpenStack(this.logger);
            else if (AgentConstants.PLATFORM_AZURE.equalsIgnoreCase(this.parameters))
                props = this.userDataHelper.findParametersForAzure(this.logger);
            else if (AgentConstants.PLATFORM_VMWARE.equalsIgnoreCase(this.parameters))
                props = this.userDataHelper.findParametersForVmware(this.logger);
            else if (Constants.AGENT_RESET.equalsIgnoreCase(this.parameters))
                props = new AgentProperties();
            else
                props = this.userDataHelper.findParametersFromUrl(this.parameters, this.logger);

            // If there was a configuration...
            if (props != null) {
                // Error messages do not matter when we reset an agent
                String errorMessage = null;
                if (!Constants.AGENT_RESET.equalsIgnoreCase(this.parameters) && (errorMessage = props.validate()) != null) {
                    // Validation failure is logged but does not abort the reload.
                    this.logger.severe("An error was found in user data. " + errorMessage);
                }

                // Adopt the identity fields from the retrieved properties.
                this.applicationName = props.getApplicationName();
                this.domain = props.getDomain();
                this.scopedInstancePath = props.getScopedInstancePath();
                if (!Utils.isEmptyOrWhitespaces(props.getIpAddress())) {
                    // User data may override the agent's network address.
                    this.ipAddress = props.getIpAddress();
                    this.logger.info("The agent's address was overwritten from user data and set to " + this.ipAddress);
                }

                try {
                    this.logger.info("Reconfiguring the agent with user data.");
                    this.userDataHelper.reconfigureMessaging(this.karafEtc, props.getMessagingConfiguration());
                } catch (Exception e) {
                    // Best-effort: a messaging reconfiguration failure is logged, not rethrown.
                    this.logger.severe("Error in messaging reconfiguration from user data: " + e);
                }
            }
        }
    }
}
public class CommerceCountryPersistenceImpl { /** * Removes the commerce country where uuid = & # 63 ; and groupId = & # 63 ; from the database . * @ param uuid the uuid * @ param groupId the group ID * @ return the commerce country that was removed */ @ Override public CommerceCountry removeByUUID_G ( String uuid , long groupId ) throws NoSuchCountryException { } }
CommerceCountry commerceCountry = findByUUID_G ( uuid , groupId ) ; return remove ( commerceCountry ) ;
public class Parameters {

    /**
     * Convenience method to call {@link #getExistingFile(String)} and then apply {@link
     * FileUtils#loadStringList(CharSource)} on it.
     *
     * @param param the name of the parameter holding the file path
     * @return the file's contents as an immutable list of lines
     * @throws IOException if the file cannot be read
     */
    public ImmutableList<String> getFileAsStringList(String param) throws IOException {
        // Resolve the parameter to an existing file (fails fast when missing),
        // then read it as UTF-8 and split into one entry per line.
        return FileUtils.loadStringList(Files.asCharSource(getExistingFile(param), Charsets.UTF_8));
    }
}
public class GrammarConverter { /** * This method converts the options part of the AST . * @ throws TreeException */ private void convertOptions ( ) throws TreeException { } }
options = new Properties ( ) ; ParseTreeNode optionList = parserTree . getChild ( "GrammarOptions" ) . getChild ( "GrammarOptionList" ) ; for ( ParseTreeNode option : optionList . getChildren ( "GrammarOption" ) ) { String name = option . getChild ( "PropertyIdentifier" ) . getText ( ) ; String value = option . getChild ( "Literal" ) . getText ( ) ; if ( value . startsWith ( "'" ) || value . startsWith ( "\"" ) ) { value = value . substring ( 1 , value . length ( ) - 1 ) ; } options . put ( name , value ) ; } // normalizeToBNF = Boolean . valueOf ( options . getProperty ( // " grammar . normalize _ to _ bnf " , " true " ) ) ;
public class Config { /** * Looks up the specified string - valued configuration entry , loads the class with that name and * instantiates a new instance of that class , which is returned . * @ param name the name of the property to be fetched . * @ param defcname the class name to use if the property is not specified in the config file . * @ exception Exception thrown if any error occurs while loading or instantiating the class . */ public Object instantiateValue ( String name , String defcname ) throws Exception { } }
return Class . forName ( getValue ( name , defcname ) ) . newInstance ( ) ;
public class EvaluationResultMarshaller {

    /**
     * Marshall the given parameter object.
     * <p>
     * Each field of the {@code EvaluationResult} is written through the
     * protocol marshaller using its pre-defined binding; the order of the
     * calls determines the order of the emitted fields.
     *
     * @param evaluationResult   the object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field values
     * @throws SdkClientException if {@code evaluationResult} is null or marshalling fails
     */
    public void marshall(EvaluationResult evaluationResult, ProtocolMarshaller protocolMarshaller) {
        if (evaluationResult == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(evaluationResult.getEvaluationResultIdentifier(), EVALUATIONRESULTIDENTIFIER_BINDING);
            protocolMarshaller.marshall(evaluationResult.getComplianceType(), COMPLIANCETYPE_BINDING);
            protocolMarshaller.marshall(evaluationResult.getResultRecordedTime(), RESULTRECORDEDTIME_BINDING);
            protocolMarshaller.marshall(evaluationResult.getConfigRuleInvokedTime(), CONFIGRULEINVOKEDTIME_BINDING);
            protocolMarshaller.marshall(evaluationResult.getAnnotation(), ANNOTATION_BINDING);
            protocolMarshaller.marshall(evaluationResult.getResultToken(), RESULTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type,
            // preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MainPartExtracter { /** * 获取句子的主谓宾 * @ param question 问题 * @ param questionWords 问题词序 , 相互之间以空格分割 * @ return 问题结构 */ public QuestionStructure getMainPart ( String question , String questionWords ) { } }
List < edu . stanford . nlp . ling . Word > words = new ArrayList < > ( ) ; String [ ] qw = questionWords . split ( "\\s+" ) ; for ( String item : qw ) { item = item . trim ( ) ; if ( "" . equals ( item ) ) { continue ; } words . add ( new edu . stanford . nlp . ling . Word ( item ) ) ; } return getMainPart ( question , words ) ;
public class ElementPlugin { /** * Registers one or more named properties to the container . Using this , a plugin can expose * properties for serialization and deserialization . * @ param instance The object instance holding the property accessors . If null , any existing * registration will be removed . * @ param propertyNames One or more property names to register . */ public void registerProperties ( Object instance , String ... propertyNames ) { } }
for ( String propertyName : propertyNames ) { registerProperty ( instance , propertyName , true ) ; }
public class SslContext { /** * Generates a new { @ link KeyStore } . * @ param certChain a X . 509 certificate chain * @ param key a PKCS # 8 private key * @ param keyPasswordChars the password of the { @ code keyFile } . * { @ code null } if it ' s not password - protected . * @ return generated { @ link KeyStore } . */ static KeyStore buildKeyStore ( X509Certificate [ ] certChain , PrivateKey key , char [ ] keyPasswordChars ) throws KeyStoreException , NoSuchAlgorithmException , CertificateException , IOException { } }
KeyStore ks = KeyStore . getInstance ( KeyStore . getDefaultType ( ) ) ; ks . load ( null , null ) ; ks . setKeyEntry ( ALIAS , key , keyPasswordChars , certChain ) ; return ks ;
public class QuickDiff { /** * Return the specified line from the text . * @ param textLines * The source text as an array of lines . * @ param line * The line to return . * @ return the line as is , or trimed , according to { @ link # isIgnoreTrailingWhiteSpaces ( ) } . */ private String getTextLine ( String [ ] textLines , int line ) { } }
return ( isIgnoreTrailingWhiteSpaces ( ) ) ? textLines [ line ] . trim ( ) : textLines [ line ] ;
public class XmlParser { /** * Parses a given file . * @ param documentThe document that will listen to the parser * @ param isThe InputStream with the contents */ public void go ( DocListener document , InputSource is ) { } }
try { parser . parse ( is , new SAXiTextHandler ( document ) ) ; } catch ( SAXException se ) { throw new ExceptionConverter ( se ) ; } catch ( IOException ioe ) { throw new ExceptionConverter ( ioe ) ; }
public class sslcert { /** * Use this API to create sslcert . */ public static base_response create ( nitro_service client , sslcert resource ) throws Exception { } }
sslcert createresource = new sslcert ( ) ; createresource . certfile = resource . certfile ; createresource . reqfile = resource . reqfile ; createresource . certtype = resource . certtype ; createresource . keyfile = resource . keyfile ; createresource . keyform = resource . keyform ; createresource . pempassphrase = resource . pempassphrase ; createresource . days = resource . days ; createresource . certform = resource . certform ; createresource . cacert = resource . cacert ; createresource . cacertform = resource . cacertform ; createresource . cakey = resource . cakey ; createresource . cakeyform = resource . cakeyform ; createresource . caserial = resource . caserial ; return createresource . perform_operation ( client , "create" ) ;
public class WDropdownSubmitOnChangeExample { /** * Updates the options present in the region selector , depending on the state selector ' s value . */ private void updateRegion ( ) { } }
actMessage . setVisible ( false ) ; String state = ( String ) stateSelector . getSelected ( ) ; if ( STATE_ACT . equals ( state ) ) { actMessage . setVisible ( true ) ; regionSelector . setOptions ( new String [ ] { null , "Belconnen" , "City" , "Woden" } ) ; } else if ( STATE_NSW . equals ( state ) ) { regionSelector . setOptions ( new String [ ] { null , "Hunter" , "Riverina" , "Southern Tablelands" } ) ; } else if ( STATE_VIC . equals ( state ) ) { regionSelector . setOptions ( new String [ ] { null , "Gippsland" , "Melbourne" , "Mornington Peninsula" } ) ; } else { regionSelector . setOptions ( new Object [ ] { null } ) ; }
public class BasicEscapeDirective {

    /**
     * Default implementation for {@link Streamable}.
     * <p>Subclasses can simply add {@code implements Streamable} if they have an implementation in
     * Sanitizers.&lt;name&gt;Streaming. If they don't, this method will throw while trying to find it.
     *
     * @param context            the plugin compilation context (unused here)
     * @param delegateAppendable the appendable to wrap with the streaming sanitizer
     * @param args               directive arguments (unused here)
     * @return the wrapped appendable, closeable when {@link #isCloseable()} reports so
     */
    public final AppendableAndOptions applyForJbcSrcStreaming(
            JbcSrcPluginContext context, Expression delegateAppendable, List<SoyExpression> args) {
        MethodRef sanitizerMethod = javaStreamingSanitizer;
        if (sanitizerMethod == null) {
            // lazily allocated
            // The method name is derived from the directive name: drop the
            // leading character (presumably the '|' prefix — TODO confirm) and
            // append "Streaming"; resolution fails here if no such method exists.
            sanitizerMethod =
                MethodRef.create(
                        Sanitizers.class, name.substring(1) + "Streaming", LoggingAdvisingAppendable.class)
                    .asNonNullable();
            // Cache for subsequent calls. NOTE(review): plain field write, no
            // synchronization visible here — assumed benign if racy.
            javaStreamingSanitizer = sanitizerMethod;
        }
        Expression streamingSanitizersExpr = sanitizerMethod.invoke(delegateAppendable);
        if (isCloseable()) {
            return AppendableAndOptions.createCloseable(streamingSanitizersExpr);
        } else {
            return AppendableAndOptions.create(streamingSanitizersExpr);
        }
    }
}
public class INodeHardLinkFile {

    /**
     * Create a HardLink file info if necessary and register it in the
     * hardLinkINodeIDToFileInfoMap, returning the registered instance.
     * This function is not thread safe.
     *
     * @param hardLinkID the hard link ID to look up or register
     * @param context    the context when loading the fsImage
     * @return hardLinkFileInfo registered in the hardLinkINodeIDToFileInfoMap
     */
    public static HardLinkFileInfo loadHardLinkFileInfo(long hardLinkID, FSImageLoadingContext context) {
        // Keep the directory's hard-link ID counter ahead of every ID seen.
        context.getFSDirectory().resetLastHardLinkIDIfLarge(hardLinkID);

        // Reuse an already-registered info when present.
        HardLinkFileInfo info = context.getHardLinkFileInfo(hardLinkID);
        if (info != null) {
            return info;
        }

        // First sighting of this ID: create and register a new info object.
        info = new HardLinkFileInfo(hardLinkID);
        context.associateHardLinkIDWithFileInfo(hardLinkID, info);
        return info;
    }
}
public class Connection {

    /**
     * In order to make sure that the connection is correctly initialized, the listener needs to be added post
     * constructor. Otherwise, there is a race-condition of the channel closed before the connection is completely
     * created and the Connection.close() call on channel close can access the Connection object which isn't
     * constructed completely. IOW, "this" escapes from the constructor if the listener is added in the constructor.
     */
    protected void connectCloseToChannelClose() {
        // Tie this Connection's lifecycle to the Netty channel: when the
        // channel's close future completes, shut this connection down too.
        nettyChannel.closeFuture().addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                closeNow(); // Close this connection when the channel is closed.
            }
        });
        // Store a back-reference on the channel so channel-scoped code can
        // find the owning Connection.
        nettyChannel.attr(CONNECTION_ATTRIBUTE_KEY).set(this);
    }
}
public class XMLUtil { /** * Converts the raw characters to XML escape characters . * @ param rawContent * @ param charset Null when charset is not known , so we assume it ' s unicode * @ param isNoLines * @ return escape string */ public static String escapeXML ( String rawContent , String charset , boolean isNoLines ) { } }
if ( rawContent == null ) return "" ; else { StringBuffer sb = new StringBuffer ( ) ; for ( int i = 0 ; i < rawContent . length ( ) ; i ++ ) { char ch = rawContent . charAt ( i ) ; if ( ch == '\'' ) sb . append ( "&#39;" ) ; else if ( ch == '&' ) sb . append ( "&amp;" ) ; else if ( ch == '"' ) sb . append ( "&quot;" ) ; else if ( ch == '<' ) sb . append ( "&lt;" ) ; else if ( ch == '>' ) sb . append ( "&gt;" ) ; else if ( ch > '~' && charset != null && charSetImpliesAscii ( charset ) ) // TODO - why is hashcode the only way to get the unicode number for the character // in jre 5.0? sb . append ( "&#x" + Integer . toHexString ( ch ) . toUpperCase ( ) + ";" ) ; else if ( isNoLines ) { if ( ch == '\r' ) sb . append ( "&#xA;" ) ; else if ( ch != '\n' ) sb . append ( ch ) ; } else sb . append ( ch ) ; } return sb . toString ( ) ; }
public class DateFormatSymbols { /** * < strong > [ icu ] < / strong > Returns quarter strings . For example : " 1st Quarter " , " 2nd Quarter " , etc . * @ param context The quarter context , FORMAT or STANDALONE . * @ param width The width or the returned quarter string , * either WIDE or ABBREVIATED . There are no NARROW quarters . * @ return the quarter strings . */ public String [ ] getQuarters ( int context , int width ) { } }
String [ ] returnValue = null ; switch ( context ) { case FORMAT : switch ( width ) { case WIDE : returnValue = quarters ; break ; case ABBREVIATED : case SHORT : // no quarter data for this , defaults to ABBREVIATED returnValue = shortQuarters ; break ; case NARROW : returnValue = null ; break ; } break ; case STANDALONE : switch ( width ) { case WIDE : returnValue = standaloneQuarters ; break ; case ABBREVIATED : case SHORT : // no quarter data for this , defaults to ABBREVIATED returnValue = standaloneShortQuarters ; break ; case NARROW : returnValue = null ; break ; } break ; } if ( returnValue == null ) { throw new IllegalArgumentException ( "Bad context or width argument" ) ; } return duplicate ( returnValue ) ;
public class FileHeaderPage { /** * Return true if this file has deleted data records . * @ return true if this file has deleted data records */ public boolean hasDeletedSlots ( ) { } }
long blkNum = ( Long ) getVal ( OFFSET_LDS_BLOCKID , BIGINT ) . asJavaVal ( ) ; return blkNum != NO_SLOT_BLOCKID ? true : false ;