signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TargetHttpsProxyClient { /** * Changes the URL map for TargetHttpsProxy .
* < p > Sample code :
* < pre > < code >
* try ( TargetHttpsProxyClient targetHttpsProxyClient = TargetHttpsProxyClient . create ( ) ) {
* ProjectTargetHttpsProxyName targetHttpsProxy = ProjectTargetHttpsProxyName . of ( " [ PROJECT ] " , " [ TARGET _ HTTPS _ PROXY ] " ) ;
* UrlMapReference urlMapReferenceResource = UrlMapReference . newBuilder ( ) . build ( ) ;
* Operation response = targetHttpsProxyClient . setUrlMapTargetHttpsProxy ( targetHttpsProxy . toString ( ) , urlMapReferenceResource ) ;
* < / code > < / pre >
* @ param targetHttpsProxy Name of the TargetHttpsProxy resource whose URL map is to be set .
* @ param urlMapReferenceResource
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation setUrlMapTargetHttpsProxy ( String targetHttpsProxy , UrlMapReference urlMapReferenceResource ) { } } | SetUrlMapTargetHttpsProxyHttpRequest request = SetUrlMapTargetHttpsProxyHttpRequest . newBuilder ( ) . setTargetHttpsProxy ( targetHttpsProxy ) . setUrlMapReferenceResource ( urlMapReferenceResource ) . build ( ) ; return setUrlMapTargetHttpsProxy ( request ) ; |
public class FileServersInner { /** * Gets a list of File Servers within the specified resource group .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; FileServerInner & gt ; object */
public Observable < Page < FileServerInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } } | return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < FileServerInner > > , Page < FileServerInner > > ( ) { @ Override public Page < FileServerInner > call ( ServiceResponse < Page < FileServerInner > > response ) { return response . body ( ) ; } } ) ; |
public class KAMStoreImpl { /** * { @ inheritDoc } */
@ Override public List < AnnotationType > getAnnotationTypes ( Kam kam ) { } } | if ( kam == null ) throw new InvalidArgument ( DEFAULT_MSG ) ; if ( ! exists ( kam ) ) return null ; return getAnnotationTypes ( kam . getKamInfo ( ) ) ; |
public class MomentInterval { /** * / * [ deutsch ]
* Liefert einen Zufallsmoment innerhalb dieses Intervalls . < / p >
* @ return random moment within this interval
* @ throws IllegalStateException if this interval is infinite or empty or if there is no canonical form
* @ see # toCanonical ( )
* @ since 5.0 */
public Moment random ( ) { } } | MomentInterval interval = this . toCanonical ( ) ; if ( interval . isFinite ( ) && ! interval . isEmpty ( ) ) { Moment m1 = interval . getStartAsMoment ( ) ; Moment m2 = interval . getEndAsMoment ( ) ; double factor = MRD ; double d1 = m1 . getPosixTime ( ) + m1 . getNanosecond ( ) / factor ; double d2 = m2 . getPosixTime ( ) + m2 . getNanosecond ( ) / factor ; double randomNum = ThreadLocalRandom . current ( ) . nextDouble ( d1 , d2 ) ; long posix = ( long ) Math . floor ( randomNum ) ; int fraction = ( int ) ( MRD * ( randomNum - posix ) ) ; Moment random = Moment . of ( posix , fraction , TimeScale . POSIX ) ; if ( random . isBefore ( m1 ) ) { random = m1 ; } else if ( random . isAfterOrEqual ( m2 ) ) { random = m2 . minus ( 1 , TimeUnit . NANOSECONDS ) ; } return random ; } else { throw new IllegalStateException ( "Cannot get random moment in an empty or infinite interval: " + this ) ; } |
public class Str { /** * like putl but writes to a string .
* @ param messages the stuff you want to print out .
* @ return string */
public static String sputl ( Object ... messages ) { } } | CharBuf buf = CharBuf . create ( 100 ) ; return sputl ( buf , messages ) . toString ( ) ; |
public class StandardClassBodyEmitter { /** * / * ( non - Javadoc )
* @ see com . pogofish . jadt . emitter . ClassBodyEmitter # emitEquals ( com . pogofish . jadt . emitter . Sink , com . pogofish . jadt . ast . Constructor ) */
@ Override public void emitEquals ( final Sink sink , final String indent , Constructor constructor , List < String > typeArguments ) { } } | logger . finest ( "Generating equals() for " + constructor . name ) ; sink . write ( indent + "@Override\n" ) ; sink . write ( indent + "public boolean equals(Object obj) {\n" ) ; sink . write ( indent + " if (this == obj) return true;\n" ) ; sink . write ( indent + " if (obj == null) return false;\n" ) ; sink . write ( indent + " if (getClass() != obj.getClass()) return false;\n" ) ; if ( ! constructor . args . isEmpty ( ) ) { if ( ! typeArguments . isEmpty ( ) ) { sink . write ( indent + " @SuppressWarnings(\"rawtypes\")" ) ; } sink . write ( indent + " " + constructor . name + " other = (" + constructor . name + ")obj;\n" ) ; for ( final Arg arg : constructor . args ) { arg . type . _switch ( new Type . SwitchBlock ( ) { @ Override public void _case ( Ref x ) { x . type . _switch ( new RefType . SwitchBlock ( ) { @ Override public void _case ( ClassType x ) { sink . write ( indent + " if (" + arg . name + " == null) {\n" ) ; sink . write ( indent + " if (other." + arg . name + " != null) return false;\n" ) ; sink . write ( indent + " } else if (!" + arg . name + ".equals(other." + arg . name + ")) return false;\n" ) ; } @ Override public void _case ( ArrayType x ) { sink . write ( indent + " if (!java.util.Arrays.equals(" + arg . name + ", other." + arg . name + ")) return false;\n" ) ; } } ) ; } @ Override public void _case ( Primitive x ) { sink . write ( indent + " if (" + arg . name + " != other." + arg . name + ") return false;\n" ) ; } } ) ; } } sink . write ( indent + " return true;\n" ) ; sink . write ( indent + "}" ) ; |
public class ServletStartedListener { /** * { @ inheritDoc } */
@ Override public void started ( Container moduleContainer ) { } } | try { WebAppConfig webAppConfig = moduleContainer . adapt ( WebAppConfig . class ) ; SecurityMetadata securityMetadataFromDD = getSecurityMetadata ( webAppConfig ) ; updateSecurityMetadata ( securityMetadataFromDD , webAppConfig ) ; setModuleSecurityMetaData ( moduleContainer , securityMetadataFromDD ) ; if ( com . ibm . ws . webcontainer . osgi . WebContainer . getServletContainerSpecLevel ( ) >= 31 ) { notifyDeployOfUncoveredMethods ( webAppConfig ) ; } if ( checkDynamicAnnotation ( webAppConfig ) ) { JaccService js = jaccService . getService ( ) ; if ( js != null ) { js . propagateWebConstraints ( webAppConfig . getApplicationName ( ) , webAppConfig . getModuleName ( ) , webAppConfig ) ; } } } catch ( UnableToAdaptException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "There was a problem setting the security meta data." , e ) ; } } |
public class PactDslJsonArray { /** * Element that must be an integer
* @ param number example integer value to use for generated bodies */
public PactDslJsonArray integerType ( Long number ) { } } | body . put ( number ) ; matchers . addRule ( rootPath + appendArrayIndex ( 0 ) , new NumberTypeMatcher ( NumberTypeMatcher . NumberType . INTEGER ) ) ; return this ; |
public class LifecycleManager { /** * Check that the indexable classes declared by the user are really indexable . */
private void checkIndexableClasses ( SearchIntegrator searchFactory , Set < Class < ? > > indexedEntities ) { } } | for ( Class < ? > c : indexedEntities ) { if ( searchFactory . getIndexBinding ( new PojoIndexedTypeIdentifier ( c ) ) == null ) { throw log . classNotIndexable ( c . getName ( ) ) ; } } |
public class RtfDestinationFontTable { /** * Initialize the object .
* @ param importFonts true to import the fonts into the FontFactory , false do not load fonts
* @ since 2.0.8 */
private void init ( boolean importFonts ) { } } | fontMap = new HashMap ( ) ; if ( this . rtfParser != null ) { this . importHeader = this . rtfParser . getImportManager ( ) ; } this . setToDefaults ( ) ; if ( importFonts ) { importSystemFonts ( ) ; } |
public class DefaultLogSystem { /** * Log an error
* @ param e The exception causing the error */
public void error ( Throwable e ) { } } | out . println ( new Date ( ) + " ERROR:" + e . getMessage ( ) ) ; e . printStackTrace ( out ) ; |
public class WorkspacePersistentDataManager { /** * Returns workspace data size . See for details WorkspaceQuotaManagerImpl # getWorkspaceDataSize ( ) .
* @ throws RepositoryException
* if any exception is occurred */
public long getWorkspaceDataSize ( ) throws RepositoryException { } } | final WorkspaceStorageConnection con = dataContainer . openConnection ( ) ; try { return con . getWorkspaceDataSize ( ) ; } finally { con . close ( ) ; } |
public class CSV { /** * Close reader and writer objects . */
public void close ( ) { } } | if ( csvReader != null ) { try { csvReader . close ( ) ; } catch ( IOException e ) { // Ignore .
} } if ( csvWriter != null ) { try { csvWriter . close ( ) ; } catch ( IOException e ) { // Ignore .
} } |
public class AddonDependencyEntry { /** * Create a new { @ link AddonDependencyEntry } with the given attributes . */
public static AddonDependencyEntry create ( String name , String versionRange ) { } } | return create ( name , Versions . parseMultipleVersionRange ( versionRange ) , false , false ) ; |
public class S3SourceSerializer { /** * As long as there is at least one CloudTrail log object :
* < li > Add the CloudTrail log object key to the list . < / li >
* < li > Add < code > accountId < / code > extracted from log object key to < code > sqsMessage < / code > . < / li >
* < li > Add { @ link SourceType # CloudTrailLog } to the < code > sqsMessage < / code > . < / li >
* If there is no CloudTrail log object and it is a valid S3 message , CPL adds only { @ link SourceType # Other }
* to the < code > sqsMessage < / code > . */
private void addCloudTrailLogsAndMessageAttributes ( Message sqsMessage , JsonNode s3RecordsNode , List < CloudTrailLog > cloudTrailLogs ) { } } | SourceType sourceType = SourceType . Other ; for ( JsonNode s3Record : s3RecordsNode ) { String bucketName = s3Record . at ( S3_BUCKET_NAME ) . textValue ( ) ; String objectKey = s3Record . at ( S3_OBJECT_KEY ) . textValue ( ) ; String eventName = s3Record . get ( EVENT_NAME ) . textValue ( ) ; SourceType currSourceType = sourceIdentifier . identifyWithEventName ( objectKey , eventName ) ; if ( currSourceType == SourceType . CloudTrailLog ) { cloudTrailLogs . add ( new CloudTrailLog ( bucketName , objectKey ) ) ; sourceType = currSourceType ; LibraryUtils . setMessageAccountId ( sqsMessage , objectKey ) ; } } sqsMessage . addAttributesEntry ( SourceAttributeKeys . SOURCE_TYPE . getAttributeKey ( ) , sourceType . name ( ) ) ; |
public class Element { /** * Print a PresentationML representation of the element and its children to the provided PrintStream . */
void asPresentationML ( XmlPrintStream out ) { } } | out . openElement ( getMessageMLTag ( ) , getAttributes ( ) ) ; for ( Element child : getChildren ( ) ) { child . asPresentationML ( out ) ; } out . closeElement ( ) ; |
public class SheetResourcesImpl { /** * Creates an Update Request for the specified Row ( s ) within the Sheet .
* It mirrors to the following Smartsheet REST API method : POST / sheets / { sheetId } / updaterequests
* Exceptions :
* - IllegalArgumentException : if any argument is null
* - InvalidRequestException : if there is any problem with the REST API request
* - AuthorizationException : if there is any problem with the REST API authorization ( access token )
* - ServiceUnavailableException : if the REST API service is not available ( possibly due to rate limiting )
* - SmartsheetRestException : if there is any other REST API related error occurred during the operation
* - SmartsheetException : if there is any other error occurred during the operation
* @ param sheetId the sheet id
* @ param email the email
* @ return the update request object
* @ throws SmartsheetException the smartsheet exception */
public UpdateRequest createUpdateRequest ( long sheetId , MultiRowEmail email ) throws SmartsheetException { } } | return this . createResource ( "sheets/" + sheetId + "/updaterequests" , UpdateRequest . class , email ) ; |
public class ContainerDefinition { /** * A list of namespaced kernel parameters to set in the container . This parameter maps to < code > Sysctls < / code > in
* the < a href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a > section
* of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the < code > - - sysctl < / code >
* option to < a href = " https : / / docs . docker . com / engine / reference / run / " > docker run < / a > .
* < note >
* It is not recommended that you specify network - related < code > systemControls < / code > parameters for multiple
* containers in a single task that also uses either the < code > awsvpc < / code > or < code > host < / code > network modes . For
* tasks that use the < code > awsvpc < / code > network mode , the container that is started last determines which
* < code > systemControls < / code > parameters take effect . For tasks that use the < code > host < / code > network mode , it
* changes the container instance ' s namespaced kernel parameters as well as the containers .
* < / note >
* @ return A list of namespaced kernel parameters to set in the container . This parameter maps to
* < code > Sysctls < / code > in the < a
* href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a > section
* of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the
* < code > - - sysctl < / code > option to < a href = " https : / / docs . docker . com / engine / reference / run / " > docker
* run < / a > . < / p > < note >
* It is not recommended that you specify network - related < code > systemControls < / code > parameters for
* multiple containers in a single task that also uses either the < code > awsvpc < / code > or < code > host < / code >
* network modes . For tasks that use the < code > awsvpc < / code > network mode , the container that is started
* last determines which < code > systemControls < / code > parameters take effect . For tasks that use the
* < code > host < / code > network mode , it changes the container instance ' s namespaced kernel parameters as well
* as the containers . */
public java . util . List < SystemControl > getSystemControls ( ) { } } | if ( systemControls == null ) { systemControls = new com . amazonaws . internal . SdkInternalList < SystemControl > ( ) ; } return systemControls ; |
public class ShareActionProvider { /** * Set the activity chooser policy of the model backed by the current
* share history file if needed which is if there is a registered callback . */
private void setActivityChooserPolicyIfNeeded ( ) { } } | if ( mOnShareTargetSelectedListener == null ) { return ; } if ( mOnChooseActivityListener == null ) { mOnChooseActivityListener = new ShareAcitivityChooserModelPolicy ( ) ; } ActivityChooserModel dataModel = ActivityChooserModel . get ( mContext , mShareHistoryFileName ) ; dataModel . setOnChooseActivityListener ( mOnChooseActivityListener ) ; |
public class CmsSecurityManager { /** * Increments a counter and returns its old value . < p >
* @ param context the request context
* @ param name the name of the counter
* @ return the value of the counter before incrementing
* @ throws CmsException if something goes wrong */
public int incrementCounter ( CmsRequestContext context , String name ) throws CmsException { } } | CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; try { return m_driverManager . incrementCounter ( dbc , name ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_INCREMENT_COUNTER_1 , name ) , e ) ; return - 1 ; // will never be reached
} finally { dbc . clear ( ) ; } |
public class MPDUtility { /** * Given a duration and the time units for the duration extracted from an MPP
* file , this method creates a new Duration to represent the given
* duration . This instance has been adjusted to take into account the
* number of " hours per day " specified for the current project .
* @ param file parent file
* @ param duration duration length
* @ param timeUnit duration units
* @ return Duration instance */
public static Duration getAdjustedDuration ( ProjectFile file , int duration , TimeUnit timeUnit ) { } } | Duration result ; switch ( timeUnit ) { case MINUTES : case ELAPSED_MINUTES : { double totalMinutes = duration / 10d ; result = Duration . getInstance ( totalMinutes , timeUnit ) ; break ; } case HOURS : case ELAPSED_HOURS : { double totalHours = duration / 600d ; result = Duration . getInstance ( totalHours , timeUnit ) ; break ; } case DAYS : { double unitsPerDay = file . getProjectProperties ( ) . getMinutesPerDay ( ) . doubleValue ( ) * 10d ; double totalDays = 0 ; if ( unitsPerDay != 0 ) { totalDays = duration / unitsPerDay ; } result = Duration . getInstance ( totalDays , timeUnit ) ; break ; } case ELAPSED_DAYS : { double unitsPerDay = 24d * 600d ; double totalDays = duration / unitsPerDay ; result = Duration . getInstance ( totalDays , timeUnit ) ; break ; } case WEEKS : { double unitsPerWeek = file . getProjectProperties ( ) . getMinutesPerWeek ( ) . doubleValue ( ) * 10d ; double totalWeeks = 0 ; if ( unitsPerWeek != 0 ) { totalWeeks = duration / unitsPerWeek ; } result = Duration . getInstance ( totalWeeks , timeUnit ) ; break ; } case ELAPSED_WEEKS : { double unitsPerWeek = ( 60 * 24 * 7 * 10 ) ; double totalWeeks = duration / unitsPerWeek ; result = Duration . getInstance ( totalWeeks , timeUnit ) ; break ; } case ELAPSED_MONTHS : { double unitsPerMonth = ( 60 * 24 * 30 * 10 ) ; double totalMonths = duration / unitsPerMonth ; result = Duration . getInstance ( totalMonths , timeUnit ) ; break ; } case MONTHS : { double totalMonths = duration / 96000d ; result = Duration . getInstance ( totalMonths , timeUnit ) ; break ; } default : { result = Duration . getInstance ( duration , timeUnit ) ; break ; } } return ( result ) ; |
public class InjectionFragment2 { /** * Add runtime / ui binding element .
* @ param element the runtime / ui binding element . */
public void addBoth ( BindingElement element ) { } } | if ( element != null ) { this . rtBindingElements . add ( element ) ; this . uiBindingElements . add ( element ) ; } |
public class ScriptableOutputStream { /** * Adds a qualified name to the list of object to be excluded from
* serialization . Names excluded from serialization are looked up
* in the new scope and replaced upon deserialization .
* @ param name a fully qualified name ( of the form " a . b . c " , where
* " a " must be a property of the top - level object ) . The object
* need not exist , in which case the name is ignored .
* @ throws IllegalArgumentException if the object is not a
* { @ link Scriptable } . */
public void addOptionalExcludedName ( String name ) { } } | Object obj = lookupQualifiedName ( scope , name ) ; if ( obj != null && obj != UniqueTag . NOT_FOUND ) { if ( ! ( obj instanceof Scriptable ) ) { throw new IllegalArgumentException ( "Object for excluded name " + name + " is not a Scriptable, it is " + obj . getClass ( ) . getName ( ) ) ; } table . put ( obj , name ) ; } |
public class Rule { /** * The < code > Predicates < / code > object contains one < code > Predicate < / code > element for each < a > ByteMatchSet < / a > ,
* < a > IPSet < / a > , or < a > SqlInjectionMatchSet < / a > object that you want to include in a < code > Rule < / code > .
* @ param predicates
* The < code > Predicates < / code > object contains one < code > Predicate < / code > element for each
* < a > ByteMatchSet < / a > , < a > IPSet < / a > , or < a > SqlInjectionMatchSet < / a > object that you want to include in a
* < code > Rule < / code > . */
public void setPredicates ( java . util . Collection < Predicate > predicates ) { } } | if ( predicates == null ) { this . predicates = null ; return ; } this . predicates = new java . util . ArrayList < Predicate > ( predicates ) ; |
public class PutDeliveryChannelRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( PutDeliveryChannelRequest putDeliveryChannelRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( putDeliveryChannelRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( putDeliveryChannelRequest . getDeliveryChannel ( ) , DELIVERYCHANNEL_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AbstractJavaMetadata { /** * Create a < code > FieldMetadata < / code > from a { @ link Type } instance .
* @ param type - The { @ link Type }
* @ return the specific type of < code > FieldMetadata < / code > */
protected FieldMetadata createParameterizedFieldMetadataFrom ( Type type ) { } } | FieldMetadata parameterizedTypeFieldMetadata = null ; if ( type . isSimpleType ( ) ) { SimpleType simpleType = ( SimpleType ) type ; parameterizedTypeFieldMetadata = FieldMetadata . parametrizedType ( JavaMetadataUtil . getName ( simpleType . getName ( ) ) ) ; } // TODO also process QualifiedType
return parameterizedTypeFieldMetadata ; |
public class Streams { /** * Simultaneously reduce a stream with multiple reducers
* < pre > { @ code
* Monoid < Integer > sum = Monoid . of ( 0 , ( a , b ) - > a + b ) ;
* Monoid < Integer > mult = Monoid . of ( 1 , ( a , b ) - > a * b ) ;
* val result = Streams . reduce ( Stream . of ( 1,2,3,4 ) , Arrays . asList ( sum , mult ) ) ;
* assertThat ( result , equalTo ( Arrays . asList ( 10,24 ) ) ) ;
* } < / pre >
* @ param stream Stream to reduce
* @ param reducers Reducers to reduce Stream
* @ return Reduced Stream values as List entries */
@ SuppressWarnings ( { } } | "rawtypes" , "unchecked" } ) public static < R > Seq < R > reduce ( final Stream < R > stream , final Iterable < ? extends Monoid < R > > reducers ) { return Seq . fromIterable ( new MultiReduceOperator < R > ( stream ) . reduce ( reducers ) ) ; |
public class PropsUtils { /** * Convert a Props object to a hierarchical Map
* @ param props props object
* @ return a hierarchical Map presented Props object */
public static Map < String , Object > toHierarchicalMap ( final Props props ) { } } | final Map < String , Object > propsMap = new HashMap < > ( ) ; propsMap . put ( "source" , props . getSource ( ) ) ; propsMap . put ( "props" , toStringMap ( props , true ) ) ; if ( props . getParent ( ) != null ) { propsMap . put ( "parent" , toHierarchicalMap ( props . getParent ( ) ) ) ; } return propsMap ; |
public class InjectorImpl { /** * Returns all bindings matching a type . */
@ Override public < T > Iterable < Binding < T > > bindings ( Class < T > type ) { } } | BindingSet < T > set = ( BindingSet ) _bindingSetMap . get ( type ) ; if ( set != null ) { return ( Iterable ) set ; } else { return Collections . EMPTY_LIST ; } |
public class PathWrapper { /** * Opens a ReadWritePair for reading and writing .
* < p > A chat channel , for example , would open its socket using this
* interface .
* @ param is pre - allocated ReadStream to be initialized
* @ param os pre - allocated WriteStream to be initialized */
public void openReadWrite ( ReadStreamOld is , WriteStreamOld os ) throws IOException { } } | getWrappedPath ( ) . openReadWrite ( is , os ) ; |
public class MPXReader { /** * Populates a calendar instance .
* @ param record MPX record
* @ param calendar calendar instance
* @ param isBaseCalendar true if this is a base calendar */
private void populateCalendar ( Record record , ProjectCalendar calendar , boolean isBaseCalendar ) { } } | if ( isBaseCalendar == true ) { calendar . setName ( record . getString ( 0 ) ) ; } else { calendar . setParent ( m_projectFile . getCalendarByName ( record . getString ( 0 ) ) ) ; } calendar . setWorkingDay ( Day . SUNDAY , DayType . getInstance ( record . getInteger ( 1 ) ) ) ; calendar . setWorkingDay ( Day . MONDAY , DayType . getInstance ( record . getInteger ( 2 ) ) ) ; calendar . setWorkingDay ( Day . TUESDAY , DayType . getInstance ( record . getInteger ( 3 ) ) ) ; calendar . setWorkingDay ( Day . WEDNESDAY , DayType . getInstance ( record . getInteger ( 4 ) ) ) ; calendar . setWorkingDay ( Day . THURSDAY , DayType . getInstance ( record . getInteger ( 5 ) ) ) ; calendar . setWorkingDay ( Day . FRIDAY , DayType . getInstance ( record . getInteger ( 6 ) ) ) ; calendar . setWorkingDay ( Day . SATURDAY , DayType . getInstance ( record . getInteger ( 7 ) ) ) ; m_eventManager . fireCalendarReadEvent ( calendar ) ; |
public class FastdfsService { /** * 上传图片并生成缩略图 、 水印图
* @ param image 原图
* @ param watermark 水印图
* @ param ext 后缀名
* @ param metaInfo 元信息
* @ return */
public String uploadImage ( byte [ ] image , byte [ ] watermark , String ext , Map < String , String > metaInfo ) { } } | return uploadImage ( image , watermark , ext , metaInfo , DEFAULT_OPACITY , DEFAULT_LOCATION , DEFAULT_MARGIN ) ; |
public class Blade { /** * Set the environment variable for global use here
* { @ link # env ( String , String ) }
* @ param key environment key
* @ param value environment value
* @ return blade */
@ Deprecated public Blade environment ( @ NonNull String key , @ NonNull Object value ) { } } | this . environment . set ( key , value ) ; return this ; |
public class NumberFormatterBase { /** * Format the number into the buffer using the given options . */
public void formatDecimal ( BigDecimal n , StringBuilder destination , DecimalFormatOptions options ) { } } | DigitBuffer dbuf = new DigitBuffer ( ) ; DecimalFormatStyle style = options . style ( ) ; boolean grouping = orDefault ( options . grouping ( ) , false ) ; boolean currency = false ; NumberFormatMode formatMode = orDefault ( options . formatMode ( ) , NumberFormatMode . DEFAULT ) ; switch ( style ) { case DECIMAL : { NumberFormatContext ctx = new NumberFormatContext ( options , NumberFormatMode . DEFAULT ) ; NumberPattern pattern = select ( n , decimalStandard ) ; ctx . set ( pattern ) ; BigDecimal q = ctx . adjust ( n ) ; DigitBuffer number = new DigitBuffer ( ) ; NumberFormattingUtils . format ( q , number , params , currency , grouping , ctx . minIntDigits ( ) , pattern . format . primaryGroupingSize ( ) , pattern . format . secondaryGroupingSize ( ) ) ; format ( pattern , number , dbuf , null , null ) ; dbuf . appendTo ( destination ) ; break ; } case LONG : case SHORT : { NumberFormatContext ctx = new NumberFormatContext ( options , NumberFormatMode . SIGNIFICANT ) ; int nDigits = integerDigits ( n ) ; int nDivisor = getDivisor_DECIMAL_LONG ( nDigits ) ; // Q1 is the number divided by the divisor ( if any ) .
BigDecimal q1 = n ; if ( nDivisor > 0 ) { q1 = n . movePointLeft ( nDivisor ) . stripTrailingZeros ( ) ; } int q1Digits = integerDigits ( q1 ) ; // Select an initial pattern using the OTHER plural category .
NumberPattern pattern ; if ( style == DecimalFormatStyle . LONG ) { pattern = select ( n , getPattern_DECIMAL_LONG ( nDigits , PluralCategory . OTHER ) ) ; } else { pattern = select ( n , getPattern_DECIMAL_SHORT ( nDigits , PluralCategory . OTHER ) ) ; } // Q2 is the number adjusted using the pattern and options .
ctx . setCompact ( pattern , q1Digits , nDivisor ) ; BigDecimal q2 = ctx . adjust ( q1 ) ; int q2Digits = integerDigits ( q2 ) ; // Number rounded up , we need to select a new divisor and pattern .
if ( q2Digits > q1Digits ) { // Bump the number of digits by 1 to select the next divisor / pattern .
nDigits ++ ; int divisor = getDivisor_DECIMAL_LONG ( nDigits ) ; if ( style == DecimalFormatStyle . LONG ) { pattern = select ( n , getPattern_DECIMAL_LONG ( nDigits , PluralCategory . OTHER ) ) ; } else { pattern = select ( n , getPattern_DECIMAL_SHORT ( nDigits , PluralCategory . OTHER ) ) ; } // If divisor changed , we need to divide and adjust again .
// Otherwise we just use Q2 as - is .
if ( divisor > nDivisor ) { q1 = n . movePointLeft ( divisor ) . stripTrailingZeros ( ) ; ctx . setCompact ( pattern , integerDigits ( q1 ) , divisor ) ; q2 = ctx . adjust ( q1 ) ; } } // Compute the plural category .
NumberOperands operands = new NumberOperands ( q2 . toPlainString ( ) ) ; PluralCategory category = PLURAL_RULES . evalCardinal ( bundleId . language ( ) , operands ) ; // Select the final pluralized pattern .
if ( style == DecimalFormatStyle . LONG ) { pattern = select ( n , getPattern_DECIMAL_LONG ( nDigits , category ) ) ; } else { pattern = select ( n , getPattern_DECIMAL_SHORT ( nDigits , category ) ) ; } // Format the number into the buffer .
DigitBuffer number = new DigitBuffer ( ) ; NumberFormattingUtils . format ( q2 , number , params , false , grouping , pattern . format . minimumIntegerDigits , decimalStandard [ 0 ] . format . primaryGroupingSize , decimalStandard [ 0 ] . format . secondaryGroupingSize ) ; // Format the entire pattern and append to destination .
format ( pattern , number , dbuf , null , null ) ; dbuf . appendTo ( destination ) ; break ; } case PERCENT : case PERCENT_SCALED : case PERMILLE : case PERMILLE_SCALED : { // In default modes , scale the number . Otherwise assume it is already scaled
// appropriately .
if ( style == PERCENT || style == PERMILLE ) { n = n . movePointRight ( style == PERCENT ? 2 : 3 ) ; } String symbol = ( style == PERCENT ) || ( style == PERCENT_SCALED ) ? params . percent : params . perMille ; NumberPattern pattern = select ( n , percentStandard ) ; DigitBuffer number = new DigitBuffer ( ) ; setup ( params , pattern , n , number , options , null , formatMode , grouping , - 1 , - 1 , - 1 ) ; format ( pattern , number , dbuf , null , symbol ) ; dbuf . appendTo ( destination ) ; break ; } } |
public class AbstractDestinationHandler { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # getInputHandler ( ) */
public InputHandler getInputHandler ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getInputHandler" ) ; SibTr . exit ( tc , "getInputHandler" , inputHandler ) ; } return inputHandler ; |
public class Line { /** * Reads an XML comment . Sets < code > xmlEndLine < / code > .
* @ param firstLine
* The Line to start reading from .
* @ param start
* The starting position .
* @ return The new position or - 1 if it is no valid comment . */
private int readXMLComment ( final Line firstLine , final int start ) { } } | Line line = firstLine ; if ( start + 3 < line . value . length ( ) ) { if ( line . value . charAt ( 2 ) == '-' && line . value . charAt ( 3 ) == '-' ) { int pos = start + 4 ; while ( line != null ) { while ( pos < line . value . length ( ) && line . value . charAt ( pos ) != '-' ) { pos ++ ; } if ( pos == line . value . length ( ) ) { line = line . next ; pos = 0 ; } else { if ( pos + 2 < line . value . length ( ) ) { if ( line . value . charAt ( pos + 1 ) == '-' && line . value . charAt ( pos + 2 ) == '>' ) { this . xmlEndLine = line ; return pos + 3 ; } } pos ++ ; } } } } return - 1 ; |
public class BundleUtils { /** * Returns an optional { @ link android . os . Bundle } value : the value mapped by { @ code key } if { @ code bundle } is non - null .
 * The bundle argument is allowed to be { @ code null } ; in that case the fallback value is returned .
 * @ param bundle a bundle . If the bundle is null , this method will return the fallback value .
 * @ param key a key for the value .
 * @ param fallback fallback value returned only when { @ code bundle } itself is null .
 * @ return the mapped { @ link android . os . Bundle } when { @ code bundle } is non - null ( this may be null if the key is absent or maps to another type ) , otherwise { @ code fallback } . NOTE ( review ) : the fallback is NOT applied when the key is merely missing - confirm this asymmetry is intended .
 * @ see android . os . Bundle # getBundle ( String ) */
@ Nullable public static Bundle optBundle ( @ Nullable Bundle bundle , @ Nullable String key , @ Nullable Bundle fallback ) { } } | if ( bundle == null ) { return fallback ; } return bundle . getBundle ( key ) ;
public class GetUrlTaskResult { /** * Parses a { @ code GetUrlTaskResult } from its JSON representation .
 * @ param taskResult - JSON formatted set of properties
 * @ return the deserialized task result
 * @ throws TaskDataException if the JSON cannot be parsed . NOTE ( review ) : the underlying IOException cause is dropped ( only its message is kept ) ; chain it if TaskDataException supports a cause argument . */
public static GetUrlTaskResult deserialize ( String taskResult ) { } } | JaxbJsonSerializer < GetUrlTaskResult > serializer = new JaxbJsonSerializer < > ( GetUrlTaskResult . class ) ; try { return serializer . deserialize ( taskResult ) ; } catch ( IOException e ) { throw new TaskDataException ( "Unable to create task result due to: " + e . getMessage ( ) ) ; }
public class JobsImpl { /** * Enables the specified job , allowing new tasks to run .
 * When you call this API , the Batch service sets a disabled job to the enabling state . After this operation is completed , the job moves to the active state , and scheduling of new tasks under the job resumes . The Batch service does not allow a task to remain in the active state for more than 180 days . Therefore , if you enable a job containing active tasks which were added more than 180 days ago , those tasks will not run .
 * @ param jobId The ID of the job to enable .
 * @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > enableAsync ( String jobId , final ServiceCallback < Void > serviceCallback ) { } } | /* thin async adapter : the header-only response carries no body , hence fromHeaderResponse */ return ServiceFuture . fromHeaderResponse ( enableWithServiceResponseAsync ( jobId ) , serviceCallback ) ;
public class SpecRewriter { /** * Renames the AST node of a shared field to its internal identifier :
 * field . getAst ( ) . getName ( ) changes , field . getName ( ) remains the same */
private void changeSharedFieldInternalName ( Field field ) { } } | field . getAst ( ) . rename ( InternalIdentifiers . getSharedFieldName ( field . getName ( ) ) ) ;
public class QuandlCodeRequest { /** * Request all columns for a given quandlCode .
 * @ param quandlCode the Quandl code you ' re interested in , not null
 * @ return an request instance , not null */
public static QuandlCodeRequest allColumns ( final String quandlCode ) { } } | ArgumentChecker . notNull ( quandlCode , "quandlCode" ) ; /* a null column selector means "all columns" by convention of this constructor */ return new QuandlCodeRequest ( quandlCode , null ) ;
public class AbstractEncoding { /** * onigenc _ ascii _ apply _ all _ case _ fold / used also by multibyte encodings .
 * Delegates to the shared ASCII case - folding implementation . */
@ Override public void applyAllCaseFold ( int flag , ApplyAllCaseFoldFunction fun , Object arg ) { } } | asciiApplyAllCaseFold ( flag , fun , arg ) ;
public class VoltProjectBuilder { /** * compatible with old deprecated syntax for test ONLY .
 * Registers a single - statement procedure , converting the legacy partition - info string into ProcedurePartitionData . */
public void addStmtProcedure ( String name , String sql , String partitionInfoString ) { } } | addProcedures ( new ProcedureInfo ( new String [ 0 ] , name , sql , ProcedurePartitionData . fromPartitionInfoString ( partitionInfoString ) ) ) ;
public class ReleasableInputStream { /** * Used to truly release the underlying resources :
 * best - effort close of the wrapped stream , then an explicit release for streams whose close is disabled , then abort handling . */
private void doRelease ( ) { } } | try { in . close ( ) ; } catch ( Exception ex ) { /* best-effort close : failures are deliberately only logged at debug level */ if ( log . isDebugEnabled ( ) ) log . debug ( "FYI" , ex ) ; } if ( in instanceof Releasable ) { // This allows any underlying stream that has the close operation
// disabled to be truly released
Releasable r = ( Releasable ) in ; r . release ( ) ; } abortIfNeeded ( ) ;
public class ExperimentsInner { /** * Creates an Experiment .
 * @ param resourceGroupName Name of the resource group to which the resource belongs .
 * @ param workspaceName The name of the workspace . Workspace names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
 * @ param experimentName The name of the experiment . Experiment names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long .
 * @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the { @ link ServiceFuture } object */
public ServiceFuture < ExperimentInner > beginCreateAsync ( String resourceGroupName , String workspaceName , String experimentName , final ServiceCallback < ExperimentInner > serviceCallback ) { } } | /* thin async adapter around the ServiceResponse variant */ return ServiceFuture . fromResponse ( beginCreateWithServiceResponseAsync ( resourceGroupName , workspaceName , experimentName ) , serviceCallback ) ;
public class TreeScanner { /** * { @ inheritDoc } This implementation scans the children in left to right order .
* @ param node { @ inheritDoc }
* @ param p { @ inheritDoc }
* @ return the result of scanning */
@ Override public R visitTypeParameter ( TypeParameterTree node , P p ) { } } | R r = scan ( node . getAnnotations ( ) , p ) ; r = scanAndReduce ( node . getBounds ( ) , p , r ) ; return r ; |
public class FitActionInterpreter { /** * { @ inheritDoc }
 * Picks the starting cell of the first row : cell ( 0 , 0 ) when the row ' s first child names a Fit interpreter , otherwise cell ( 0 , 1 ) . */
@ Override protected Example firstRowOf ( Example next ) { } } | if ( Fit . isAFitInterpreter ( sud , ExampleUtil . contentOf ( next . firstChild ( ) ) ) ) return next . at ( 0 , 0 ) ; return next . at ( 0 , 1 ) ;
public class ReflectionUtils {
    /**
     * Determine whether the given method is a "toString" method, i.e. a
     * zero-argument method named {@code toString}.
     *
     * @param method the method to check (may be {@code null}, in which case
     *               {@code false} is returned)
     * @return {@code true} if the method is a no-arg {@code toString()} method
     * @see Object#toString()
     */
    public static boolean isToStringMethod(Method method) {
        // Bug fix: the previous code compared the name against "readString",
        // contradicting both the method name and the @see Object#toString() contract.
        return (method != null && method.getName().equals("toString") && method.getParameterTypes().length == 0);
    }
}
public class DicLibrary { /** * Deletes a keyword : removes the given word from the user dictionary identified by key ; no - op when the dictionary does not exist . */
public static void delete ( String key , String word ) { } } | Forest dic = get ( key ) ; if ( dic != null ) { Library . removeWord ( dic , word ) ; }
public class GPGFileDecryptor { /** * Generate a PGPEncryptedDataList from an inputstream , registering the BouncyCastle provider on first use .
 * @ param inputStream file inputstream that needs to be decrypted
 * @ return the encrypted - data list read from the stream
 * @ throws IOException */
private PGPEncryptedDataList getPGPEncryptedDataList ( InputStream inputStream ) throws IOException { } } | if ( Security . getProvider ( BouncyCastleProvider . PROVIDER_NAME ) == null ) { Security . addProvider ( new BouncyCastleProvider ( ) ) ; } inputStream = PGPUtil . getDecoderStream ( inputStream ) ; JcaPGPObjectFactory pgpF = new JcaPGPObjectFactory ( inputStream ) ; PGPEncryptedDataList enc ; Object pgpfObject = pgpF . nextObject ( ) ; if ( pgpfObject instanceof PGPEncryptedDataList ) { enc = ( PGPEncryptedDataList ) pgpfObject ; } else { /* first object was not the data list ( presumably a PGP marker packet - TODO confirm ) ; the unchecked cast will throw ClassCastException on malformed input */ enc = ( PGPEncryptedDataList ) pgpF . nextObject ( ) ; } return enc ;
public class CommandArgs { /** * Add a string argument . The argument is represented as bulk string .
 * @ param s the string .
 * @ return the command args ( this instance , for chaining ) . */
public CommandArgs < K , V > add ( String s ) { } } | singularArguments . add ( StringArgument . of ( s ) ) ; return this ;
public class CassandraCounter {
    /**
     * Splits a millisecond timestamp (interpreted in the default time zone)
     * into a two-element array: index 0 is {@code year * 100 + month} (month
     * 1-12), index 1 is the day of month.
     */
    protected static int[] toYYYYMM_DD(long timestampMs) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(timestampMs);
        final int yearMonth = calendar.get(Calendar.YEAR) * 100 + (calendar.get(Calendar.MONTH) + 1);
        final int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH);
        return new int[] { yearMonth, dayOfMonth };
    }
}
public class DefaultConfigurationList { /** * Builds a representation of default configuration and writes it out to the feature list file .
 * @ param mfp the manifest file processor used to resolve bundles and features
 * Any I / O or XML failure is recorded as a RUNTIME _ EXCEPTION return code and rethrown wrapped in RuntimeException . */
public void writeDefaultConfiguration ( ManifestFileProcessor mfp ) { } } | try { try { // Build the list of configurations
buildDefaultConfigurationList ( mfp ) ; // Write < defaultConfiguration >
startDefaultConfigurationSection ( ) ; // We now have a map of bundles to the features that enable them . Loop through it and write out the default configuration for each one .
for ( Entry < FeatureResource , Set < String > > entry : bundleNameToFeaturesMap . entrySet ( ) ) { FeatureResource bundle = entry . getKey ( ) ; BundleWrapper bw = new BundleWrapper ( mfp , bundle ) ; for ( DefaultElement element : bw . getDefaultElements ( ) ) { indenter . indent ( 1 ) ; writer . writeStartElement ( DEFAULT_INSTANCE ) ; writer . writeAttribute ( PROVIDING_FEATURES , getFeatureString ( entry . getValue ( ) ) ) ; if ( element . requiresExisting ( ) ) writer . writeAttribute ( XMLConfigParser . REQUIRE_EXISTING , "true" ) ; if ( element . addIfMissing ( ) ) writer . writeAttribute ( XMLConfigParser . REQUIRE_DOES_NOT_EXIST , "true" ) ; element . writeElement ( 2 ) ; indenter . indent ( 1 ) ; writer . writeEndElement ( ) ; } } // < / defaultConfiguration >
endDefaultConfigurationSection ( ) ; } catch ( XMLStreamException e ) { throw new IOException ( "Error generating feature list" , e ) ; } catch ( BundleException e ) { throw new IOException ( "Error generating feature list" , e ) ; } } catch ( IOException ex ) { options . setReturnCode ( ReturnCode . RUNTIME_EXCEPTION ) ; throw new RuntimeException ( ex ) ; }
public class Caffeine { /** * Ensures that the argument expression is true . */
static void requireArgument ( boolean expression , String template , @ Nullable Object ... args ) { } } | if ( ! expression ) { throw new IllegalArgumentException ( String . format ( template , args ) ) ; } |
public class PathHelper { /** * Assemble a relative path for the given absolute paths .
 * @ param basePath the path to make the result relative to
 * @ param targetPath the path to reach
 * @ return relative node path ( " . " when both are equal ; the original targetPath when no common root exists )
 * NOTE ( review ) : String . split takes a regex - this is only safe while PATH _ SEP has no regex metacharacters ( e . g . " / " ) ; confirm . */
public static String getRelativeNodePath ( String basePath , String targetPath ) { } } | // Both paths are equal
if ( basePath . equals ( targetPath ) ) { return "." ; } if ( basePath . equals ( PATH_SEP ) && ( targetPath . length ( ) > 1 ) ) { // Base path is root path
return targetPath . substring ( 1 ) ; } String [ ] baseAncestors = FilenameUtils . normalizeNoEndSeparator ( basePath ) . split ( PATH_SEP ) ; String [ ] targetAncestors = FilenameUtils . normalizeNoEndSeparator ( targetPath ) . split ( PATH_SEP ) ; int length = ( baseAncestors . length < targetAncestors . length ) ? baseAncestors . length : targetAncestors . length ; int lastCommonRoot = - 1 ; int i ; // Iterate over the shorter path
for ( i = 0 ; i < length ; i ++ ) { if ( baseAncestors [ i ] . equals ( targetAncestors [ i ] ) ) { lastCommonRoot = i ; } else { break ; } } // Last common root is the common base path
if ( lastCommonRoot != - 1 ) { StringBuilder newRelativePath = new StringBuilder ( ) ; // How often must we go back from base path to common root ?
for ( i = lastCommonRoot + 1 ; i < baseAncestors . length ; i ++ ) { if ( baseAncestors [ i ] . length ( ) > 0 ) { newRelativePath . append ( ".." + PATH_SEP ) ; } } // How often must we go forth from common root to get to target path ?
for ( i = lastCommonRoot + 1 ; i < targetAncestors . length ; i ++ ) { newRelativePath . append ( targetAncestors [ i ] ) . append ( PATH_SEP ) ; } // newRelativePath . append ( targetAncestors [ targetAncestors . length - 1 ] ) ;
String result = newRelativePath . toString ( ) ; if ( result . endsWith ( PATH_SEP ) ) { result = result . substring ( 0 , result . length ( ) - 1 ) ; } return result ; } return targetPath ;
public class SurfaceArrayPropertyType { /** * Gets the value of the surface property , lazily creating the backing list on first access .
 * This accessor method returns a reference to the live list ,
 * not a snapshot . Therefore any modification you make to the
 * returned list will be present inside the JAXB object .
 * This is why there is no < CODE > set < / CODE > method for the surface property .
 * For example , to add a new item , do as follows :
 * < pre >
 * get _ Surface ( ) . add ( newItem ) ;
 * < / pre >
 * Objects of the following type ( s ) are allowed in the list
 * { @ link JAXBElement } { @ code < } { @ link TinType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link TriangulatedSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link PolyhedralSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link SurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link net . opengis . citygml . texturedsurface . _ 1 . TexturedSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link net . opengis . citygml . texturedsurface . _ 2 . TexturedSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link OrientableSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link CompositeSurfaceType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link PolygonType } { @ code > }
 * { @ link JAXBElement } { @ code < } { @ link AbstractSurfaceType } { @ code > } */
public List < JAXBElement < ? extends AbstractSurfaceType > > get_Surface ( ) { } } | /* standard JAXB lazy initialization of the live list */ if ( _Surface == null ) { _Surface = new ArrayList < JAXBElement < ? extends AbstractSurfaceType > > ( ) ; } return this . _Surface ;
public class JobExecutionResult { /** * Gets all accumulators produced by the job . The map contains the accumulators as
 * mappings from the accumulator name to the accumulator value .
 * @ return A map containing all accumulators produced by the job .
 * NOTE ( review ) : getUnchecked ( ) presumably rethrows a failure stored in the accumulator - confirm callers expect that . */
public Map < String , Object > getAllAccumulatorResults ( ) { } } | return accumulatorResults . entrySet ( ) . stream ( ) . collect ( Collectors . toMap ( Map . Entry :: getKey , entry -> entry . getValue ( ) . getUnchecked ( ) ) ) ;
public class CreateAliasRequestMarshaller { /** * Marshall the given parameter object into the protocol marshaller .
 * @ throws SdkClientException if the request is null or marshalling fails ( original cause is chained ) . */
public void marshall ( CreateAliasRequest createAliasRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( createAliasRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createAliasRequest . getAliasName ( ) , ALIASNAME_BINDING ) ; protocolMarshaller . marshall ( createAliasRequest . getTargetKeyId ( ) , TARGETKEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Configuration { /** * Get the value of the { @ code name } configuration property as a { @ code long } . If the config property
 * does not exist or is not a valid { @ code long } , then an exception is thrown . Hexadecimal values are supported .
 * @ param name the configuration property name
 * @ throws NumberFormatException if the configured value is not a valid { @ code long }
 * @ throws NullPointerException if the configuration property is not present in the loaded config
 * @ return the configuration property value as a { @ code long } */
public long getLong ( String name ) { } } | String valueString = getTrimmed ( name ) ; Preconditions . checkNotNull ( valueString ) ; /* values recognised as hex by getHexDigits are parsed base 16 */ String hexString = getHexDigits ( valueString ) ; if ( hexString != null ) { return Long . parseLong ( hexString , 16 ) ; } return Long . parseLong ( valueString ) ;
public class MonitoringFilter { /** * This method is protected so that it can be overridden in a class defined by the application .
 * Logs the HTTP request via the shared LOG helper when logging is enabled . */
protected void log ( HttpServletRequest httpRequest , String requestName , long duration , boolean systemError , int responseSize ) { } } | if ( ! logEnabled ) { return ; } final String filterName = filterConfig . getFilterName ( ) ; LOG . logHttpRequest ( httpRequest , requestName , duration , systemError , responseSize , filterName ) ;
public class AipImageSearch { /** * Same - image search - delete API .
 * * * Deletes images from the image library ; batch deletion is supported . For batch deletion pass the cont _ sign parameter ( not image ) ; at most 1000 cont _ signs per request . * *
 * @ param contSign - image signature
 * @ param options - optional parameter map , both key and value are strings
 * options - options list :
 * @ return JSONObject */
public JSONObject sameHqDeleteBySign ( String contSign , HashMap < String , String > options ) { } } | AipRequest request = new AipRequest ( ) ; preOperation ( request ) ; request . addBody ( "cont_sign" , contSign ) ; if ( options != null ) { request . addBody ( options ) ; } request . setUri ( ImageSearchConsts . SAME_HQ_DELETE ) ; postOperation ( request ) ; return requestServer ( request ) ;
public class UnicodeEscaper { /** * Returns the escaped form of a given literal string .
* < p > If you are escaping input in arbitrary successive chunks , then it is not
* generally safe to use this method . If an input string ends with an
* unmatched high surrogate character , then this method will throw
* { @ link IllegalArgumentException } . You should either ensure your input is
* valid < a href = " http : / / en . wikipedia . org / wiki / UTF - 16 " > UTF - 16 < / a > before
* calling this method or use an escaped { @ link Appendable } ( as returned by
* { @ link # escape ( Appendable ) } ) which can cope with arbitrarily split input .
* < p > < b > Note : < / b > When implementing an escaper it is a good idea to override
* this method for efficiency by inlining the implementation of
* { @ link # nextEscapeIndex ( CharSequence , int , int ) } directly . Doing this for
* { @ link PercentEscaper } more than doubled the performance for unescaped
* strings ( as measured by { @ link CharEscapersBenchmark } ) .
* @ param string the literal string to be escaped
* @ return the escaped form of { @ code string }
* @ throws NullPointerException if { @ code string } is null
* @ throws IllegalArgumentException if invalid surrogate characters are
* encountered */
public String escape ( String string ) { } } | int end = string . length ( ) ; int index = nextEscapeIndex ( string , 0 , end ) ; return index == end ? string : escapeSlow ( string , index ) ; |
public class DragDropUtil { /** * This functions handles the default drag and drop move event
* It takes care to move all items one by one within the passed in positions
* @ param fastAdapter the adapter
* @ param oldPosition the start position of the move
* @ param newPosition the end position of the move */
public static void onMove ( ItemAdapter itemAdapter , int oldPosition , int newPosition ) { } } | // necessary , because the positions passed to this function may be jumping in case of that the recycler view is scrolled while holding an item outside of the recycler view
if ( oldPosition < newPosition ) { for ( int i = oldPosition + 1 ; i <= newPosition ; i ++ ) { itemAdapter . move ( i , i - 1 ) ; } } else { for ( int i = oldPosition - 1 ; i >= newPosition ; i -- ) { itemAdapter . move ( i , i + 1 ) ; } } |
public class TruncatedNormal { /** * Returns the mean of the normal distribution truncated to 0 for values of x < lowerBound */
public static double meanTruncLower ( double mu , double sigma , double lowerBound ) { } } | double alpha = ( lowerBound - mu ) / sigma ; double phiAlpha = densityNonTrunc ( alpha , 0 , 1.0 ) ; double cPhiAlpha = cumulativeNonTrunc ( alpha , 0 , 1.0 ) ; return mu + sigma * phiAlpha / ( 1.0 - cPhiAlpha ) ; |
public class HashIntSet { /** * Sets the sentinel value , which cannot itself be stored in the set because it is used
 * internally to represent an unused location . Rewrites every bucket currently holding the old sentinel .
 * @ exception IllegalArgumentException if the set currently contains the requested sentinel . */
public void setSentinel ( int sentinel ) { } } | if ( _sentinel == sentinel ) { return ; } if ( contains ( sentinel ) ) { throw new IllegalArgumentException ( "Set contains sentinel value " + sentinel ) ; } // replace every instance of the old sentinel with the new
for ( int ii = 0 ; ii < _buckets . length ; ii ++ ) { if ( _buckets [ ii ] == _sentinel ) { _buckets [ ii ] = sentinel ; } } _sentinel = sentinel ;
public class OmsPitfiller { /** * Takes the elevation matrix and calculate a matrix with pits filled , using the flooding
 * algorithm . Initialises the working stacks and bounds , then delegates to setdf ( ) .
 * @ throws Exception */
private void flood ( ) throws Exception { } } | /* define directions */
// Initialise the stacks to an estimated size ( 10 % of the grid ) ;
// if more unresolved pixels are found , a separate method resizes
// the vectors on demand .
pitsStackSize = ( int ) ( nCols * nRows * 0.1 ) ; pstack = pitsStackSize ; dn = new int [ pitsStackSize ] ; currentPitRows = new int [ pitsStackSize ] ; currentPitCols = new int [ pitsStackSize ] ; ipool = new int [ pstack ] ; jpool = new int [ pstack ] ; firstCol = 0 ; firstRow = 0 ; lastCol = nCols ; lastRow = nRows ; setdf ( ) ;
public class CueSheet { /** * Verifies the Cue Sheet : lead - in , track numbering , index numbering , and ( optionally ) CD - DA frame alignment .
 * @ param checkCdDaSubset True to additionally enforce the CD - DA subset rules ( 588 = 44100 / 75 samples per CD frame )
 * @ throws Violation Thrown if invalid Cue Sheet */
void isLegal ( boolean checkCdDaSubset ) throws Violation { } } | if ( checkCdDaSubset ) { if ( leadIn < 2 * 44100 ) { throw new Violation ( "CD-DA cue sheet must have a lead-in length of at least 2 seconds" ) ; } if ( leadIn % 588 != 0 ) { throw new Violation ( "CD-DA cue sheet lead-in length must be evenly divisible by 588 samples" ) ; } } if ( numTracks == 0 ) { throw new Violation ( "cue sheet must have at least one track (the lead-out)" ) ; } if ( checkCdDaSubset && tracks [ numTracks - 1 ] . number != 170 ) { throw new Violation ( "CD-DA cue sheet must have a lead-out track number 170 (0xAA)" ) ; } for ( int i = 0 ; i < numTracks ; i ++ ) { if ( tracks [ i ] . number == 0 ) { throw new Violation ( "cue sheet may not have a track number 0" ) ; } if ( checkCdDaSubset ) { if ( ! ( ( tracks [ i ] . number >= 1 && tracks [ i ] . number <= 99 ) || tracks [ i ] . number == 170 ) ) { throw new Violation ( "CD-DA cue sheet track number must be 1-99 or 170" ) ; } } if ( checkCdDaSubset && tracks [ i ] . offset % 588 != 0 ) { throw new Violation ( "CD-DA cue sheet track offset must be evenly divisible by 588 samples" ) ; } if ( i < numTracks - 1 ) { /* index rules apply to every track except the lead-out */ if ( tracks [ i ] . numIndices == 0 ) { throw new Violation ( "cue sheet track must have at least one index point" ) ; } if ( tracks [ i ] . indices [ 0 ] . number > 1 ) { throw new Violation ( "cue sheet track's first index number must be 0 or 1" ) ; } } for ( int j = 0 ; j < tracks [ i ] . numIndices ; j ++ ) { if ( checkCdDaSubset && tracks [ i ] . indices [ j ] . offset % 588 != 0 ) { throw new Violation ( "CD-DA cue sheet track index offset must be evenly divisible by 588 samples" ) ; } if ( j > 0 ) { if ( tracks [ i ] . indices [ j ] . number != tracks [ i ] . indices [ j - 1 ] . number + 1 ) { throw new Violation ( "cue sheet track index numbers must increase by 1" ) ; } } } }
public class FasterRouteDetector { /** * The second step of the new route is valid if
* it equals the current route upcoming step .
* @ param secondStep of the new route
* @ param routeProgress current route progress
* @ return true if valid , false if not */
private boolean validSecondStep ( LegStep secondStep , RouteProgress routeProgress ) { } } | return routeProgress . currentLegProgress ( ) . upComingStep ( ) != null && routeProgress . currentLegProgress ( ) . upComingStep ( ) . equals ( secondStep ) ; |
public class ConceptUtils { /** * determines disjointness of parent - child types , parent defines the bound on the child
 * @ param parent { @ link SchemaConcept }
 * @ param child { @ link SchemaConcept }
 * @ param direct whether compatibility is checked against the direct hierarchy only
 * @ return true if types do not belong to the same type hierarchy , also true if parent is null and false if parent non - null and child null */
public static boolean areDisjointTypes ( SchemaConcept parent , SchemaConcept child , boolean direct ) { } } | /* && binds tighter than || : disjoint when ( parent non-null and child null ) OR ( incompatible in both directions ) */ return parent != null && child == null || ! typesCompatible ( parent , child , direct ) && ! typesCompatible ( child , parent , direct ) ;
public class VelocityEngine { /** * Creates the Velocity engine and configures its resource loader according to the template configuration .
 * @ param config template configuration ( a default configuration is used when null )
 * @ return { @ link org . apache . velocity . app . VelocityEngine } */
private static org . apache . velocity . app . VelocityEngine createEngine ( TemplateConfig config ) { } } | if ( null == config ) { config = new TemplateConfig ( ) ; } final org . apache . velocity . app . VelocityEngine ve = new org . apache . velocity . app . VelocityEngine ( ) ; // encoding
final String charsetStr = config . getCharset ( ) . toString ( ) ; ve . setProperty ( Velocity . INPUT_ENCODING , charsetStr ) ; // ve . setProperty ( Velocity . OUTPUT _ ENCODING , charsetStr ) ;
ve . setProperty ( Velocity . FILE_RESOURCE_LOADER_CACHE , true ) ; // enable template caching
// loader
switch ( config . getResourceMode ( ) ) { case CLASSPATH : ve . setProperty ( "file.resource.loader.class" , "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader" ) ; break ; case FILE : // path
final String path = config . getPath ( ) ; if ( null != path ) { ve . setProperty ( Velocity . FILE_RESOURCE_LOADER_PATH , path ) ; } break ; case WEB_ROOT : ve . setProperty ( Velocity . RESOURCE_LOADER , "webapp" ) ; ve . setProperty ( "webapp.resource.loader.class" , "org.apache.velocity.tools.view.servlet.WebappLoader" ) ; ve . setProperty ( "webapp.resource.loader.path" , StrUtil . nullToDefault ( config . getPath ( ) , StrUtil . SLASH ) ) ; break ; case STRING : ve . setProperty ( Velocity . RESOURCE_LOADER , "str" ) ; ve . setProperty ( "str.resource.loader.class" , SimpleStringResourceLoader . class . getName ( ) ) ; break ; default : break ; } ve . init ( ) ; return ve ;
public class UIInput { /** * < p > Set a < code > MethodBinding < / code > pointing at a
 * method that will be called during < em > Process Validations < / em >
 * phase of the request processing lifecycle , to validate the current
 * value of this component . < / p >
 * < p > Any method referenced by such an expression must be public , with
 * a return type of < code > void < / code > , and accept parameters of type
 * { @ link FacesContext } , { @ link UIComponent } , and < code > Object < / code > . < / p >
 * < p > Passing null removes the existing MethodBindingValidator , if any ; otherwise an existing binding is replaced or a new one appended . < / p >
 * @ param validatorBinding The new < code > MethodBinding < / code > instance
 * @ deprecated Use { @ link # addValidator } instead , obtaining the
 * argument { @ link Validator } by creating an instance of { @ link
 * javax . faces . validator . MethodExpressionValidator } . */
public void setValidator ( MethodBinding validatorBinding ) { } } | Validator [ ] curValidators = getValidators ( ) ; // see if we need to null - out , or replace an existing validator
if ( null != curValidators ) { for ( int i = 0 ; i < curValidators . length ; i ++ ) { // if we want to remove the validatorBinding
if ( null == validatorBinding ) { // We are guaranteed to have at most one instance of
// MethodBindingValidator in the curValidators
// list .
if ( MethodBindingValidator . class == curValidators [ i ] . getClass ( ) ) { removeValidator ( curValidators [ i ] ) ; return ; } } // if we want to replace the validatorBinding
else // noinspection ObjectEquality
if ( validatorBinding == curValidators [ i ] ) { removeValidator ( curValidators [ i ] ) ; break ; } } } addValidator ( new MethodBindingValidator ( validatorBinding ) ) ;
public class AbstractParam { /** * Gets the { @ code String } with the given configuration key .
 * The default value is returned if the key doesn ' t exist or it ' s not a { @ code String } .
 * @ param key the configuration key .
 * @ param defaultValue the default value , if the key doesn ' t exist or it ' s not a { @ code String } .
 * @ return the value of the configuration , or default value .
 * @ since 2.7.0 */
protected String getString ( String key , String defaultValue ) { } } | try { return getConfig ( ) . getString ( key , defaultValue ) ; } catch ( ConversionException e ) { /* bad type : log and fall through to the default */ logConversionException ( key , e ) ; } return defaultValue ;
public class BaseDestinationHandler { /** * < p > This method updates the destinationLocalizationDefinition associated with the
 * destinationHandler ( if the destination is localised on this ME )
 * and performs any necessary modifications to the
 * message store and other components to reflect the new state of the
 * destinationHandler . < / p >
 * @ param destinationLocalizationDefinition
 * < p > Updates the DestinationLocalizationDefinition associated with the
 * destination . < / p >
 * @ throws SIResourceException when the definition is not a LocalizationDefinition for a point - to - point destination . */
protected void updateLocalizationDefinition ( BaseLocalizationDefinition destinationLocalizationDefinition , TransactionCommon transaction ) throws SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "updateLocalizationDefinition" , new Object [ ] { destinationLocalizationDefinition , transaction } ) ; // TODO close all consumer sessions ?
if ( isPubSub ( ) ) _pubSubRealization . updateLocalisationDefinition ( ( LocalizationDefinition ) destinationLocalizationDefinition ) ; else { if ( destinationLocalizationDefinition instanceof LocalizationDefinition ) { // this is an update of the existing PM localization
_ptoPRealization . updateLocalisationDefinition ( destinationLocalizationDefinition , transaction ) ; } else { SIResourceException e = new SIResourceException ( new UnsupportedOperationException ( ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "updateLocalizationDefinition" , e ) ; throw e ; } } // update TRM if necessary
getLocalisationManager ( ) . updateTrmAdvertisements ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "updateLocalizationDefinition" ) ;
public class GenIncrementalDomCodeVisitor { /** * Generates ` get [ X ] ` for a given parameter value : an abstract declaration when isAbstract is set , otherwise a concrete override reading { @ code this . data . < param > } ( with default handling when the param declares one ) . */
private MethodDeclaration generateGetParamMethodForSoyElementClass ( TemplateParam param , boolean isAbstract ) { } } | JsType jsType = JsType . forIncrementalDomState ( param . type ( ) ) ; /* capitalize the first letter to build the accessor name */ String accessorSuffix = Ascii . toUpperCase ( param . name ( ) . substring ( 0 , 1 ) ) + param . name ( ) . substring ( 1 ) ; if ( isAbstract ) { return MethodDeclaration . create ( "get" + accessorSuffix , JsDoc . builder ( ) . addAnnotation ( "abstract" ) . addParameterizedAnnotation ( "return" , jsType . typeExpr ( ) ) . build ( ) , Statement . of ( ImmutableList . of ( ) ) ) ; } Expression value = id ( "this" ) . dotAccess ( "data" ) . dotAccess ( param . name ( ) ) ; if ( param . hasDefault ( ) ) { value = templateTranslationContext . codeGenerator ( ) . declarationBuilder ( ) . setRhs ( value ) . build ( ) . ref ( ) ; value = value . withInitialStatement ( genParamDefault ( param , value , alias , getJsTypeForParamTypeCheck ( param . type ( ) ) , /* declareStatic = */
false ) ) ; } MethodDeclaration getParamMethod = MethodDeclaration . create ( "get" + accessorSuffix , JsDoc . builder ( ) . addAnnotation ( "override" ) . addAnnotation ( "public" ) . build ( ) , Statement . returnValue ( value ) ) ; return getParamMethod ;
public class CPOptionCategoryUtil { /** * Returns the first cp option category in the ordered set where companyId = & # 63 ; .
 * @ param companyId the company ID
 * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
 * @ return the first matching cp option category , or < code > null < / code > if a matching cp option category could not be found */
public static CPOptionCategory fetchByCompanyId_First ( long companyId , OrderByComparator < CPOptionCategory > orderByComparator ) { } } | /* static facade delegating to the persistence layer */ return getPersistence ( ) . fetchByCompanyId_First ( companyId , orderByComparator ) ;
public class AbstractGitFlowMojo { /** * Executes git checkout .
 * @ param branchName
 * Branch name to checkout .
 * @ throws MojoFailureException
 * @ throws CommandLineException */
protected void gitCheckout ( final String branchName ) throws MojoFailureException , CommandLineException { } } | getLog ( ) . info ( "Checking out '" + branchName + "' branch." ) ; executeGitCommand ( "checkout" , branchName ) ;
public class GrammarConverter { /** * This method starts and controls the process for final conversion of all
 * tokens from the token and helper skeletons , populating { @ code tokenDefinitions } from the Tokens / TokenDefinitions subtree .
 * @ param helpers
 * @ param tokens
 * @ throws GrammarException
 * @ throws TreeException */
private void convertTokenDefinitions ( Map < String , ParseTreeNode > helpers , Map < String , ParseTreeNode > tokens ) throws GrammarException , TreeException { } } | tokenDefinitions = new TokenDefinitionSet ( ) ; for ( ParseTreeNode tokenDefinitionAST : parserTree . getChild ( "Tokens" ) . getChild ( "TokenDefinitions" ) . getChildren ( "TokenDefinition" ) ) { TokenDefinition convertedTokenDefinition = getTokenDefinition ( tokenDefinitionAST , helpers , tokens ) ; tokenDefinitions . addDefinition ( convertedTokenDefinition ) ; }
public class PathBuilder { /** * Create a new Comparable typed path
* @ param < A >
* @ param path existing path
* @ return property path */
@ SuppressWarnings ( "unchecked" ) public < A extends Comparable < ? > > ComparablePath < A > get ( ComparablePath < A > path ) { } } | ComparablePath < A > newPath = getComparable ( toString ( path ) , ( Class < A > ) path . getType ( ) ) ; return addMetadataOf ( newPath , path ) ; |
public class DescribeSnapshotsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeSnapshotsRequest describeSnapshotsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeSnapshotsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeSnapshotsRequest . getDirectoryId ( ) , DIRECTORYID_BINDING ) ; protocolMarshaller . marshall ( describeSnapshotsRequest . getSnapshotIds ( ) , SNAPSHOTIDS_BINDING ) ; protocolMarshaller . marshall ( describeSnapshotsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeSnapshotsRequest . getLimit ( ) , LIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class WindowManager {

    /**
     * The callback invoked by the trigger policy.
     *
     * Snapshots the current window contents, notifies the lifecycle listener of
     * the activation (new events, expired events, reference time), and resets
     * the trigger policy.
     *
     * @return true when the window contained at least one event at trigger time
     */
    @Override
    public boolean onTrigger() {
        List<Event<T>> windowEvents = null;
        List<T> expired = null;
        /*
         * scan the entire window to handle out of order events in
         * the case of time based windows.
         */
        windowEvents = scanEvents(true);
        // Drain the expired-event buffer into a local snapshot for the listener.
        expired = new ArrayList<>(expiredEvents);
        expiredEvents.clear();
        List<T> events = new ArrayList<>();
        List<T> newEvents = new ArrayList<>();
        for (Event<T> event : windowEvents) {
            events.add(event.get());
            // Events not seen in the previous activation are reported as "new".
            if (!prevWindowEvents.contains(event)) {
                newEvents.add(event.get());
            }
        }
        // Remember this activation's contents for the next new-event diff.
        prevWindowEvents.clear();
        if (!events.isEmpty()) {
            prevWindowEvents.addAll(windowEvents);
            LOG.fine(String.format("invoking windowLifecycleListener onActivation, [%d] events in " + "window.", events.size()));
            windowLifecycleListener.onActivation(events, newEvents, expired, evictionPolicy.getContext().getReferenceTime());
        } else {
            LOG.fine("No events in the window, skipping onActivation");
        }
        // Reset regardless of whether the listener was invoked.
        triggerPolicy.reset();
        return !events.isEmpty();
    }
}
public class ValueTransformer { /** * / * ( non - Javadoc )
* @ see com . oath . cyclops . types . Zippable # zip ( java . util . function . BiFunction , org . reactivestreams . Publisher ) */
public < T2 , R > ValueTransformer < W , R > zip ( BiFunction < ? super T , ? super T2 , ? extends R > f , Publisher < ? extends T2 > publisher ) { } } | return unitAnyM ( this . transformerStream ( ) . map ( v -> v . zip ( f , publisher ) ) ) ; |
public class Engine { /** * Apply the groups of strategy rules to the dataset passed in .
* @ param dataArr The list of data set
* @ return The list of new generated data set */
public ArrayList < HashMap < String , Object > > applyStg ( ArrayList < HashMap < String , Object > > dataArr ) { } } | ArrayList < HashMap < String , Object > > results = new ArrayList ( ) ; // Check if there is generator command group left
if ( hasMoreGenRules ( ) ) { if ( genRules != null && ! genRules . isEmpty ( ) ) { // Apply non - generator rules to each entry of data
for ( HashMap < String , Object > data : dataArr ) { ArrayList < HashMap < String , Object > > arr = new ArrayList ( ) ; for ( int i = 0 ; i < genRules . size ( ) - 1 ; i ++ ) { HashMap < String , String > rule = genRules . get ( i ) ; applyRule ( data , rule ) ; } } // Apply generator rules to whole data set
HashMap < String , String > gemRule = genRules . get ( genRules . size ( ) - 1 ) ; if ( ! gemRule . isEmpty ( ) ) { generators . add ( gemRule ) ; results = runGenerators ( dataArr , cur != genGroups . size ( ) ) ; generators . clear ( ) ; } else { results = dataArr ; } } // Try yo apply next group of genertators
results = applyStg ( results ) ; } else { // Finish recursion
results = dataArr ; // Remove reference
boolean wthRefFlg = ! keysToExtractFinal . contains ( "weather" ) ; boolean soilRefFlg = ! keysToExtractFinal . contains ( "soil" ) ; for ( HashMap result : results ) { if ( wthRefFlg ) { result . remove ( "weather" ) ; } if ( soilRefFlg ) { result . remove ( "soil" ) ; } } } return results ; |
public class ClassUseWriter { /** * Generate the class use list . */
protected void generateClassUseFile ( ) throws IOException { } } | Content body = getClassUseHeader ( ) ; HtmlTree div = new HtmlTree ( HtmlTag . DIV ) ; div . addStyle ( HtmlStyle . classUseContainer ) ; if ( pkgSet . size ( ) > 0 ) { addClassUse ( div ) ; } else { div . addContent ( getResource ( "doclet.ClassUse_No.usage.of.0" , classdoc . qualifiedName ( ) ) ) ; } body . addContent ( div ) ; addNavLinks ( false , body ) ; addBottom ( body ) ; printHtmlDocument ( null , true , body ) ; |
public class SQLiteViewStore {

    /**
     * Are key1 and key2 grouped together at this groupLevel?
     *
     * Non-list keys (or groupLevel 0) group together only when fully equal.
     * List keys group together when their first {@code groupLevel} elements
     * match; lists shorter than {@code groupLevel} must have equal sizes.
     *
     * @param key1 first key (scalar or List)
     * @param key2 second key (scalar or List)
     * @param groupLevel number of leading list elements that must match
     * @return true when the keys belong to the same group
     */
    // Widened from private to package-private to allow unit testing;
    // callers inside this class are unaffected.
    static boolean groupTogether(Object key1, Object key2, int groupLevel) {
        if (groupLevel == 0 || !(key1 instanceof List) || !(key2 instanceof List)) {
            return key1.equals(key2);
        }
        // Wildcard views instead of unchecked List<Object> casts: no
        // @SuppressWarnings needed, same runtime behavior.
        List<?> key1List = (List<?>) key1;
        List<?> key2List = (List<?>) key2;
        // If either key list is smaller than groupLevel and the key lists are
        // different sizes, they cannot be equal.
        if ((key1List.size() < groupLevel || key2List.size() < groupLevel)
                && key1List.size() != key2List.size()) {
            return false;
        }
        int end = Math.min(groupLevel, Math.min(key1List.size(), key2List.size()));
        for (int i = 0; i < end; ++i) {
            Object a = key1List.get(i);
            Object b = key2List.get(i);
            // Null-safe element comparison (replaces the original two-branch check).
            if (a == null ? b != null : !a.equals(b)) {
                return false;
            }
        }
        return true;
    }
}
public class SitelinkFeedItem { /** * Gets the sitelinkFinalUrls value for this SitelinkFeedItem .
* @ return sitelinkFinalUrls * A list of possible final URLs after all cross domain redirects . */
public com . google . api . ads . adwords . axis . v201809 . cm . UrlList getSitelinkFinalUrls ( ) { } } | return sitelinkFinalUrls ; |
public class ServiceEndpointPolicyDefinitionsInner { /** * Creates or updates a service endpoint policy definition in the specified service endpoint policy .
* @ param resourceGroupName The name of the resource group .
* @ param serviceEndpointPolicyName The name of the service endpoint policy .
* @ param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name .
* @ param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ServiceEndpointPolicyDefinitionInner > createOrUpdateAsync ( String resourceGroupName , String serviceEndpointPolicyName , String serviceEndpointPolicyDefinitionName , ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions , final ServiceCallback < ServiceEndpointPolicyDefinitionInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , serviceEndpointPolicyName , serviceEndpointPolicyDefinitionName , serviceEndpointPolicyDefinitions ) , serviceCallback ) ; |
public class NlsAccess { /** * This method sets ( overrides ) the default { @ link NlsTemplateResolver } . < b > WARNING : < / b > < br >
* This is only a back - door for simple applications or test situations . Please try to avoid using this feature and
* solve this issue with IoC strategies ( using non - final static fields like here is evil ) . < br >
* < b > ATTENTION : < / b > < br >
* No synchronization is performed setting the instance . This assumes that an assignment is an atomic operation in the
* JVM you are using . Additionally this method should only be invoked in the initialization phase of your application .
* @ param templateResolver is the { @ link NlsTemplateResolver } to use by default .
* @ since 2.0.0 */
public static void setTemplateResolver ( NlsTemplateResolver templateResolver ) { } } | if ( NlsAccess . templateResolver == null ) { NlsAccess . templateResolver = templateResolver ; } else if ( NlsAccess . templateResolver != templateResolver ) { LOG . warn ( "NlsTemplateResolver is already set to {} and will not be changed to {}." , NlsAccess . templateResolver , templateResolver ) ; } |
public class VertxProtonEndpoint { /** * Getters & setters */
public Vertx getVertx ( ) { } } | if ( vertx != null ) { return vertx ; } if ( getComponent ( ) . getVertx ( ) != null ) { vertx = getComponent ( ) . getVertx ( ) ; } else { vertx = Vertx . vertx ( ) ; } return vertx ; |
public class Metadata { /** * Create a new { @ code Metadata } object with the given parameters .
* @ param name the name of the GPX file
* @ param description a description of the contents of the GPX file
* @ param author the person or organization who created the GPX file
* @ param copyright copyright and license information governing use of the
* file
* @ param links URLs associated with the location described in the file
* @ param time the creation date of the file
* @ param keywords keywords associated with the file . Search engines or
* databases can use this information to classify the data .
* @ param bounds minimum and maximum coordinates which describe the extent
* of the coordinates in the file
* @ return a new { @ code Metadata } object with the given parameters
* @ throws NullPointerException if the given { @ code links } sequence is
* { @ code null } */
public static Metadata of ( final String name , final String description , final Person author , final Copyright copyright , final List < Link > links , final ZonedDateTime time , final String keywords , final Bounds bounds ) { } } | return new Metadata ( name , description , author == null || author . isEmpty ( ) ? null : author , copyright , links , time , keywords , bounds ) ; |
public class TypeConverter { /** * Gets TypeCode for specific type .
* @ param type the Class type for the data type .
* @ return the TypeCode that corresponds to the passed object ' s type . */
public static TypeCode toTypeCode ( Class < ? > type ) { } } | if ( type == null ) return TypeCode . Unknown ; else if ( type . isArray ( ) ) return TypeCode . Array ; else if ( type . isEnum ( ) ) return TypeCode . Enum ; else if ( type . isPrimitive ( ) ) { if ( _booleanType . isAssignableFrom ( type ) ) return TypeCode . Boolean ; if ( _doubleType . isAssignableFrom ( type ) ) return TypeCode . Double ; if ( _floatType . isAssignableFrom ( type ) ) return TypeCode . Float ; if ( _longType . isAssignableFrom ( type ) ) return TypeCode . Long ; if ( _integerType . isAssignableFrom ( type ) ) return TypeCode . Integer ; } else { if ( _booleanType . isAssignableFrom ( type ) ) return TypeCode . Boolean ; if ( _doubleType . isAssignableFrom ( type ) ) return TypeCode . Double ; if ( _floatType . isAssignableFrom ( type ) ) return TypeCode . Float ; if ( _longType . isAssignableFrom ( type ) ) return TypeCode . Long ; if ( _integerType . isAssignableFrom ( type ) ) return TypeCode . Integer ; if ( _stringType . isAssignableFrom ( type ) ) return TypeCode . String ; if ( _dateTimeType . isAssignableFrom ( type ) ) return TypeCode . DateTime ; if ( _durationType . isAssignableFrom ( type ) ) return TypeCode . Duration ; if ( _mapType . isAssignableFrom ( type ) ) return TypeCode . Map ; if ( _listType . isAssignableFrom ( type ) ) return TypeCode . Array ; if ( _enumType . isAssignableFrom ( type ) ) return TypeCode . Enum ; } return TypeCode . Object ; |
public class ResourceReaderImpl { /** * / * ( non - Javadoc )
* @ see net . crowmagnumb . util . ResourceReader # getInteger ( java . lang . String , java . lang . Integer ) */
@ Override public Integer getInteger ( final String key , final Integer defaultValue ) { } } | return formatInteger ( key , getFormattedPropValue ( key ) , defaultValue ) ; |
public class FullscreenVideoView { /** * Releases and ends the current Object */
protected void release ( ) { } } | Log . d ( TAG , "release" ) ; releaseObjects ( ) ; if ( this . mediaPlayer != null ) { this . mediaPlayer . setOnBufferingUpdateListener ( null ) ; this . mediaPlayer . setOnPreparedListener ( null ) ; this . mediaPlayer . setOnErrorListener ( null ) ; this . mediaPlayer . setOnSeekCompleteListener ( null ) ; this . mediaPlayer . setOnCompletionListener ( null ) ; this . mediaPlayer . setOnInfoListener ( null ) ; this . mediaPlayer . setOnVideoSizeChangedListener ( null ) ; this . mediaPlayer . release ( ) ; this . mediaPlayer = null ; } this . currentState = State . END ; |
public class DublinCoreSchema { /** * Adds a single publisher .
* @ param publisher */
public void addPublisher ( String publisher ) { } } | XmpArray array = new XmpArray ( XmpArray . ORDERED ) ; array . add ( publisher ) ; setProperty ( PUBLISHER , array ) ; |
public class SgMethod { /** * Returns the " type " signature of the method .
* @ return Method name and argument types ( like
* " methodXY ( String , int , boolean ) " ) . */
public final String getTypeSignature ( ) { } } | final StringBuffer sb = new StringBuffer ( ) ; sb . append ( getName ( ) ) ; sb . append ( "(" ) ; for ( int i = 0 ; i < getArguments ( ) . size ( ) ; i ++ ) { if ( i > 0 ) { sb . append ( ", " ) ; } final SgArgument arg = getArguments ( ) . get ( i ) ; sb . append ( arg . getType ( ) . getSimpleName ( ) ) ; } sb . append ( ")" ) ; return sb . toString ( ) ; |
public class GetInventoryRequest { /** * The list of inventory item types to return .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setResultAttributes ( java . util . Collection ) } or { @ link # withResultAttributes ( java . util . Collection ) } if you
* want to override the existing values .
* @ param resultAttributes
* The list of inventory item types to return .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetInventoryRequest withResultAttributes ( ResultAttribute ... resultAttributes ) { } } | if ( this . resultAttributes == null ) { setResultAttributes ( new com . amazonaws . internal . SdkInternalList < ResultAttribute > ( resultAttributes . length ) ) ; } for ( ResultAttribute ele : resultAttributes ) { this . resultAttributes . add ( ele ) ; } return this ; |
public class BitsUtil {

    /**
     * Test for the bitstring to be all-zero.
     *
     * @param v bitstring, stored as an array of 64-bit words
     * @return true when every word is zero (an empty array counts as all-zero)
     */
    public static boolean isZero(long[] v) {
        for (long word : v) {
            if (word != 0L) {
                return false;
            }
        }
        return true;
    }
}
public class LinkHandler { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . store . AbstractItem # addPersistentLinkData ( ) */
@ SuppressWarnings ( "unchecked" ) public void addPersistentLinkData ( HashMap hm ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addPersistentLinkData" , hm ) ; // For a linkHandler , we store
// * The uuid of the link
// * The name of the link
// * The busName at the other end of the link
// The uuid of the destination
hm . put ( "uuid" , _uuid . toByteArray ( ) ) ; // The name of the destination
hm . put ( "name" , _name ) ; // The name of the bus at the other end of the link
hm . put ( "busName" , _busName ) ; // Is the destination localised on the home ME
hm . put ( "hasLocal" , new Boolean ( hasLocal ( ) ) ) ; hm . put ( "type" , _type ) ; hm . put ( "inboundUserid" , _inboundUserid ) ; hm . put ( "outboundUserid" , _outboundUserid ) ; hm . put ( "TBD" , new Boolean ( isToBeDeleted ( ) ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "addPersistentLinkData" ) ; |
public class AsyncExecutionHandler { /** * < p > The given { @ link HttpResponse } with a successful status code is processed using the response
* processor chain ( { @ link Processors # RESPONSE } and if an { @ link AsyncHandler } is defined , the result
* of the processor chain is submitted to the < i > onSuccess < / i > callback . If response processing
* resulted in an error , execution defers to { @ link # onError ( InvocationContext , Exception ) } instead
* and the success callback of the { @ link AsyncHandler } is skipped . < / p >
* < p > See { @ link ExecutionHandler # onSuccess ( InvocationContext , HttpResponse ) } < / p >
* @ param context
* the { @ link InvocationContext } with information on the proxy invocation
* < br > < br >
* @ param response
* the resulting { @ link HttpResponse } with a successful status code
* < br > < br >
* @ since 1.3.0 */
@ Override public void onSuccess ( InvocationContext context , HttpResponse response ) { } } | Object reponseEntity = null ; try { reponseEntity = Processors . RESPONSE . run ( context , response ) ; // process , regardless of an AsyncHandler definition
} catch ( Exception e ) { onError ( context , InvocationException . newInstance ( context , response , e ) ) ; return ; } AsyncHandler < Object > asyncHandler = getAsyncHandler ( context ) ; if ( asyncHandler != null ) { try { asyncHandler . onSuccess ( response , reponseEntity ) ; } catch ( Exception e ) { LOGGER . error ( "Callback \"onSuccess\" aborted with an exception." , e ) ; } } |
public class SQLException { /** * Retrieve the SQLState code for the error that generated this exception .
* @ return Five character SQLState code . */
public String getSQLState ( ) { } } | String state = null ; try { state = new String ( m_sqlState , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } return state ; |
public class OperationsImpl { /** * Everything in the first list must be in the second list too
* But not necessarily the reverse .
* @ param list
* @ param list2
* @ return true if first list is in the second */
@ InternalFunction ( ) public Boolean match ( final List < ProxyField > list , final List < ProxyField > list2 ) { } } | if ( list == null ) { return Boolean . TRUE ; } for ( ProxyField proxyField : list ) { boolean found = false ; for ( ProxyField proxyField2 : list2 ) { if ( proxyField . getValue ( ) . equals ( proxyField2 . getValue ( ) ) ) { found = true ; break ; } } if ( ! found ) { return Boolean . FALSE ; } } return Boolean . TRUE ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.