signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ObjFunc { /** * static */ public static boolean isInc ( List < ObjFunc > ofs ) { } }
if ( ofs . isEmpty ( ) ) { throw new IllegalArgumentException ( "No Objective function(s) defined. " ) ; } boolean inc = ofs . get ( 0 ) . getOF ( ) . positiveDirection ( ) ; for ( ObjFunc of : ofs ) { if ( of . getOF ( ) . positiveDirection ( ) != inc ) { throw new IllegalArgumentException ( "Objective function(s) optimization direction mismatch!" ) ; } } return inc ;
public class CoordinatorDynamicConfigsResource { /** * default value is used for backwards compatibility */ @ POST @ Consumes ( MediaType . APPLICATION_JSON ) public Response setDynamicConfigs ( final CoordinatorDynamicConfig . Builder dynamicConfigBuilder , @ HeaderParam ( AuditManager . X_DRUID_AUTHOR ) @ DefaultValue ( "" ) final String author , @ HeaderParam ( AuditManager . X_DRUID_COMMENT ) @ DefaultValue ( "" ) final String comment , @ Context HttpServletRequest req ) { } }
try { CoordinatorDynamicConfig current = CoordinatorDynamicConfig . current ( manager ) ; final SetResult setResult = manager . set ( CoordinatorDynamicConfig . CONFIG_KEY , dynamicConfigBuilder . build ( current ) , new AuditInfo ( author , comment , req . getRemoteAddr ( ) ) ) ; if ( setResult . isOk ( ) ) { return Response . ok ( ) . build ( ) ; } else { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ServletResourceUtils . sanitizeException ( setResult . getException ( ) ) ) . build ( ) ; } } catch ( IllegalArgumentException e ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ServletResourceUtils . sanitizeException ( e ) ) . build ( ) ; }
public class Session {

    /**
     * Provides an implementation for {@link Activity#onActivityResult onActivityResult} that
     * updates the Session based on information returned during the authorization flow.
     * The Activity that calls open or requestNewPermissions should forward the resulting
     * onActivityResult call here to update the Session state based on the contents of the
     * resultCode and data.
     *
     * @param currentActivity The Activity that is forwarding the onActivityResult call.
     * @param requestCode The requestCode parameter from the forwarded call. When this
     *                    onActivityResult occurs as part of Facebook authorization flow,
     *                    this value is the activityCode passed to open or authorize.
     * @param resultCode An int containing the resultCode parameter from the forwarded call.
     * @param data The Intent passed as the data parameter from the forwarded call.
     * @return A boolean indicating whether the requestCode matched a pending authorization
     *         request for this Session.
     */
    public final boolean onActivityResult(Activity currentActivity, int requestCode, int resultCode, Intent data) {
        Validate.notNull(currentActivity, "currentActivity");
        initializeStaticContext(currentActivity);
        synchronized (lock) {
            // Results that do not belong to the authorization request we started are not ours.
            if (pendingAuthorizationRequest == null || (requestCode != pendingAuthorizationRequest.getRequestCode())) {
                return false;
            }
        }
        Exception exception = null;
        AuthorizationClient.Result.Code code = AuthorizationClient.Result.Code.ERROR;
        if (data != null) {
            AuthorizationClient.Result result =
                    (AuthorizationClient.Result) data.getSerializableExtra(LoginActivity.RESULT_KEY);
            if (result != null) {
                // This came from LoginActivity.
                handleAuthorizationResult(resultCode, result);
                return true;
            } else if (authorizationClient != null) {
                // Delegate to the auth client.
                authorizationClient.onActivityResult(requestCode, resultCode, data);
                return true;
            }
        } else if (resultCode == Activity.RESULT_CANCELED) {
            // A null Intent with RESULT_CANCELED means the user backed out of the flow.
            exception = new FacebookOperationCanceledException("User canceled operation.");
            code = AuthorizationClient.Result.Code.CANCEL;
        }
        if (exception == null) {
            // Neither a recognized result payload nor a cancel: treat as an unexpected call.
            exception = new FacebookException("Unexpected call to Session.onActivityResult");
        }
        logAuthorizationComplete(code, null, exception);
        finishAuthOrReauth(null, exception);
        return true;
    }
}
public class AbstractTemporalCellConverter { /** * アノテーションで指定されたExcelの書式を設定する 。 * @ param settingExcelPattern アノテーションで指定されたExcelの書式 。 空の場合もある 。 */ public void setSettingExcelPattern ( String settingExcelPattern ) { } }
if ( Utils . isEmpty ( settingExcelPattern ) ) { this . settingExcelPattern = Optional . empty ( ) ; } else { this . settingExcelPattern = Optional . of ( settingExcelPattern ) ; }
public class Engine {

    /**
     * Run the generators on the dataset passed in. This will generate a number of additional
     * datasets based on the original dataset.
     * <p>
     * Delegates to the two-argument overload with its boolean flag set to {@code false}
     * (presumably the "default mode" flag — TODO confirm the flag's meaning against the overload).
     *
     * @param data A dataset to run the generators on
     * @return A {@code HashMap} of just the exported keys.
     */
    public ArrayList<HashMap<String, Object>> runGenerators(HashMap<String, Object> data) {
        return runGenerators(data, false);
    }
}
public class URLTemplatesFactory { /** * Adds a given URLTemplatesFactory instance as an attribute on the ServletRequest . * @ param servletRequest the current ServletRequest . * @ param templatesFactory the URLTemplatesFactory instance to add as an attribute of the request */ public static void initServletRequest ( ServletRequest servletRequest , URLTemplatesFactory templatesFactory ) { } }
assert servletRequest != null : "The ServletRequest cannot be null." ; if ( servletRequest == null ) { throw new IllegalArgumentException ( "The ServletRequest cannot be null." ) ; } servletRequest . setAttribute ( URL_TEMPLATE_FACTORY_ATTR , templatesFactory ) ;
public class JobOperatorImpl {

    /**
     * {@inheritDoc}
     * <p>
     * Returns the execution ids of running executions for the given job name that the
     * current caller is authorized to read. If none are visible, {@code validateJobName}
     * is invoked so an unknown job name still raises {@code NoSuchJobException}.
     */
    @Override
    public List<Long> getRunningExecutions(String jobName) throws NoSuchJobException, JobSecurityException {
        batchRoleCheck();
        List<Long> authorizedRunningExecutions = new ArrayList<Long>();
        // get the jobexecution ids associated with this job name
        List<Long> allRunningExecutions = getPersistenceManagerService().getJobExecutionsRunning(jobName);
        for (long id : allRunningExecutions) {
            try {
                logger.finer("Examining executionId: " + id);
                // No auth service configured means everything is readable; otherwise check
                // read permission on the owning job instance.
                if (authService == null
                        || authService.isAuthorizedInstanceRead(getPersistenceManagerService().getJobInstanceIdFromExecutionId(id))) {
                    JobExecution jobEx = getPersistenceManagerService().getJobExecution(id);
                    authorizedRunningExecutions.add(jobEx.getExecutionId());
                } else {
                    logger.finer("Don't have authorization for executionId: " + id);
                }
            } catch (NoSuchJobExecutionException e) {
                // The execution was listed a moment ago but has vanished — a persistence
                // inconsistency, surfaced loudly rather than silently skipped.
                String errorMsg = "Just found execution with id = " + id + " in table, but now seeing it as gone";
                throw new IllegalStateException(errorMsg, e);
            }
        }
        if (authorizedRunningExecutions.size() == 0) {
            validateJobName(jobName);
        }
        return authorizedRunningExecutions;
    }
}
public class OneTouch {

    /**
     * Sends the OneTouch's approval request to the Authy servers and returns the
     * OneTouchResponse that comes back.
     *
     * @param approvalRequestParams The bean wrapping the user's Authy approval request built
     *                              using the ApprovalRequest.Builder
     * @return The bean wrapping the response from Authy's service.
     * @throws AuthyException if the underlying request fails
     */
    public OneTouchResponse sendApprovalRequest(ApprovalRequestParams approvalRequestParams) throws AuthyException {
        // Build the JSON payload; only optional fields that are actually set are included.
        JSONObject params = new JSONObject();
        params.put("message", approvalRequestParams.getMessage());
        if (approvalRequestParams.getSecondsToExpire() != null) {
            params.put("seconds_to_expire", approvalRequestParams.getSecondsToExpire());
        }
        if (approvalRequestParams.getDetails().size() > 0) {
            params.put("details", mapToJSONObject(approvalRequestParams.getDetails()));
        }
        if (approvalRequestParams.getHidden().size() > 0) {
            params.put("hidden_details", mapToJSONObject(approvalRequestParams.getHidden()));
        }
        if (!approvalRequestParams.getLogos().isEmpty()) {
            JSONArray jSONArray = new JSONArray();
            for (Logo logo : approvalRequestParams.getLogos()) {
                logo.addToMap(jSONArray);
            }
            params.put("logos", jSONArray);
        }
        // POST to .../users/{authyId}/... (path assembled from the PRE/POS constants).
        final Response response = this.post(
                APPROVAL_REQUEST_PRE + approvalRequestParams.getAuthyId() + APPROVAL_REQUEST_POS,
                new JSONBody(params));
        OneTouchResponse oneTouchResponse = new OneTouchResponse(response.getStatus(), response.getBody());
        if (!oneTouchResponse.isOk()) {
            // Non-OK responses carry a structured error body; attach it to the response bean.
            oneTouchResponse.setError(errorFromJson(response.getBody()));
        }
        return oneTouchResponse;
    }
}
public class TagUtils { /** * " $ { . . . } " EL 명을 Object 값으로 변환한다 . * @ param context * @ param elName * @ return */ public static Object getElValue ( JspContext context , String elName ) { } }
VariableResolver variableResolver = context . getVariableResolver ( ) ; Object var = null ; try { elName = TagUtils . getElName ( elName ) ; var = variableResolver . resolveVariable ( elName ) ; logger . debug ( "ExpressionLanguage variable {} is [{}]" , elName , var ) ; } catch ( ELException e ) { logger . error ( "ExpressionLanguage Error" , e ) ; } return var ;
public class dbdbprofile { /** * Use this API to fetch dbdbprofile resource of given name . */ public static dbdbprofile get ( nitro_service service , String name ) throws Exception { } }
dbdbprofile obj = new dbdbprofile ( ) ; obj . set_name ( name ) ; dbdbprofile response = ( dbdbprofile ) obj . get_resource ( service ) ; return response ;
public class UserPreferences { /** * Fix key . * @ param key the key * @ return the string */ protected static String fixKey ( final String key ) { } }
if ( key . startsWith ( getKeyPrefix ( ) ) ) { return key ; } return getKeyPrefix ( ) + "-" + key ;
public class XianDataSource { /** * parse the databse url and get he database name * @ return the database name */ public String getDatabase ( ) { } }
int indexSlash = url . indexOf ( "/" , url . indexOf ( "://" ) + 3 ) ; int questionMarkIndex = url . indexOf ( "?" ) ; if ( questionMarkIndex != - 1 ) { return url . substring ( indexSlash + 1 , questionMarkIndex ) ; } else { return url . substring ( indexSlash + 1 ) ; }
public class ClearDefaultAuthorizerRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param clearDefaultAuthorizerRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(ClearDefaultAuthorizerRequest clearDefaultAuthorizerRequest, ProtocolMarshaller protocolMarshaller) {
        if (clearDefaultAuthorizerRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // NOTE(review): the try block is intentionally empty — presumably generated code
            // for a request type with no marshallable members, so only the null check above
            // does any work. TODO confirm against the code generator output.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GoogleHadoopFileSystemBase {

    /**
     * {@inheritDoc}
     * <p>
     * Removes the named extended attribute from the item at {@code path} by issuing a
     * metadata update whose value for that key is {@code null} (a null value signals
     * deletion to {@code updateItems}).
     */
    @Override
    public void removeXAttr(Path path, String name) throws IOException {
        logger.atFine().log("GHFS.removeXAttr: %s, %s", path, name);
        checkNotNull(path, "path should not be null");
        checkNotNull(name, "name should not be null");
        FileInfo fileInfo = getGcsFs().getFileInfo(getGcsPath(path));
        // A map entry with a null value marks the attribute for removal.
        Map<String, byte[]> xAttrToRemove = new HashMap<>();
        xAttrToRemove.put(getXAttrKey(name), null);
        UpdatableItemInfo updateInfo = new UpdatableItemInfo(fileInfo.getItemInfo().getResourceId(), xAttrToRemove);
        getGcsFs().getGcs().updateItems(ImmutableList.of(updateInfo));
        logger.atFine().log("GHFS.removeXAttr:=> ");
    }
}
public class HttpRequestBuilder {

    /**
     * Send the request and log/update metrics for the results.
     * <p>
     * Opens the connection, applies timeouts, method, and request headers, optionally
     * writes the request entity, then reads status, response headers, and body. Timing
     * and outcome are recorded on {@code entry}, which is always logged in the finally
     * block regardless of success or failure.
     *
     * @return the response status, headers, and body
     * @throws IOException if the connection or transfer fails (also recorded on the log entry)
     */
    protected HttpResponse sendImpl() throws IOException {
        HttpURLConnection con = (HttpURLConnection) uri.toURL().openConnection();
        con.setConnectTimeout(connectTimeout);
        con.setReadTimeout(readTimeout);
        con.setRequestMethod(method);
        // Mirror every request header into both the metrics entry and the connection.
        for (Map.Entry<String, String> h : reqHeaders.entrySet()) {
            entry.addRequestHeader(h.getKey(), h.getValue());
            con.setRequestProperty(h.getKey(), h.getValue());
        }
        configureHTTPS(con);
        try {
            con.setDoInput(true);
            // HttpURLConnection will change method to POST if there is a body associated
            // with a GET request. Only try to write entity if it is not empty.
            entry.markStart();
            if (entity.length > 0) {
                con.setDoOutput(true);
                try (OutputStream out = con.getOutputStream()) {
                    out.write(entity);
                }
            }
            int status = con.getResponseCode();
            entry.markEnd().withHttpStatus(status);
            // A null key is used to return the status line, remove it before sending to
            // the log entry or creating the response object
            Map<String, List<String>> headers = new LinkedHashMap<>(con.getHeaderFields());
            headers.remove(null);
            for (Map.Entry<String, List<String>> h : headers.entrySet()) {
                for (String v : h.getValue()) {
                    entry.addResponseHeader(h.getKey(), v);
                }
            }
            // Error responses (>= 400) must be read from the error stream instead.
            try (InputStream in = (status >= 400) ? con.getErrorStream() : con.getInputStream()) {
                byte[] data = readAll(in);
                return new HttpResponse(status, headers, data);
            }
        } catch (IOException e) {
            // Record the failure on the metrics entry before propagating.
            entry.markEnd().withException(e);
            throw e;
        } finally {
            entry.log();
        }
    }
}
public class ClientBase {

    /**
     * Persists the entity wrapped by the given node and indexes it.
     * <p>
     * The entity id is re-read from the entity after persistence because
     * {@code onPersist} may assign a generated id, and the node is updated to match.
     *
     * @param node the graph node carrying the entity, its id, and update state
     */
    public void persist(Node node) {
        Object entity = node.getData();
        Object id = node.getEntityId();
        EntityMetadata metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, node.getDataClass());
        isUpdate = node.isUpdate();
        List<RelationHolder> relationHolders = getRelationHolders(node);
        onPersist(metadata, entity, id, relationHolders);
        // The id may have been generated/assigned during persist; refresh it from the entity.
        id = PropertyAccessorHelper.getId(entity, metadata);
        node.setEntityId(id);
        indexNode(node, metadata);
    }
}
public class RestorableHivePartitionDatasetFinder { /** * Will return a Singleton list of HivePartitionDataset to be restored . */ public List < HivePartitionDataset > findDatasets ( ) throws IOException { } }
Preconditions . checkArgument ( this . state . contains ( ComplianceConfigurationKeys . RESTORE_DATASET ) , "Missing required property " + ComplianceConfigurationKeys . RESTORE_DATASET ) ; HivePartitionDataset hivePartitionDataset = HivePartitionFinder . findDataset ( this . state . getProp ( ComplianceConfigurationKeys . RESTORE_DATASET ) , this . state ) ; Preconditions . checkNotNull ( hivePartitionDataset , "No dataset to restore" ) ; return Collections . singletonList ( hivePartitionDataset ) ;
public class ProgressEvent { /** * Private - - - - - */ private void readObject ( ObjectInputStream ois ) throws ClassNotFoundException , IOException { } }
ObjectInputStream . GetField fields = ois . readFields ( ) ; String name = serialPersistentFields [ STATUS_IDX ] . getName ( ) ; this . status = ( DeploymentStatus ) fields . get ( name , null ) ; name = serialPersistentFields [ MODULE_ID_IDX ] . getName ( ) ; this . moduleID = ( TargetModuleID ) fields . get ( name , null ) ;
public class Job { /** * Returns the configured build discarder for this job , via { @ link BuildDiscarderProperty } , or null if none . */ public synchronized BuildDiscarder getBuildDiscarder ( ) { } }
BuildDiscarderProperty prop = _getProperty ( BuildDiscarderProperty . class ) ; return prop != null ? prop . getStrategy ( ) : /* settings compatibility */ logRotator ;
public class TokenStreamBuilder {

    /**
     * Appends a token to the stream, ignoring a repeated push of the same token instance.
     * Default-channel tokens are additionally recorded in the main stream; every accepted
     * token goes into the all-tokens stream.
     *
     * @param token the token to record
     */
    public void push(T token) {
        // Identity comparison (==) is used, so only the exact same object pushed twice in a
        // row is suppressed — presumably intentional de-duplication of re-emitted tokens;
        // TODO confirm identity (not equals) is the intended contract.
        if (token == lastPushed) return;
        lastPushed = token;
        if (token.getChannel() == TokenChannel.Default) {
            main.add(token);
        }
        all.add(token);
    }
}
public class HttpServerUpgradeHandler {

    /**
     * Splits a comma-separated header value into its parts with all whitespace removed.
     * <p>
     * Note: despite earlier documentation, this returns a {@link List} (not a set) and
     * performs no case normalization; callers must compare entries case-insensitively
     * themselves where required.
     *
     * @param header the raw header value
     * @return the individual comma-separated tokens, whitespace stripped
     */
    private static List<CharSequence> splitHeader(CharSequence header) {
        final StringBuilder builder = new StringBuilder(header.length());
        final List<CharSequence> protocols = new ArrayList<CharSequence>(4);
        for (int i = 0; i < header.length(); ++i) {
            char c = header.charAt(i);
            if (Character.isWhitespace(c)) {
                // Don't include any whitespace.
                continue;
            }
            if (c == ',') {
                // Add the string and reset the builder for the next protocol.
                // NOTE(review): consecutive commas (",,") produce an empty-string entry here.
                protocols.add(builder.toString());
                builder.setLength(0);
            } else {
                builder.append(c);
            }
        }
        // Add the last protocol
        if (builder.length() > 0) {
            protocols.add(builder.toString());
        }
        return protocols;
    }
}
public class GroupElement {

    /**
     * Look up $16^i r_i B$ in the precomputed table.
     * No secret array indices, no secret branching. Constant time.
     * Must have previously precomputed.
     * Method is package private only so that tests run.
     * <p>
     * The chain of {@code cmov} calls touches every table slot unconditionally so that
     * memory access patterns do not depend on the secret value of {@code b}; do not
     * "optimize" this into an indexed lookup or early exit.
     *
     * @param pos $= i/2$ for $i$ in $\{0, 2, 4, ..., 62\}$
     * @param b $= r_i$
     * @return the GroupElement
     */
    org.mariadb.jdbc.internal.com.send.authentication.ed25519.math.GroupElement select(final int pos, final int b) {
        // Is r_i negative?
        final int bnegative = Utils.negative(b);
        // |r_i| — branch-free absolute value.
        final int babs = b - (((-bnegative) & b) << 1);
        // 16^i |r_i| B — select from the table via constant-time conditional moves.
        final org.mariadb.jdbc.internal.com.send.authentication.ed25519.math.GroupElement t =
                this.curve.getZero(Representation.PRECOMP)
                        .cmov(this.precmp[pos][0], Utils.equal(babs, 1))
                        .cmov(this.precmp[pos][1], Utils.equal(babs, 2))
                        .cmov(this.precmp[pos][2], Utils.equal(babs, 3))
                        .cmov(this.precmp[pos][3], Utils.equal(babs, 4))
                        .cmov(this.precmp[pos][4], Utils.equal(babs, 5))
                        .cmov(this.precmp[pos][5], Utils.equal(babs, 6))
                        .cmov(this.precmp[pos][6], Utils.equal(babs, 7))
                        .cmov(this.precmp[pos][7], Utils.equal(babs, 8));
        // -16^i |r_i| B — the negation swaps X/Y and negates Z in PRECOMP representation.
        final org.mariadb.jdbc.internal.com.send.authentication.ed25519.math.GroupElement tminus =
                precomp(curve, t.Y, t.X, t.Z.negate());
        // 16^i r_i B — constant-time pick between t and its negation.
        return t.cmov(tminus, bnegative);
    }
}
public class UriEscape { /** * Perform am URI path segment < strong > escape < / strong > operation * on a < tt > char [ ] < / tt > input . * The following are the only allowed chars in an URI path segment ( will not be escaped ) : * < ul > * < li > < tt > A - Z a - z 0-9 < / tt > < / li > * < li > < tt > - . _ ~ < / tt > < / li > * < li > < tt > ! $ & amp ; ' ( ) * + , ; = < / tt > < / li > * < li > < tt > : @ < / tt > < / li > * < / ul > * All other chars will be escaped by converting them to the sequence of bytes that * represents them in the specified < em > encoding < / em > and then representing each byte * in < tt > % HH < / tt > syntax , being < tt > HH < / tt > the hexadecimal representation of the byte . * This method is < strong > thread - safe < / strong > . * @ param text the < tt > char [ ] < / tt > to be escaped . * @ param offset the position in < tt > text < / tt > at which the escape operation should start . * @ param len the number of characters in < tt > text < / tt > that should be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ param encoding the encoding to be used for escaping . * @ throws IOException if an input / output exception occurs */ public static void escapeUriPathSegment ( final char [ ] text , final int offset , final int len , final Writer writer , final String encoding ) throws IOException { } }
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } final int textLen = ( text == null ? 0 : text . length ) ; if ( offset < 0 || offset > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } if ( len < 0 || ( offset + len ) > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } UriEscapeUtil . escape ( text , offset , len , writer , UriEscapeUtil . UriEscapeType . PATH_SEGMENT , encoding ) ;
public class ApiOvhDomain {

    /**
     * List of glue record.
     * REST: GET /domain/{serviceName}/glueRecord
     *
     * @param host [required] Filter the value of host property (like)
     * @param serviceName [required] The internal name of your domain
     * @return the glue record host names returned by the API
     * @throws IOException if the HTTP call fails
     */
    public ArrayList<String> serviceName_glueRecord_GET(String serviceName, String host) throws IOException {
        String qPath = "/domain/{serviceName}/glueRecord";
        // path() substitutes {serviceName}; query() appends host as a query parameter.
        StringBuilder sb = path(qPath, serviceName);
        query(sb, "host", host);
        String resp = exec(qPath, "GET", sb.toString(), null);
        // t2 is the shared type token for ArrayList<String> deserialization.
        return convertTo(resp, t2);
    }
}
public class MediaHandlerImpl { /** * If a set of media formats is given it is filtered to contain only download media formats . * If no is given a new set of allowed media formats is created by getting from all media formats those marked as " download " . * If the result is an empty set of media formats ( but downloads are requests ) resolution is not successful . * @ param mediaArgs Media args * @ return true if resolving was successful */ private boolean resolveDownloadMediaFormats ( MediaArgs mediaArgs ) { } }
if ( ! mediaArgs . isDownload ( ) ) { // not filtering for downloads return true ; } List < MediaFormat > candidates = new ArrayList < > ( ) ; if ( mediaArgs . getMediaFormats ( ) != null ) { candidates . addAll ( ImmutableList . copyOf ( mediaArgs . getMediaFormats ( ) ) ) ; } else { candidates . addAll ( mediaFormatHandler . getMediaFormats ( ) ) ; } MediaFormat [ ] result = candidates . stream ( ) . filter ( MediaFormat :: isDownload ) . toArray ( size -> new MediaFormat [ size ] ) ; if ( result . length > 0 ) { mediaArgs . mediaFormats ( result ) ; return true ; } else { return false ; }
public class CmsEditMappingDialog { /** * Returns the String value of the mapping type . < p > * @ return String value of the mapping type */ public String getType ( ) { } }
if ( ( m_mapping != null ) && ( m_mapping . getType ( ) != null ) ) { return m_mapping . getType ( ) . toString ( ) ; } return "" ;
public class HtmlWriter { /** * Generates HTML Output for a { @ link TableElement } . */ private static String tableElementToHtml ( TableElement td ) { } }
StringBuilder result = new StringBuilder ( ) ; result . append ( "Row: " + td . getRow ( ) + " Col: " + td . getCol ( ) + "\n" ) ; if ( td . nrOfSections ( ) == 1 && td . getSection ( 0 ) . getTitleElement ( ) == null ) { result . append ( sectionCCLToHtml ( td . getSection ( 0 ) ) ) ; } else { for ( int i = 0 ; i < td . nrOfSections ( ) ; i ++ ) { result . append ( sectionToHtml ( td . getSection ( i ) ) ) ; } } return result . toString ( ) ;
public class ListGlobalTablesResult { /** * List of global table names . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setGlobalTables ( java . util . Collection ) } or { @ link # withGlobalTables ( java . util . Collection ) } if you want to * override the existing values . * @ param globalTables * List of global table names . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListGlobalTablesResult withGlobalTables ( GlobalTable ... globalTables ) { } }
if ( this . globalTables == null ) { setGlobalTables ( new java . util . ArrayList < GlobalTable > ( globalTables . length ) ) ; } for ( GlobalTable ele : globalTables ) { this . globalTables . add ( ele ) ; } return this ;
public class DBInstance { /** * The Active Directory Domain membership records associated with the DB instance . * @ param domainMemberships * The Active Directory Domain membership records associated with the DB instance . */ public void setDomainMemberships ( java . util . Collection < DomainMembership > domainMemberships ) { } }
if ( domainMemberships == null ) { this . domainMemberships = null ; return ; } this . domainMemberships = new com . amazonaws . internal . SdkInternalList < DomainMembership > ( domainMemberships ) ;
public class TileDaoUtils {

    /**
     * Get the zoom level for the provided width and height in the default units.
     * Delegates to the overload with its final boolean set to {@code true}
     * (presumably "match lengths exactly/inclusively" — TODO confirm against the overload).
     *
     * @param widths sorted widths
     * @param heights sorted heights
     * @param tileMatrices tile matrices
     * @param length in default units
     * @return tile matrix zoom level
     */
    public static Long getZoomLevel(double[] widths, double[] heights, List<TileMatrix> tileMatrices, double length) {
        return getZoomLevel(widths, heights, tileMatrices, length, true);
    }
}
public class Bitstream {

    /**
     * Read bits from buffer into the lower bits of an unsigned int.
     * The LSB contains the latest read bit of the stream.
     * (1 &lt;= number_of_bits &lt;= 16)
     * <p>
     * Bits are consumed from 32-bit words in {@code framebuffer}; {@code bitindex} tracks
     * the position within the current word and {@code wordpointer} the current word. A read
     * that straddles a word boundary is assembled from the tail of one word and the head
     * of the next.
     */
    public int get_bits(int number_of_bits) {
        int returnvalue = 0;
        int sum = bitindex + number_of_bits;
        // E.B
        // There is a problem here, wordpointer could be -1 ?!
        // Defensive clamp: resets a negative word pointer to the first word.
        if (wordpointer < 0) wordpointer = 0;
        // E.B : End.
        if (sum <= 32) {
            // all bits contained in *wordpointer: shift the wanted bits down to the LSB
            // and mask off everything above number_of_bits.
            returnvalue = (framebuffer[wordpointer] >>> (32 - sum)) & bitmask[number_of_bits];
            // returnvalue = (wordpointer[0] >> (32 - sum)) & bitmask[number_of_bits];
            if ((bitindex += number_of_bits) == 32) {
                bitindex = 0;
                wordpointer++; // added by me!
            }
            return returnvalue;
        }
        // E.B : Check that ?
        // ((short[])&returnvalue)[0] = ((short[])wordpointer + 1)[0];
        // wordpointer++; // Added by me!
        // ((short[])&returnvalue + 1)[0] = ((short[])wordpointer)[0];
        // The read straddles a word boundary: take the low 16 bits of the current word and
        // the high 16 bits of the next, splice them into one 32-bit value, then shift/mask
        // out the requested field. (Valid because number_of_bits <= 16.)
        int Right = (framebuffer[wordpointer] & 0x0000FFFF);
        wordpointer++;
        int Left = (framebuffer[wordpointer] & 0xFFFF0000);
        returnvalue = ((Right << 16) & 0xFFFF0000) | ((Left >>> 16) & 0x0000FFFF);
        returnvalue >>>= 48 - sum;
        // returnvalue >>= 16 - (number_of_bits - (32 - bitindex))
        returnvalue &= bitmask[number_of_bits];
        bitindex = sum - 32;
        return returnvalue;
    }
}
public class ElementPlugin {

    /**
     * Register a component with the container. The container will control the visibility of
     * the component according to when it is active/inactive.
     *
     * @param component BaseComponent to register.
     */
    public void registerComponent(BaseUIComponent component) {
        // Lazily create the registry on first registration.
        if (registeredComponents == null) {
            registeredComponents = new ArrayList<>();
        }
        registeredComponents.add(component);
        component.setAttribute(Constants.ATTR_CONTAINER, this);
        // Capture the component's own visibility BEFORE the container overrides it below,
        // so it can presumably be restored later — order of these two lines matters.
        component.setAttribute(Constants.ATTR_VISIBLE, component.isVisible());
        component.setVisible(isVisible());
    }
}
public class StaticResourceRequest {

    /**
     * Returns a matcher that includes the specified {@link StaticResourceLocation Locations}.
     * For example: <pre class="code">
     * PathRequest.toStaticResources().at(locations)
     * </pre>
     *
     * @param locations the locations to include; must not be null
     * @return the configured {@link RequestMatcher}
     */
    public StaticResourceRequestMatcher at(Set<StaticResourceLocation> locations) {
        Assert.notNull(locations, "Locations must not be null");
        // Defensive copy preserving iteration order, so later mutation of the caller's set
        // cannot affect the matcher.
        return new StaticResourceRequestMatcher(new LinkedHashSet<>(locations));
    }
}
public class ColorHelper {

    /**
     * Alpha Blend ("over" compositing of {@code added} on top of {@code base}).
     * Source from: https://gist.github.com/JordanDelcros/518396da1c13f75ee057
     *
     * @param base the background ARGB color
     * @param added the foreground ARGB color blended over the base
     * @return the blended ARGB color
     */
    public static int mix(int base, int added) {
        float bAlpha = ColorHelper.getAlpha(base) / 255f;
        float aAlpha = ColorHelper.getAlpha(added) / 255f;
        // Combined coverage of the two layers.
        float alpha = 1 - (1 - bAlpha) * (1 - aAlpha); // alpha
        int bR = ColorHelper.getRed(base);
        int bG = ColorHelper.getGreen(base);
        int bB = ColorHelper.getBlue(base);
        int aR = ColorHelper.getRed(added);
        int aG = ColorHelper.getGreen(added);
        int aB = ColorHelper.getBlue(added);
        // NOTE(review): if both inputs are fully transparent, alpha == 0 and these divisions
        // produce NaN (Math.round(NaN) == 0) — TODO confirm that returning a zeroed color is
        // the intended behavior for that case.
        int r = Math.round((aR * aAlpha / alpha) + (bR * bAlpha * (1 - aAlpha) / alpha)); // red
        int g = Math.round((aG * aAlpha / alpha) + (bG * bAlpha * (1 - aAlpha) / alpha)); // green
        int b = Math.round((aB * aAlpha / alpha) + (bB * bAlpha * (1 - aAlpha) / alpha)); // blue
        return getARGB(r, g, b, (int) clamp(alpha * MAX));
    }
}
public class NativeCounterCell {

    /**
     * We have to special case digest creation for counter column because we don't want to
     * include the information about which shard of the context is a delta or not, since
     * this information differs from node to node.
     * <p>
     * The update order (name, value, timestamp, flags, last-delete timestamp) must stay
     * fixed — changing it would change the digest and break cross-node comparison.
     */
    @Override
    public void updateDigest(MessageDigest digest) {
        updateWithName(digest);
        // We don't take the deltas into account in a digest
        contextManager.updateDigest(digest, value());
        FBUtilities.updateWithLong(digest, timestamp());
        FBUtilities.updateWithByte(digest, serializationFlags());
        FBUtilities.updateWithLong(digest, timestampOfLastDelete());
    }
}
public class OsiamGroupService {

    /**
     * See {@link OsiamConnector#replaceGroup(String, Group, AccessToken)}.
     * Thin delegate to the generic resource replacement.
     *
     * @param id the id of the group to replace
     * @param group the replacement group data
     * @param accessToken token authorizing the operation
     * @return the replaced group as returned by the service
     */
    Group replaceGroup(String id, Group group, AccessToken accessToken) {
        return replaceResource(id, group, accessToken);
    }
}
public class ConfigurationLoader {

    /**
     * Returns an input stream for reading the specified resource.
     * <p>
     * The resource is looked up via the thread-context class loader first, falling back to
     * this class's own class loader (the previous version failed outright when no context
     * class loader was set, e.g. on framework-managed threads). A leading slash is
     * tolerated: if the name as given is not found, the lookup is retried without it.
     *
     * @param resource the resource name
     * @return an input stream for reading the resource
     * @throws FileNotFoundException if the resource could not be found
     */
    public static InputStream loadResource(final String resource) throws FileNotFoundException {
        final boolean hasLeadingSlash = resource.startsWith("/");
        final String stripped = hasLeadingSlash ? resource.substring(1) : resource;

        InputStream stream = null;
        final ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
        if (contextLoader != null) {
            stream = openFrom(contextLoader, resource, stripped, hasLeadingSlash);
        }
        if (stream == null) {
            // Fallback: the context class loader may be null or may not see this artifact.
            final ClassLoader ownLoader = ConfigurationLoader.class.getClassLoader();
            if (ownLoader != null && ownLoader != contextLoader) {
                stream = openFrom(ownLoader, resource, stripped, hasLeadingSlash);
            }
        }
        if (stream == null) {
            throw new FileNotFoundException("resource " + resource + " not found");
        }
        return stream;
    }

    /** Tries the resource name as given, then without its leading slash. */
    private static InputStream openFrom(final ClassLoader loader, final String resource,
            final String stripped, final boolean hasLeadingSlash) {
        InputStream stream = loader.getResourceAsStream(resource);
        if (stream == null && hasLeadingSlash) {
            stream = loader.getResourceAsStream(stripped);
        }
        return stream;
    }
}
public class AddressDivisionGrouping {

    /**
     * Increments the given section by {@code increment}, choosing the cheapest strategy
     * based on whether the section is a single value or a range of {@code count} values.
     * This does not handle overflow; overflow should be checked before calling this.
     *
     * @param section the section to increment
     * @param increment signed amount to add
     * @param addrCreator creator used to build result sections
     * @param count the number of values the (possibly multi-valued) section spans
     * @param lowerValue numeric value of the section's lower bound
     * @param upperValue numeric value of the section's upper bound
     * @param lowerProducer lazily supplies the lower-bound section
     * @param upperProducer lazily supplies the upper-bound section
     * @param prefixLength prefix length to apply to the result
     * @return the incremented section
     */
    protected static <R extends AddressSection, S extends AddressSegment> R increment(
            R section, long increment, AddressCreator<?, R, ?, S> addrCreator, long count,
            long lowerValue, long upperValue, Supplier<R> lowerProducer, Supplier<R> upperProducer,
            Integer prefixLength) {
        // Single value: plain addition from the value itself.
        if (!section.isMultiple()) {
            return add(section, lowerValue, increment, addrCreator, prefixLength);
        }
        boolean isDecrement = increment <= 0;
        if (isDecrement) {
            // we know lowerValue + increment >= 0 because we already did an overflow check
            return add(lowerProducer.get(), lowerValue, increment, addrCreator, prefixLength);
        }
        if (count > increment) {
            // The target still lies inside the range.
            if (count == increment + 1) {
                // Landing exactly on the upper bound.
                return upperProducer.get();
            }
            return incrementRange(section, increment, addrCreator, lowerProducer, prefixLength);
        }
        // Past the range: add the remainder beyond the upper bound. Use BigInteger only when
        // the long addition upperValue + remainder would overflow.
        if (increment <= Long.MAX_VALUE - upperValue) {
            return add(upperProducer.get(), upperValue, increment - (count - 1), addrCreator, prefixLength);
        }
        return add(upperProducer.get(), BigInteger.valueOf(increment - (count - 1)), addrCreator, prefixLength);
    }
}
public class MessageAction { /** * Adds the provided byte [ ] as file data . * < p > To reset all files use { @ link # clearFiles ( ) } * @ param data * The byte [ ] that will be interpreted as file data * @ param name * The file name that should be used to interpret the type of the given data * using the file - name extension . This name is similar to what will be visible * through { @ link net . dv8tion . jda . core . entities . Message . Attachment # getFileName ( ) Message . Attachment . getFileName ( ) } * @ throws java . lang . IllegalStateException * If the file limit of { @ value Message # MAX _ FILE _ AMOUNT } has been reached prior to calling this method , * or if this MessageAction will perform an edit operation on an existing Message ( see { @ link # isEdit ( ) } ) * @ throws java . lang . IllegalArgumentException * If the provided data is { @ code null } or the provided name is blank or { @ code null } * or if the provided data exceeds the maximum file size of the currently logged in account * @ throws net . dv8tion . jda . core . exceptions . InsufficientPermissionException * If this is targeting a TextChannel and the currently logged in account does not have * { @ link net . dv8tion . jda . core . Permission # MESSAGE _ ATTACH _ FILES Permission . MESSAGE _ ATTACH _ FILES } * @ return Updated MessageAction for chaining convenience * @ see net . dv8tion . jda . core . entities . SelfUser # getAllowedFileSize ( ) SelfUser . getAllowedFileSize ( ) */ @ CheckReturnValue public MessageAction addFile ( final byte [ ] data , final String name ) { } }
Checks . notNull ( data , "Data" ) ; final long maxSize = getJDA ( ) . getSelfUser ( ) . getAllowedFileSize ( ) ; Checks . check ( data . length <= maxSize , "File may not exceed the maximum file length of %d bytes!" , maxSize ) ; return addFile ( new ByteArrayInputStream ( data ) , name ) ;
public class DateTimeBrowser { /** * addMenus */ private void addMenus ( JMenuBar menuBar ) { } }
// Create all the menus . JMenu fileMenu = new JMenu ( "File" ) ; JMenu viewMenu = new JMenu ( "View" ) ; // Add them to the menubar in order . menuBar . add ( fileMenu ) ; menuBar . add ( viewMenu ) ; // Create action objects and menu items . Action open = new OpenAction ( ) ; JMenuItem jmiOpen = new JMenuItem ( open ) ; Action exit = new ExitAction ( ) ; JMenuItem jmiExit = new JMenuItem ( exit ) ; // Next Menu Action getter = new GetterAction ( ) ; jmiGetter = new JMenuItem ( getter ) ; getter . setEnabled ( true ) ; Action hex = new HexAction ( ) ; jmiHex = new JMenuItem ( hex ) ; hex . setEnabled ( true ) ; Action date = new DateAction ( ) ; jmiDate = new JMenuItem ( date ) ; date . setEnabled ( true ) ; Action cal = new CalAction ( ) ; jmiCal = new JMenuItem ( cal ) ; cal . setEnabled ( true ) ; // Build the file menu . fileMenu . add ( jmiOpen ) ; fileMenu . addSeparator ( ) ; fileMenu . add ( jmiExit ) ; // Build the view menu . viewMenu . add ( jmiGetter ) ; viewMenu . add ( jmiHex ) ; viewMenu . add ( jmiDate ) ; viewMenu . add ( jmiCal ) ; // * temp Developer ' s code // jmiGetter . setEnabled ( false ) ; // JMenuItem getter2 = new JMenuItem ( " getter2 " ) ; // getter2 . addActionListener ( new myMouseListener ( ) ) ; // viewMenu . add ( getter2 ) ;
public class DefaultRequestWrapper { /** * Default implementation of the request wrapper */ @ Override public YokeRequest wrap ( HttpServerRequest request , Context context , Map < String , Engine > engines , SessionStore store ) { } }
return new YokeRequest ( request , new YokeResponse ( request . response ( ) , context , engines ) , context , store ) ;
public class FileLogInput { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . LogInput # getLogFileSize ( ) */ protected long getLogFileSize ( ) { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "getLogFileSIze" ) ; long logFileSize = sectorValidatedInputStream . header . fileSize ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "getLogFileSize" , "returns logFileSize=" + logFileSize + "(long)" ) ; return logFileSize ;
public class BoardsApi { /** * Lists Issue Boards in the given project . * < pre > < code > GitLab Endpoint : GET / projects / : id / boards < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ return a list of project ' s issue boards * @ throws GitLabApiException if any exception occurs */ public List < Board > getBoards ( Object projectIdOrPath ) throws GitLabApiException { } }
return ( getBoards ( projectIdOrPath , getDefaultPerPage ( ) ) . all ( ) ) ;
public class AbstractNlsResourceBundleJavaScriptServlet { /** * This method writes the given { @ link ResourceBundle } to the { @ code writer } . * @ param writer is the { @ link PrintWriter } to use . * @ param name is the { @ link ResourceBundle # getBundle ( String ) bundle name } . * @ param bundle is the { @ link ResourceBundle } for the users locale to write to the given { @ code writer } . */ protected void writeBundle ( PrintWriter writer , String name , ResourceBundle bundle ) { } }
writer . print ( "var " ) ; writer . print ( escapeBundleName ( name ) ) ; writer . println ( " = {" ) ; Enumeration < String > keyEnum = bundle . getKeys ( ) ; while ( keyEnum . hasMoreElements ( ) ) { String key = keyEnum . nextElement ( ) ; Object object = bundle . getObject ( key ) ; if ( object instanceof String ) { writer . print ( escapeBundleKey ( key ) ) ; writer . print ( ":\"" ) ; writer . print ( object . toString ( ) ) ; writer . print ( "\"," ) ; } } writer . println ( "};" ) ;
public class AVSession { /** * 获取最后接收到 server patch 的时间 * 按照业务需求 , 当本地没有缓存此数据时 , 返回最初始的客户端值 * @ return */ long getLastPatchTime ( ) { } }
if ( lastPatchTime <= 0 ) { lastPatchTime = AppConfiguration . getDefaultSetting ( ) . getLong ( selfId , LAST_PATCH_TIME , 0L ) ; } if ( lastPatchTime <= 0 ) { lastPatchTime = System . currentTimeMillis ( ) ; AppConfiguration . getDefaultSetting ( ) . saveLong ( selfId , LAST_PATCH_TIME , lastPatchTime ) ; } return lastPatchTime ;
public class VarDef { /** * Returns true if the given value can be bound to this variable . */ public boolean isApplicable ( VarValueDef value ) { } }
return value . isNA ( ) ? isOptional ( ) : getValue ( value . getName ( ) ) != null ;
public class CassandraVersioner { /** * Get current database version for given migration type with ALL consistency . Select one row since * migration history is saved ordered descending by timestamp . If there are no rows in the schema _ version table , * return 0 as default database version . Data version is changed by executing migrations . * @ param type Migration type * @ return Database version for given type */ public int getCurrentVersion ( final MigrationType type ) { } }
final Statement select = QueryBuilder . select ( ) . all ( ) . from ( SCHEMA_VERSION_CF ) . where ( QueryBuilder . eq ( TYPE , type . name ( ) ) ) . limit ( 1 ) . setConsistencyLevel ( ConsistencyLevel . ALL ) ; final ResultSet result = session . execute ( select ) ; final Row row = result . one ( ) ; return row == null ? 0 : row . getInt ( VERSION ) ;
public class ServiceUpdater { /** * Adds the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
// Generated-style serializer: every optional updater field that is non-null is
// written to its corresponding form parameter. Nested settings use dotted keys
// (e.g. "Notifications.NewMessage.Enabled"); non-String values go through
// toString(), and list-valued webhookFilters emits one repeated parameter per entry.
if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( defaultServiceRoleSid != null ) { request . addPostParam ( "DefaultServiceRoleSid" , defaultServiceRoleSid ) ; } if ( defaultChannelRoleSid != null ) { request . addPostParam ( "DefaultChannelRoleSid" , defaultChannelRoleSid ) ; } if ( defaultChannelCreatorRoleSid != null ) { request . addPostParam ( "DefaultChannelCreatorRoleSid" , defaultChannelCreatorRoleSid ) ; } if ( readStatusEnabled != null ) { request . addPostParam ( "ReadStatusEnabled" , readStatusEnabled . toString ( ) ) ; } if ( reachabilityEnabled != null ) { request . addPostParam ( "ReachabilityEnabled" , reachabilityEnabled . toString ( ) ) ; } if ( typingIndicatorTimeout != null ) { request . addPostParam ( "TypingIndicatorTimeout" , typingIndicatorTimeout . toString ( ) ) ; } if ( consumptionReportInterval != null ) { request . addPostParam ( "ConsumptionReportInterval" , consumptionReportInterval . toString ( ) ) ; } if ( notificationsNewMessageEnabled != null ) { request . addPostParam ( "Notifications.NewMessage.Enabled" , notificationsNewMessageEnabled . toString ( ) ) ; } if ( notificationsNewMessageTemplate != null ) { request . addPostParam ( "Notifications.NewMessage.Template" , notificationsNewMessageTemplate ) ; } if ( notificationsAddedToChannelEnabled != null ) { request . addPostParam ( "Notifications.AddedToChannel.Enabled" , notificationsAddedToChannelEnabled . toString ( ) ) ; } if ( notificationsAddedToChannelTemplate != null ) { request . addPostParam ( "Notifications.AddedToChannel.Template" , notificationsAddedToChannelTemplate ) ; } if ( notificationsRemovedFromChannelEnabled != null ) { request . addPostParam ( "Notifications.RemovedFromChannel.Enabled" , notificationsRemovedFromChannelEnabled . toString ( ) ) ; } if ( notificationsRemovedFromChannelTemplate != null ) { request . 
addPostParam ( "Notifications.RemovedFromChannel.Template" , notificationsRemovedFromChannelTemplate ) ; } if ( notificationsInvitedToChannelEnabled != null ) { request . addPostParam ( "Notifications.InvitedToChannel.Enabled" , notificationsInvitedToChannelEnabled . toString ( ) ) ; } if ( notificationsInvitedToChannelTemplate != null ) { request . addPostParam ( "Notifications.InvitedToChannel.Template" , notificationsInvitedToChannelTemplate ) ; } if ( preWebhookUrl != null ) { request . addPostParam ( "PreWebhookUrl" , preWebhookUrl . toString ( ) ) ; } if ( postWebhookUrl != null ) { request . addPostParam ( "PostWebhookUrl" , postWebhookUrl . toString ( ) ) ; } if ( webhookMethod != null ) { request . addPostParam ( "WebhookMethod" , webhookMethod . toString ( ) ) ; } if ( webhookFilters != null ) { for ( String prop : webhookFilters ) { request . addPostParam ( "WebhookFilters" , prop ) ; } } if ( webhooksOnMessageSendUrl != null ) { request . addPostParam ( "Webhooks.OnMessageSend.Url" , webhooksOnMessageSendUrl . toString ( ) ) ; } if ( webhooksOnMessageSendMethod != null ) { request . addPostParam ( "Webhooks.OnMessageSend.Method" , webhooksOnMessageSendMethod . toString ( ) ) ; } if ( webhooksOnMessageSendFormat != null ) { request . addPostParam ( "Webhooks.OnMessageSend.Format" , webhooksOnMessageSendFormat ) ; } if ( webhooksOnMessageUpdateUrl != null ) { request . addPostParam ( "Webhooks.OnMessageUpdate.Url" , webhooksOnMessageUpdateUrl . toString ( ) ) ; } if ( webhooksOnMessageUpdateMethod != null ) { request . addPostParam ( "Webhooks.OnMessageUpdate.Method" , webhooksOnMessageUpdateMethod . toString ( ) ) ; } if ( webhooksOnMessageUpdateFormat != null ) { request . addPostParam ( "Webhooks.OnMessageUpdate.Format" , webhooksOnMessageUpdateFormat ) ; } if ( webhooksOnMessageRemoveUrl != null ) { request . addPostParam ( "Webhooks.OnMessageRemove.Url" , webhooksOnMessageRemoveUrl . 
toString ( ) ) ; } if ( webhooksOnMessageRemoveMethod != null ) { request . addPostParam ( "Webhooks.OnMessageRemove.Method" , webhooksOnMessageRemoveMethod . toString ( ) ) ; } if ( webhooksOnMessageRemoveFormat != null ) { request . addPostParam ( "Webhooks.OnMessageRemove.Format" , webhooksOnMessageRemoveFormat ) ; } if ( webhooksOnChannelAddUrl != null ) { request . addPostParam ( "Webhooks.OnChannelAdd.Url" , webhooksOnChannelAddUrl . toString ( ) ) ; } if ( webhooksOnChannelAddMethod != null ) { request . addPostParam ( "Webhooks.OnChannelAdd.Method" , webhooksOnChannelAddMethod . toString ( ) ) ; } if ( webhooksOnChannelAddFormat != null ) { request . addPostParam ( "Webhooks.OnChannelAdd.Format" , webhooksOnChannelAddFormat ) ; } if ( webhooksOnChannelDestroyUrl != null ) { request . addPostParam ( "Webhooks.OnChannelDestroy.Url" , webhooksOnChannelDestroyUrl . toString ( ) ) ; } if ( webhooksOnChannelDestroyMethod != null ) { request . addPostParam ( "Webhooks.OnChannelDestroy.Method" , webhooksOnChannelDestroyMethod . toString ( ) ) ; } if ( webhooksOnChannelDestroyFormat != null ) { request . addPostParam ( "Webhooks.OnChannelDestroy.Format" , webhooksOnChannelDestroyFormat ) ; } if ( webhooksOnChannelUpdateUrl != null ) { request . addPostParam ( "Webhooks.OnChannelUpdate.Url" , webhooksOnChannelUpdateUrl . toString ( ) ) ; } if ( webhooksOnChannelUpdateMethod != null ) { request . addPostParam ( "Webhooks.OnChannelUpdate.Method" , webhooksOnChannelUpdateMethod . toString ( ) ) ; } if ( webhooksOnChannelUpdateFormat != null ) { request . addPostParam ( "Webhooks.OnChannelUpdate.Format" , webhooksOnChannelUpdateFormat ) ; } if ( webhooksOnMemberAddUrl != null ) { request . addPostParam ( "Webhooks.OnMemberAdd.Url" , webhooksOnMemberAddUrl . toString ( ) ) ; } if ( webhooksOnMemberAddMethod != null ) { request . addPostParam ( "Webhooks.OnMemberAdd.Method" , webhooksOnMemberAddMethod . 
toString ( ) ) ; } if ( webhooksOnMemberAddFormat != null ) { request . addPostParam ( "Webhooks.OnMemberAdd.Format" , webhooksOnMemberAddFormat ) ; } if ( webhooksOnMemberRemoveUrl != null ) { request . addPostParam ( "Webhooks.OnMemberRemove.Url" , webhooksOnMemberRemoveUrl . toString ( ) ) ; } if ( webhooksOnMemberRemoveMethod != null ) { request . addPostParam ( "Webhooks.OnMemberRemove.Method" , webhooksOnMemberRemoveMethod . toString ( ) ) ; } if ( webhooksOnMemberRemoveFormat != null ) { request . addPostParam ( "Webhooks.OnMemberRemove.Format" , webhooksOnMemberRemoveFormat ) ; } if ( webhooksOnMessageSentUrl != null ) { request . addPostParam ( "Webhooks.OnMessageSent.Url" , webhooksOnMessageSentUrl . toString ( ) ) ; } if ( webhooksOnMessageSentMethod != null ) { request . addPostParam ( "Webhooks.OnMessageSent.Method" , webhooksOnMessageSentMethod . toString ( ) ) ; } if ( webhooksOnMessageSentFormat != null ) { request . addPostParam ( "Webhooks.OnMessageSent.Format" , webhooksOnMessageSentFormat ) ; } if ( webhooksOnMessageUpdatedUrl != null ) { request . addPostParam ( "Webhooks.OnMessageUpdated.Url" , webhooksOnMessageUpdatedUrl . toString ( ) ) ; } if ( webhooksOnMessageUpdatedMethod != null ) { request . addPostParam ( "Webhooks.OnMessageUpdated.Method" , webhooksOnMessageUpdatedMethod . toString ( ) ) ; } if ( webhooksOnMessageUpdatedFormat != null ) { request . addPostParam ( "Webhooks.OnMessageUpdated.Format" , webhooksOnMessageUpdatedFormat ) ; } if ( webhooksOnMessageRemovedUrl != null ) { request . addPostParam ( "Webhooks.OnMessageRemoved.Url" , webhooksOnMessageRemovedUrl . toString ( ) ) ; } if ( webhooksOnMessageRemovedMethod != null ) { request . addPostParam ( "Webhooks.OnMessageRemoved.Method" , webhooksOnMessageRemovedMethod . toString ( ) ) ; } if ( webhooksOnMessageRemovedFormat != null ) { request . 
addPostParam ( "Webhooks.OnMessageRemoved.Format" , webhooksOnMessageRemovedFormat ) ; } if ( webhooksOnChannelAddedUrl != null ) { request . addPostParam ( "Webhooks.OnChannelAdded.Url" , webhooksOnChannelAddedUrl . toString ( ) ) ; } if ( webhooksOnChannelAddedMethod != null ) { request . addPostParam ( "Webhooks.OnChannelAdded.Method" , webhooksOnChannelAddedMethod . toString ( ) ) ; } if ( webhooksOnChannelAddedFormat != null ) { request . addPostParam ( "Webhooks.OnChannelAdded.Format" , webhooksOnChannelAddedFormat ) ; } if ( webhooksOnChannelDestroyedUrl != null ) { request . addPostParam ( "Webhooks.OnChannelDestroyed.Url" , webhooksOnChannelDestroyedUrl . toString ( ) ) ; } if ( webhooksOnChannelDestroyedMethod != null ) { request . addPostParam ( "Webhooks.OnChannelDestroyed.Method" , webhooksOnChannelDestroyedMethod . toString ( ) ) ; } if ( webhooksOnChannelDestroyedFormat != null ) { request . addPostParam ( "Webhooks.OnChannelDestroyed.Format" , webhooksOnChannelDestroyedFormat ) ; } if ( webhooksOnChannelUpdatedUrl != null ) { request . addPostParam ( "Webhooks.OnChannelUpdated.Url" , webhooksOnChannelUpdatedUrl . toString ( ) ) ; } if ( webhooksOnChannelUpdatedMethod != null ) { request . addPostParam ( "Webhooks.OnChannelUpdated.Method" , webhooksOnChannelUpdatedMethod . toString ( ) ) ; } if ( webhooksOnChannelUpdatedFormat != null ) { request . addPostParam ( "Webhooks.OnChannelUpdated.Format" , webhooksOnChannelUpdatedFormat ) ; } if ( webhooksOnMemberAddedUrl != null ) { request . addPostParam ( "Webhooks.OnMemberAdded.Url" , webhooksOnMemberAddedUrl . toString ( ) ) ; } if ( webhooksOnMemberAddedMethod != null ) { request . addPostParam ( "Webhooks.OnMemberAdded.Method" , webhooksOnMemberAddedMethod . toString ( ) ) ; } if ( webhooksOnMemberAddedFormat != null ) { request . addPostParam ( "Webhooks.OnMemberAdded.Format" , webhooksOnMemberAddedFormat ) ; } if ( webhooksOnMemberRemovedUrl != null ) { request . 
addPostParam ( "Webhooks.OnMemberRemoved.Url" , webhooksOnMemberRemovedUrl . toString ( ) ) ; } if ( webhooksOnMemberRemovedMethod != null ) { request . addPostParam ( "Webhooks.OnMemberRemoved.Method" , webhooksOnMemberRemovedMethod . toString ( ) ) ; } if ( webhooksOnMemberRemovedFormat != null ) { request . addPostParam ( "Webhooks.OnMemberRemoved.Format" , webhooksOnMemberRemovedFormat ) ; } if ( limitsChannelMembers != null ) { request . addPostParam ( "Limits.ChannelMembers" , limitsChannelMembers . toString ( ) ) ; } if ( limitsUserChannels != null ) { request . addPostParam ( "Limits.UserChannels" , limitsUserChannels . toString ( ) ) ; }
public class ObjectErrorBuilder { /** * { @ link ObjectError } のインスタンスを組み立てます 。 * @ return { @ link ObjectError } のインスタンス */ public ObjectError build ( ) { } }
final ObjectError error = new ObjectError ( objectName , codes , variables ) ; error . setDefaultMessage ( defaultMessage ) ; error . setSheetName ( sheetName ) ; error . setLabel ( label ) ; return error ;
public class DevicesInner { /** * Scans for updates on a data box edge / gateway device . * @ param deviceName The device name . * @ param resourceGroupName The resource group name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > beginScanForUpdatesAsync ( String deviceName , String resourceGroupName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginScanForUpdatesWithServiceResponseAsync ( deviceName , resourceGroupName ) , serviceCallback ) ;
public class NarManager {

    /**
     * Returns a list of all attached nar dependencies for a specific binding and
     * "noarch", but not where "local" is specified.
     *
     * @param narArtifacts the nar artifacts whose attached dependencies are collected
     * @param archOsLinker either a valid aol, noarch or null. In case of null both the
     *            default getAOL() and noarch dependencies are returned.
     * @param type noarch, static, shared, jni, or null. In case of null the default
     *            binding found in narInfo is used.
     * @return the collected attached nar artifacts
     * @throws MojoExecutionException
     * @throws MojoFailureException
     */
    public final List /* <AttachedNarArtifact> */ getAttachedNarDependencies ( final List /* NarArtifacts */ narArtifacts , final AOL archOsLinker , final String type ) throws MojoExecutionException , MojoFailureException {
        // A null AOL means "default AOL plus noarch": remember that and substitute
        // the configured default for the per-dependency lookups below.
        boolean noarch = false ;
        AOL aol = archOsLinker ;
        if ( aol == null ) {
            noarch = true ;
            aol = this . defaultAOL ;
        }
        final List artifactList = new ArrayList ( ) ;
        for ( final Object narArtifact : narArtifacts ) {
            final Artifact dependency = ( Artifact ) narArtifact ;
            final NarInfo narInfo = getNarInfo ( dependency ) ;
            if ( noarch ) {
                // Caller asked for the default AOL: also pick up the noarch attachments.
                artifactList . addAll ( getAttachedNarDependencies ( dependency , null , NarConstants . NAR_NO_ARCH ) ) ;
            }
            // use preferred binding, unless non existing
            final String binding = narInfo . getBinding ( aol , type != null ? type : Library . STATIC ) ;
            // FIXME kludge, but does not work anymore since AOL is now a class --
            // NOTE(review): AOL.equals(String) presumably never matches here; verify
            // whether this branch is dead before removing it.
            if ( aol . equals ( NarConstants . NAR_NO_ARCH ) ) {
                // FIXME no handling of local
                artifactList . addAll ( getAttachedNarDependencies ( dependency , null , NarConstants . NAR_NO_ARCH ) ) ;
            } else {
                artifactList . addAll ( getAttachedNarDependencies ( dependency , aol , binding ) ) ;
            }
        }
        return artifactList ;
    }
}
public class CmsXmlConfigUpdater { /** * Transforms a single configuration file using the given transformation source . * @ param file the configuration file * @ param transformSource the transform soruce * @ throws TransformerConfigurationException - * @ throws IOException - * @ throws SAXException - * @ throws TransformerException - * @ throws ParserConfigurationException - */ private void transform ( File file , Source transformSource ) throws TransformerConfigurationException , IOException , SAXException , TransformerException , ParserConfigurationException { } }
Transformer transformer = m_transformerFactory . newTransformer ( transformSource ) ; transformer . setOutputProperty ( OutputKeys . ENCODING , "us-ascii" ) ; transformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; transformer . setOutputProperty ( "{http://xml.apache.org/xslt}indent-amount" , "4" ) ; String configDirPath = m_configDir . getAbsolutePath ( ) ; configDirPath = configDirPath . replaceFirst ( "[/\\\\]$" , "" ) ; transformer . setParameter ( "configDir" , configDirPath ) ; XMLReader reader = m_parserFactory . newSAXParser ( ) . getXMLReader ( ) ; reader . setEntityResolver ( NO_ENTITY_RESOLVER ) ; Source source ; if ( file . exists ( ) ) { source = new SAXSource ( reader , new InputSource ( file . getCanonicalPath ( ) ) ) ; } else { source = new SAXSource ( reader , new InputSource ( new ByteArrayInputStream ( DEFAULT_XML . getBytes ( "UTF-8" ) ) ) ) ; } ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; Result target = new StreamResult ( baos ) ; transformer . transform ( source , target ) ; byte [ ] transformedConfig = baos . toByteArray ( ) ; try ( FileOutputStream output = new FileOutputStream ( file ) ) { output . write ( transformedConfig ) ; }
public class FullyQualifiedNameImpl {

    /**
     * Reflective EMF setter: dispatches on the structural feature id and
     * forwards to the matching typed setter; unknown ids defer to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.FULLY_QUALIFIED_NAME__FQN_TYPE:
                setFQNType((Integer) newValue);
                return;
            case AfplibPackage.FULLY_QUALIFIED_NAME__FQN_FORMAT:
                setFQNFormat((Integer) newValue);
                return;
            case AfplibPackage.FULLY_QUALIFIED_NAME__FQ_NAME:
                setFQName((String) newValue);
                return;
        }
        // Feature not declared on this class: let the superclass handle it.
        super.eSet(featureID, newValue);
    }
}
public class InstanceClient { /** * Retrieves the list of referrers to instances contained within the specified zone . For more * information , read Viewing Referrers to VM Instances . * < p > Sample code : * < pre > < code > * try ( InstanceClient instanceClient = InstanceClient . create ( ) ) { * ProjectZoneInstanceName instance = ProjectZoneInstanceName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ INSTANCE ] " ) ; * for ( Reference element : instanceClient . listReferrersInstances ( instance . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param instance Name of the target instance scoping this request , or ' - ' if the request should * span over all instances in the container . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final ListReferrersInstancesPagedResponse listReferrersInstances ( String instance ) { } }
ListReferrersInstancesHttpRequest request = ListReferrersInstancesHttpRequest . newBuilder ( ) . setInstance ( instance ) . build ( ) ; return listReferrersInstances ( request ) ;
public class SimpleEventDispatcher { /** * { @ inheritDoc } */ @ Override public void subscribe ( Class < ? extends RootApplicationEvent > event , String listenerBeanName ) { } }
Object instance = ctx . getBean ( listenerBeanName ) ; if ( instance instanceof EventListener ) { subscribe ( event , ( EventListener ) instance ) ; } else { throw new ServiceLayerException ( format ( "The bean with name [%s] is not of type EventListener and cannot subscribe to events" , listenerBeanName ) ) ; }
public class BoxHttpRequest { /** * Adds an HTTP header to the request . * @ param key the header key . * @ param value the header value . * @ return request with the updated header . */ public BoxHttpRequest addHeader ( String key , String value ) { } }
mUrlConnection . addRequestProperty ( key , value ) ; return this ;
public class CmsFlexCacheKey { /** * Compares this key to the other key passed as parameter , * from comparing the two keys , a variation String is constructed . < p > * This method is the " heart " of the key matching process . < p > * The assumtion is that this key should be the one constructed for the response , * while the parameter key should have been constructed from the request . < p > * A short example how this works : * If the cache key is " cache = user " and the request is done from a guest user * the constructed variation will be " user = ( guest ) " . < p > * @ param key the key to match this key with * @ return null if not cachable , or the Variation String if cachable */ public String matchRequestKey ( CmsFlexRequestKey key ) { } }
// Builds the variation string piece by piece: first the "never cache" and
// exclusion checks (no-params / no-attrs) which veto caching by returning null,
// then each configured dimension (uri, site, element, device, locale, params,
// attrs, session, scheme, port, timeout) appends its request value. An empty
// result at the end also means "not cacheable".
StringBuffer str = new StringBuffer ( 100 ) ; if ( m_always < 0 ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_KEYMATCH_CACHE_NEVER_0 ) ) ; } return null ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_KEYMATCH_CHECK_NO_PARAMS_0 ) ) ; } if ( ( m_noparams != null ) && ( key . getParams ( ) != null ) ) { if ( ( m_noparams . size ( ) == 0 ) && ( key . getParams ( ) . size ( ) > 0 ) ) { return null ; } Iterator < String > i = key . getParams ( ) . keySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { if ( m_noparams . contains ( i . next ( ) ) ) { return null ; } } } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_KEYMATCH_CHECK_NO_ATTRS_0 ) ) ; } if ( ( m_noattrs != null ) && ( key . getAttributes ( ) != null ) ) { if ( ( m_noattrs . size ( ) == 0 ) && ( key . getAttributes ( ) . size ( ) > 0 ) ) { return null ; } Iterator < String > i = key . getAttributes ( ) . keySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { if ( m_noattrs . contains ( i . next ( ) ) ) { return null ; } } } if ( m_always > 0 ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_KEYMATCH_CACHE_ALWAYS_0 ) ) ; } str . append ( CACHE_00_ALWAYS ) ; return str . toString ( ) ; } if ( m_uri != null ) { appendKeyValue ( str , CACHE_02_URI , key . getUri ( ) ) ; } if ( m_site != null ) { appendKeyValue ( str , CACHE_17_SITE , key . getSite ( ) ) ; } if ( m_element != null ) { appendKeyValue ( str , CACHE_14_ELEMENT , key . getElement ( ) ) ; } if ( m_device != null ) { appendKeyValue ( str , CACHE_20_DEVICE , key . getDevice ( ) ) ; } if ( m_containerElement != null ) { appendKeyValue ( str , CACHE_21_CONTAINER_ELEMENT , key . 
getContainerElement ( ) ) ; } if ( m_locale != null ) { appendKeyValue ( str , CACHE_15_LOCALE , key . getLocale ( ) ) ; } if ( m_encoding != null ) { appendKeyValue ( str , CACHE_16_ENCODING , key . getEncoding ( ) ) ; } if ( m_ip != null ) { appendKeyValue ( str , CACHE_13_IP , key . getIp ( ) ) ; } if ( m_user != null ) { appendKeyValue ( str , CACHE_03_USER , key . getUser ( ) ) ; } if ( m_params != null ) { str . append ( CACHE_04_PARAMS ) ; str . append ( "=(" ) ; Map < String , String [ ] > keyParams = key . getParams ( ) ; if ( keyParams != null ) { if ( m_params . size ( ) > 0 ) { // match only params listed in cache directives Iterator < String > i = m_params . iterator ( ) ; while ( i . hasNext ( ) ) { Object o = i . next ( ) ; if ( keyParams . containsKey ( o ) ) { str . append ( o ) ; str . append ( "=" ) ; // TODO : handle multiple occurrences of the same parameter value String [ ] values = keyParams . get ( o ) ; str . append ( values [ 0 ] ) ; if ( i . hasNext ( ) ) { str . append ( "," ) ; } } } } else { // match all request params Iterator < Map . Entry < String , String [ ] > > i = keyParams . entrySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { Map . Entry < String , String [ ] > entry = i . next ( ) ; str . append ( entry . getKey ( ) ) ; str . append ( "=" ) ; // TODO : handle multiple occurrences of the same parameter value String [ ] values = entry . getValue ( ) ; str . append ( values [ 0 ] ) ; if ( i . hasNext ( ) ) { str . append ( "," ) ; } } } } str . append ( ");" ) ; } if ( m_attrs != null ) { str . append ( CACHE_18_ATTRS ) ; str . append ( "=(" ) ; Map < String , Object > keyAttrs = key . getAttributes ( ) ; if ( keyAttrs != null ) { if ( m_attrs . size ( ) > 0 ) { // match only attributes listed in cache directives Iterator < String > i = m_attrs . iterator ( ) ; while ( i . hasNext ( ) ) { String s = i . next ( ) ; if ( keyAttrs . containsKey ( s ) ) { str . append ( s ) ; str . append ( "=" ) ; Object value = keyAttrs . 
get ( s ) ; str . append ( value ) ; if ( i . hasNext ( ) ) { str . append ( "," ) ; } } } } else { // match all request attributes Iterator < Map . Entry < String , Object > > i = keyAttrs . entrySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { Map . Entry < String , Object > entry = i . next ( ) ; str . append ( entry . getKey ( ) ) ; str . append ( "=" ) ; Object value = entry . getValue ( ) ; str . append ( value ) ; if ( i . hasNext ( ) ) { str . append ( "," ) ; } } } } str . append ( ");" ) ; } if ( m_session != null ) { StringBuffer buf = new StringBuffer ( 32 ) ; boolean found = false ; buf . append ( CACHE_07_SESSION ) ; buf . append ( "=(" ) ; HttpSession keySession = key . getSession ( ) ; if ( keySession != null ) { // match only session attributes listed in cache directives Iterator < String > i = m_session . iterator ( ) ; while ( i . hasNext ( ) ) { String name = i . next ( ) ; Object val = keySession . getAttribute ( name ) ; if ( val != null ) { found = true ; buf . append ( name ) ; buf . append ( "=" ) ; buf . append ( val ) ; if ( i . hasNext ( ) ) { buf . append ( "," ) ; } } } } if ( found ) { buf . append ( ");" ) ; str . append ( buf ) ; } } if ( m_schemes != null ) { String s = key . getScheme ( ) ; if ( ( m_schemes . size ( ) > 0 ) && ( ! m_schemes . contains ( s ) ) ) { return null ; } appendKeyValue ( str , CACHE_08_SCHEMES , s ) ; } if ( m_ports != null ) { Integer i = key . getPort ( ) ; if ( ( m_ports . size ( ) > 0 ) && ( ! m_ports . contains ( i ) ) ) { return null ; } str . append ( CACHE_09_PORTS ) ; str . append ( "=(" ) ; str . append ( i ) ; str . append ( ");" ) ; } if ( m_timeout > 0 ) { str . append ( CACHE_06_TIMEOUT ) ; str . append ( "=(" ) ; str . append ( m_timeout ) ; str . append ( ");" ) ; } if ( str . length ( ) > 0 ) { return str . toString ( ) ; } else { return null ; }
public class ZKPaths { /** * Given a parent and a child node , join them in the given { @ link StringBuilder path } * @ param path the { @ link StringBuilder } used to make the path * @ param parent the parent * @ param child the child */ private static void joinPath ( StringBuilder path , String parent , String child ) { } }
// Add parent piece , with no trailing slash . if ( ( parent != null ) && ( parent . length ( ) > 0 ) ) { if ( ! parent . startsWith ( PATH_SEPARATOR ) ) { path . append ( PATH_SEPARATOR ) ; } if ( parent . endsWith ( PATH_SEPARATOR ) ) { path . append ( parent . substring ( 0 , parent . length ( ) - 1 ) ) ; } else { path . append ( parent ) ; } } if ( ( child == null ) || ( child . length ( ) == 0 ) || ( child . equals ( PATH_SEPARATOR ) ) ) { // Special case , empty parent and child if ( path . length ( ) == 0 ) { path . append ( PATH_SEPARATOR ) ; } return ; } // Now add the separator between parent and child . path . append ( PATH_SEPARATOR ) ; if ( child . startsWith ( PATH_SEPARATOR ) ) { child = child . substring ( 1 ) ; } if ( child . endsWith ( PATH_SEPARATOR ) ) { child = child . substring ( 0 , child . length ( ) - 1 ) ; } // Finally , add the child . path . append ( child ) ;
public class Handler { /** * add data to command */ @ Override public void characters ( char [ ] text , int start , int length ) throws SAXException { } }
if ( length > 0 ) { String data = String . valueOf ( text , start , length ) . replace ( '\n' , ' ' ) . replace ( '\t' , ' ' ) . trim ( ) ; if ( data . length ( ) > 0 ) { Command . prn ( tabCount , tabCount + ">setting data=" + data // $ NON - NLS - 1 $ + "<EOL>" ) ; // $ NON - NLS - 1 $ Command cmd = stack . peek ( ) ; cmd . setData ( data ) ; } }
public class ExpressRouteGatewaysInner { /** * Lists ExpressRoute gateways under a given subscription . * @ return the observable to the List & lt ; ExpressRouteGatewayInner & gt ; object */ public Observable < Page < ExpressRouteGatewayInner > > listAsync ( ) { } }
return listWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < List < ExpressRouteGatewayInner > > , Page < ExpressRouteGatewayInner > > ( ) { @ Override public Page < ExpressRouteGatewayInner > call ( ServiceResponse < List < ExpressRouteGatewayInner > > response ) { PageImpl1 < ExpressRouteGatewayInner > page = new PageImpl1 < > ( ) ; page . setItems ( response . body ( ) ) ; return page ; } } ) ;
public class MD5Utils { /** * 签名字符串 * @ param text 需要签名的字符串 * @ param key 密钥 * @ param input _ charset 编码格式 * @ return 签名结果 */ public static String sign ( String text , String key , String input_charset ) { } }
text = text + key ; return DigestUtils . md5Hex ( getContentBytes ( text , input_charset ) ) ;
public class FedoraObjectTripleGenerator_3_0 { /** * Adds all triples given by reader . getRelationships ( null , null ) . * This includes everything in RELS - EXT and RELS - INT as well as the implicit * basic content model assertion , if any . */ private void addRelationshipTriples ( DOReader reader , URIReference objURI , Set < Triple > set ) throws Exception { } }
for ( RelationshipTuple tuple : reader . getRelationships ( ) ) { set . add ( tuple . toTriple ( null ) ) ; }
public class SQLMultiScopeRecoveryLog { /** * Informs the RLS that any outstanding recovery process for the recovery log is * complete . Client services may issue this call to give the RLS an opportunity * to optomize log access by performing a keypoint operation . Client services do * not have to issue this call . * This call is separate from the < code > RecoveryDirector . recoveryComplete < / code > * method which must be invoked by a client service in response to a recovery * request . The RecoveryDirector callback indicates that sufficient recovery * processing has been performed to allow the request to be passed to the next * client service . The recovery process may however still execute on a separate * thread and call < code > RecoveryLog . recoveryComplete < / code > when it has * finished . * @ exception LogClosedException Thrown if the recovery log is closed and must * be opened before this call can be issued . * @ exception InternalLogException Thrown if an unexpected error has occured . * @ exception LogIncompatibleException An attempt has been made access a recovery * log that is not compatible with this version * of the service . */ @ Override public void recoveryComplete ( ) throws LogClosedException , InternalLogException , LogIncompatibleException , LogIncompatibleException { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "recoveryComplete" , this ) ; // If this recovery log instance has been marked as incompatible then throw an exception // accordingly . if ( incompatible ( ) ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "recoveryComplete" , "LogIncompatibleException" ) ; throw new LogIncompatibleException ( ) ; } // If this recovery log instance has experienced a serious internal error then prevent this operation from // executing . if ( failed ( ) ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "recoveryComplete" , this ) ; throw new InternalLogException ( null ) ; } // Check that the log is open . if ( _closesRequired == 0 ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "recoveryComplete" , "LogClosedException" ) ; throw new LogClosedException ( null ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "recoveryComplete" ) ;
public class SourceReader { /** * All newline character patterns are are converted to \ n . */ public int read ( ) throws IOException { } }
int c ; if ( mFirst != 0 ) { c = mFirst ; mFirst = 0 ; } else { c = super . read ( ) ; } if ( c == '\n' ) { mLine ++ ; } else if ( c == ENTER_CODE ) { mUnicodeReader . setEscapesEnabled ( true ) ; } else if ( c == ENTER_TEXT ) { mUnicodeReader . setEscapesEnabled ( false ) ; } return c ;
public class AuthInfo { /** * Get a typical auth descriptor for CRAM - MD5 auth with the given username and password . * @ param u the username * @ param p the password * @ return an AuthInfo */ public static AuthInfo cramMD5 ( String username , String password ) { } }
return new AuthInfo ( new PlainCallbackHandler ( username , password ) , new String [ ] { "CRAM-MD5" } ) ;
public class LocaleData { /** * Returns the set of exemplar characters for a locale . * Equivalent to calling new LocaleData ( locale ) . { @ link # getExemplarSet ( int , int ) } . * @ param locale Locale for which the exemplar character set * is to be retrieved . * @ param options Bitmask for options to apply to the exemplar pattern . * Specify zero to retrieve the exemplar set as it is * defined in the locale data . Specify * UnicodeSet . CASE to retrieve a case - folded exemplar * set . See { @ link UnicodeSet # applyPattern ( String , * int ) } for a complete list of valid options . The * IGNORE _ SPACE bit is always set , regardless of the * value of ' options ' . * @ param extype The type of exemplar character set to retrieve . * @ return The set of exemplar characters for the given locale . */ public static UnicodeSet getExemplarSet ( ULocale locale , int options , int extype ) { } }
return LocaleData . getInstance ( locale ) . getExemplarSet ( options , extype ) ;
public class AbstractAggregatorImpl { /** * / * ( non - Javadoc ) * @ see com . ibm . jaggr . service . resource . IResourceProvider # getResource ( java . net . URI ) */ @ Override public IResource newResource ( URI uri ) { } }
final String sourceMethod = "newResource" ; // $ NON - NLS - 1 $ boolean isTraceLogging = log . isLoggable ( Level . FINER ) ; if ( isTraceLogging ) { log . entering ( AbstractAggregatorImpl . class . getName ( ) , sourceMethod , new Object [ ] { uri } ) ; } Mutable < URI > uriRef = new MutableObject < URI > ( uri ) ; IResourceFactory factory = getResourceFactory ( uriRef ) ; IResource result ; if ( factory != null ) { result = factory . newResource ( uriRef . getValue ( ) ) ; } else { result = new NotFoundResource ( uriRef . getValue ( ) ) ; } result = runConverters ( result ) ; if ( isTraceLogging ) { log . exiting ( AbstractAggregatorImpl . class . getName ( ) , sourceMethod , result ) ; } return result ;
public class RequestHelper { /** * Get the HTTP method associated with the given HTTP request * @ param aHttpRequest * The http request to query . May not be < code > null < / code > . * @ return < code > null < / code > if no supported HTTP method is contained */ @ Nullable public static EHttpMethod getHttpMethod ( @ Nonnull final HttpServletRequest aHttpRequest ) { } }
ValueEnforcer . notNull ( aHttpRequest , "HttpRequest" ) ; final String sMethod = aHttpRequest . getMethod ( ) ; return EHttpMethod . getFromNameOrNull ( sMethod ) ;
public class CommerceVirtualOrderItemPersistenceImpl { /** * Clears the cache for the commerce virtual order item . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( CommerceVirtualOrderItem commerceVirtualOrderItem ) { } }
entityCache . removeResult ( CommerceVirtualOrderItemModelImpl . ENTITY_CACHE_ENABLED , CommerceVirtualOrderItemImpl . class , commerceVirtualOrderItem . getPrimaryKey ( ) ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; clearUniqueFindersCache ( ( CommerceVirtualOrderItemModelImpl ) commerceVirtualOrderItem , true ) ;
public class SortedArrayList {
    /**
     * Inserts {@code o} keeping the list ordered by {@code hashCode()}. Items
     * with equal hash codes keep insertion order: the new item goes after the
     * last existing match.
     *
     * @param o element to insert
     * @return true, per the general contract of {@code Collection.add}
     */
    @Override
    public boolean add(E o) {
        int size = size();
        if (size == 0) {
            // Empty list: nothing to compare against.
            super.add(o);
            return true;
        }
        int hash = o.hashCode();
        if (hash >= get(size - 1).hashCode()) {
            // Appending already-sorted input stays constant-time instead of logarithmic.
            super.add(o);
            return true;
        }
        int index = binarySearchHashCode(hash);
        if (index < 0) {
            // No element with this hash code yet; convert to the insertion point.
            super.add(-(index + 1), o);
        } else {
            // Skip past every existing element with the same hash code.
            while (index < (size - 1) && get(index + 1).hashCode() == hash) {
                index++;
            }
            super.add(index + 1, o);
        }
        return true;
    }
}
public class SessionRegistry { /** * closeSessions will be closed the all sessions on specific workspace . * @ param workspaceName * the workspace name . * @ return int * how many sessions was closed . */ public int closeSessions ( String workspaceName ) { } }
int closedSessions = 0 ; for ( SessionImpl session : sessionsMap . values ( ) ) { if ( session . getWorkspace ( ) . getName ( ) . equals ( workspaceName ) ) { session . logout ( ) ; closedSessions ++ ; } } return closedSessions ;
public class TaskEventDispatcher { /** * Registers the given partition for incoming task events allowing calls to { @ link * # subscribeToEvent ( ResultPartitionID , EventListener , Class ) } . * @ param partitionId * the partition ID */ public void registerPartition ( ResultPartitionID partitionId ) { } }
checkNotNull ( partitionId ) ; synchronized ( registeredHandlers ) { LOG . debug ( "registering {}" , partitionId ) ; if ( registeredHandlers . put ( partitionId , new TaskEventHandler ( ) ) != null ) { throw new IllegalStateException ( "Partition " + partitionId + " already registered at task event dispatcher." ) ; } }
public class ExpressionUtil { /** * Evaluate a value expression . To support optional attributes , if the expression is null then * null will be returned . * @ param expression the expression * @ param pageContext the context for the JSP * @ param < T > the expected type of the expression * @ return the result of evaluating the expression */ public static < T > T evaluate ( ValueExpression expression , PageContext pageContext ) { } }
if ( expression == null ) { return null ; } @ SuppressWarnings ( "unchecked" ) T value = ( T ) expression . getValue ( pageContext . getELContext ( ) ) ; return value ;
public class ServletUtil { /** * 返回数据给客户端 * @ param response 响应对象 { @ link HttpServletResponse } * @ param in 需要返回客户端的内容 */ public static void write ( HttpServletResponse response , InputStream in ) { } }
write ( response , in , IoUtil . DEFAULT_BUFFER_SIZE ) ;
public class CancelConversionTaskRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < CancelConversionTaskRequest > getDryRunRequest ( ) { } }
Request < CancelConversionTaskRequest > request = new CancelConversionTaskRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class IntegerGene { /** * Create a new random { @ code IntegerGene } with the given value and the * given range . If the { @ code value } isn ' t within the interval [ min , max ] , * no exception is thrown . In this case the method * { @ link IntegerGene # isValid ( ) } returns { @ code false } . * @ since 3.2 * @ param value the value of the gene . * @ param range the integer range to use * @ return a new { @ code IntegerGene } with the give { @ code value } * @ throws NullPointerException if the given { @ code range } is { @ code null } . */ public static IntegerGene of ( final int value , final IntRange range ) { } }
return IntegerGene . of ( value , range . getMin ( ) , range . getMax ( ) ) ;
public class C8DChunk { /** * public String pformat0 ( ) { return " % 21.15e " ; } */ @ Override public final void initFromBytes ( ) { } }
_start = - 1 ; _cidx = - 1 ; set_len ( _mem . length >> 3 ) ; assert _mem . length == _len << 3 ;
public class CSSFactory { /** * Parses URL into StyleSheet * @ param url * URL of file to be parsed * @ param encoding * Encoding of file * @ return Parsed StyleSheet * @ throws CSSException * When exception during parse occurs * @ throws IOException * When file not found */ public static final StyleSheet parse ( URL url , String encoding ) throws CSSException , IOException { } }
return getCSSParserFactory ( ) . parse ( url , getNetworkProcessor ( ) , encoding , SourceType . URL , url ) ;
public class FileUtils { /** * Just like { @ link Files # asByteSink ( java . io . File , com . google . common . io . FileWriteMode . . . ) } , but * decompresses the incoming data using GZIP . */ public static ByteSink asCompressedByteSink ( File f ) throws IOException { } }
return GZIPByteSink . gzipCompress ( Files . asByteSink ( f ) ) ;
public class DisjointSets { /** * Returns an iterator over all the unique items across all sets . */ public Iterator < T > iterator ( ) { } }
List < Iterator < T > > iters = new ArrayList < Iterator < T > > ( sets . size ( ) ) ; for ( Set < T > s : sets ) iters . add ( s . iterator ( ) ) ; return new CombinedIterator < T > ( iters ) ;
public class InternalXtextParser {
    /**
     * ANTLR-generated entry rule (see the InternalXtext.g references below) —
     * generated code, do not edit by hand.
     * InternalXtext.g:1195:1 : entryRuleTerminalTokenElement :
     * ruleTerminalTokenElement EOF ;
     */
    public final void entryRuleTerminalTokenElement() throws RecognitionException {
        try {
            // InternalXtext.g:1196:1: ( ruleTerminalTokenElement EOF )
            // InternalXtext.g:1197:1: ruleTerminalTokenElement EOF
            {
                before(grammarAccess.getTerminalTokenElementRule());
                pushFollow(FollowSets000.FOLLOW_1);
                ruleTerminalTokenElement();
                state._fsp--;
                after(grammarAccess.getTerminalTokenElementRule());
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error handling: report, then resynchronize the stream.
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class Source { /** * Makes a new empty builder . */ public static Builder builder ( ) { } }
return new AutoValue_Source . Builder ( ) . setPath ( DEV_NULL ) . setCode ( "" ) . setOriginalCodeSupplier ( null ) . setSourceMap ( "" ) . setSourceUrl ( "" ) . setSourceMappingUrl ( "" ) . setRuntimes ( ImmutableSet . of ( ) ) . setLoadFlags ( ImmutableMap . of ( ) ) . setEstimatedSize ( 0 ) ;
public class ScribeIndex { /** * Gets the appropriate component scribe for a given component instance . * @ param component the component instance * @ return the component scribe or null if not found */ public ICalComponentScribe < ? extends ICalComponent > getComponentScribe ( ICalComponent component ) { } }
if ( component instanceof RawComponent ) { RawComponent raw = ( RawComponent ) component ; return new RawComponentScribe ( raw . getName ( ) ) ; } return getComponentScribe ( component . getClass ( ) ) ;
public class ContextManager { /** * Executes a blocking action on a background thread . * @ param action The action to execute . * @ param resultHandler A handler to be called with the action result . * @ return The context manager . */ public < T > ContextManager execute ( Action < T > action , Handler < AsyncResult < T > > resultHandler ) { } }
vertx . executeBlocking ( action , resultHandler ) ; return this ;
public class SVGParser { /** * Parse a preserveAspectRation attribute */ private static void parsePreserveAspectRatio ( SVG . SvgPreserveAspectRatioContainer obj , String val ) throws SVGParseException { } }
obj . preserveAspectRatio = parsePreserveAspectRatio ( val ) ;
public class AbstractAttachable { /** * { @ inheritDoc } */ @ Override public < T > T getAttachment ( final AttachmentKey < T > key ) { } }
if ( key == null || attachments == null ) { return null ; } return ( T ) attachments . get ( key ) ;
public class ServiceManagerSparql { /** * Given the URI of a type ( i . e . , a modelReference ) , this method figures out all the entities of a type * ( Service or Operation are the ones that are expected ) that have this as part of their inputs or outputs . What * data relationship should be used is also parameterised . * @ param entityType the type of entity we are looking for ( i . e . , service or operation ) * @ param dataPropertyType the kind of data property we are interested in ( e . g . , inputs , outputs , fault ) * @ param modelReference the type of input sought for * @ return a Set of URIs of the entities that matched the request or the empty Set otherwise . */ private Set < URI > listEntitiesByDataModel ( com . hp . hpl . jena . rdf . model . Resource entityType , Property dataPropertyType , URI modelReference ) { } }
if ( modelReference == null ) { return ImmutableSet . of ( ) ; } StringBuilder queryBuilder = new StringBuilder ( ) . append ( "SELECT DISTINCT ?entity WHERE { \n" ) ; // Deal with engines that store the inference in the default graph ( e . g . , OWLIM ) queryBuilder . append ( "{" ) . append ( "\n" ) . append ( " ?entity <" ) . append ( RDF . type . getURI ( ) ) . append ( "> <" ) . append ( entityType . getURI ( ) ) . append ( "> ." ) . append ( "\n" ) ; // Deal with the difference between Services and Operations if ( entityType . equals ( MSM . Service ) ) { queryBuilder . append ( " ?entity <" ) . append ( MSM . hasOperation . getURI ( ) ) . append ( "> / " ) . append ( "\n" ) . append ( " <" ) . append ( dataPropertyType . getURI ( ) ) . append ( "> / " ) . append ( "\n" ) ; } else { queryBuilder . append ( " ?entity <" ) . append ( dataPropertyType . getURI ( ) ) . append ( "> / " ) . append ( "\n" ) ; } queryBuilder . append ( " <" ) . append ( SAWSDL . modelReference . getURI ( ) ) . append ( "> <" ) . append ( modelReference . toASCIIString ( ) ) . append ( "> ." ) . append ( "\n" ) ; // UNION queryBuilder . append ( "\n } UNION { \n" ) ; queryBuilder . append ( " ?entity <" ) . append ( RDF . type . getURI ( ) ) . append ( "> <" ) . append ( entityType . getURI ( ) ) . append ( "> ." ) . append ( "\n" ) ; // Deal with the difference between Services and Operations if ( entityType . equals ( MSM . Service ) ) { queryBuilder . append ( " ?entity <" ) . append ( MSM . hasOperation . getURI ( ) ) . append ( "> / " ) . append ( "\n" ) . append ( " <" ) . append ( dataPropertyType . getURI ( ) ) . append ( "> ?message ." ) . append ( "\n" ) ; } else { queryBuilder . append ( " ?entity <" ) . append ( dataPropertyType . getURI ( ) ) . append ( "> ?message ." ) . append ( "\n" ) ; } queryBuilder . append ( "?message (<" ) . append ( MSM . hasOptionalPart . getURI ( ) ) . append ( "> | <" ) . append ( MSM . hasMandatoryPart . getURI ( ) ) . append ( ">)+ ?part . 
\n" ) . append ( " ?part <" ) . append ( SAWSDL . modelReference . getURI ( ) ) . append ( "> <" ) . append ( modelReference . toASCIIString ( ) ) . append ( "> ." ) . append ( "}}" ) ; return this . graphStoreManager . listResourcesByQuery ( queryBuilder . toString ( ) , "entity" ) ;
public class SipApplicationSessionImpl {
    /**
     * Notifies the registered listeners that a lifecycle event occurred on this
     * sip application session, dispatching to the listener method matching the
     * event type (creation, deletion, expiration, ready-to-invalidate).
     *
     * @param sipApplicationSessionEventType the type of event that happened
     */
    public void notifySipApplicationSessionListeners(SipApplicationSessionEventType sipApplicationSessionEventType) {
        List<SipApplicationSessionListener> listeners = sipContext.getListeners().getSipApplicationSessionListeners();
        if (listeners.size() > 0) {
            // Save the current context class loader, then enter the application's
            // sip context so listener code runs with the application's class loader.
            ClassLoader oldClassLoader = java.lang.Thread.currentThread().getContextClassLoader();
            sipContext.enterSipContext();
            SipApplicationSessionEvent event = new SipApplicationSessionEvent(this.getFacade());
            if (logger.isDebugEnabled()) {
                logger.debug("notifying sip application session listeners of context " + key.getApplicationName() + " of following event " + sipApplicationSessionEventType);
            }
            for (SipApplicationSessionListener sipApplicationSessionListener : listeners) {
                try {
                    if (logger.isDebugEnabled()) {
                        logger.debug("notifying sip application session listener " + sipApplicationSessionListener.getClass().getName() + " of context " + key.getApplicationName() + " of following event " + sipApplicationSessionEventType);
                    }
                    // Dispatch to the listener callback matching the event type.
                    if (SipApplicationSessionEventType.CREATION.equals(sipApplicationSessionEventType)) {
                        sipApplicationSessionListener.sessionCreated(event);
                    } else if (SipApplicationSessionEventType.DELETION.equals(sipApplicationSessionEventType)) {
                        sipApplicationSessionListener.sessionDestroyed(event);
                    } else if (SipApplicationSessionEventType.EXPIRATION.equals(sipApplicationSessionEventType)) {
                        sipApplicationSessionListener.sessionExpired(event);
                    } else if (SipApplicationSessionEventType.READYTOINVALIDATE.equals(sipApplicationSessionEventType)) {
                        sipApplicationSessionListener.sessionReadyToInvalidate(event);
                    }
                } catch (Throwable t) {
                    // A misbehaving listener must not prevent the remaining listeners from running.
                    logger.error("SipApplicationSessionListener threw exception", t);
                }
            }
            // Restore the class loader that was active before entering the sip context.
            sipContext.exitSipContext(oldClassLoader);
        }
    }
}
public class Exec { /** * Execute a command in a container . If there are multiple containers in the pod , uses the first * container in the Pod . * @ param namespace The namespace of the Pod * @ param name The name of the Pod * @ param command The command to run * @ param stdin If true , pass a stdin stream into the container */ public Process exec ( String namespace , String name , String [ ] command , boolean stdin ) throws ApiException , IOException { } }
return exec ( namespace , name , command , null , stdin , false ) ;
public class GregorianCalendar {
    /**
     * Converts {@code utcTimeInMillis} — the field values interpreted as a
     * standard time in UTC — into the real UTC instant by applying the zone and
     * DST offsets.
     *
     * <p>Because the applicable offsets can themselves depend on the instant,
     * the method first computes a ballpark standard time using the zone's raw
     * offset, uses that to query the zone for the accurate zone/DST offsets,
     * and then performs the final adjustment. Explicitly-set ZONE_OFFSET /
     * DST_OFFSET fields (indicated by {@code tzMask}) override the values
     * obtained from the zone. The DST offset may additionally be cleared for
     * wall-clock times that do not exist (see
     * {@code adjustDstOffsetForInvalidWallClock}).
     *
     * @param tzMask          the set of explicitly-set time-zone fields, i.e.
     *                        ZONE_OFFSET_MASK and DST_OFFSET_MASK bits
     * @param utcTimeInMillis the time in millis, calculated as if the time zone were GMT
     * @param zone            the actual time zone
     * @return the UTC time in millis after adjusting for zone and DST offsets
     */
    private long adjustForZoneAndDaylightSavingsTime(int tzMask, long utcTimeInMillis, TimeZone zone) {
        // The following don't actually need to be initialized because they are
        // always set before they are used, but the compiler cannot detect that.
        int zoneOffset = 0;
        int dstOffset = 0;
        // If either of the ZONE_OFFSET or DST_OFFSET fields is not set, obtain
        // the missing information from the TimeZone.
        if (tzMask != (ZONE_OFFSET_MASK | DST_OFFSET_MASK)) {
            if (zoneOffsets == null) {
                zoneOffsets = new int[2];
            }
            int gmtOffset = isFieldSet(tzMask, ZONE_OFFSET) ? internalGet(ZONE_OFFSET) : zone.getRawOffset();
            // Calculate the standard time (no DST) in the supplied zone. This is a
            // ballpark figure and not used in the final calculation, as the offset
            // used here may not match the offset the zone actually requires for
            // this time (e.g. Honolulu's raw offset changed from GMT-10:30 to
            // GMT-10:00 in 1947; the TimeZone always reports a raw offset of
            // -10:00 but returns -10:30 for dates before the changeover).
            long standardTimeInZone = utcTimeInMillis - gmtOffset;
            // Retrieve the correct zone and DST offsets from the time zone.
            // J2ObjC modified: Use NativeTimeZone instead of ZoneInfo.
            if (zone instanceof NativeTimeZone) {
                ((NativeTimeZone) zone).getOffsetsByUtcTime(standardTimeInZone, zoneOffsets);
            } else {
                zone.getOffsets(standardTimeInZone, zoneOffsets);
            }
            zoneOffset = zoneOffsets[0];
            dstOffset = zoneOffsets[1];
            // If necessary adjust the DST offset to handle an invalid wall clock sensibly.
            dstOffset = adjustDstOffsetForInvalidWallClock(standardTimeInZone, zone, dstOffset);
        }
        // If either the ZONE_OFFSET or DST_OFFSET field is explicitly set, its
        // value overrides whatever the TimeZone reported above.
        if (tzMask != 0) {
            if (isFieldSet(tzMask, ZONE_OFFSET)) {
                zoneOffset = internalGet(ZONE_OFFSET);
            }
            if (isFieldSet(tzMask, DST_OFFSET)) {
                dstOffset = internalGet(DST_OFFSET);
            }
        }
        // Apply the time zone offsets to obtain the UTC time.
        long standardTimeInZone = utcTimeInMillis - zoneOffset;
        return standardTimeInZone - dstOffset;
    }
}
public class ZonedDateTime { /** * Returns a copy of this { @ code ZonedDateTime } with the specified number of nanoseconds subtracted . * This operates on the instant time - line , such that subtracting one nano will * always be a duration of one nano earlier . * This may cause the local date - time to change by an amount other than one nano . * Note that this is a different approach to that used by days , months and years . * This instance is immutable and unaffected by this method call . * @ param nanos the nanos to subtract , may be negative * @ return a { @ code ZonedDateTime } based on this date - time with the nanoseconds subtracted , not null * @ throws DateTimeException if the result exceeds the supported date range */ public ZonedDateTime minusNanos ( long nanos ) { } }
return ( nanos == Long . MIN_VALUE ? plusNanos ( Long . MAX_VALUE ) . plusNanos ( 1 ) : plusNanos ( - nanos ) ) ;
public class DefaultConfigurationFactory { /** * Creates default implementation of { @ link DiskCache } depends on incoming parameters */ public static DiskCache createDiskCache ( Context context , FileNameGenerator diskCacheFileNameGenerator , long diskCacheSize , int diskCacheFileCount ) { } }
File reserveCacheDir = createReserveDiskCacheDir ( context ) ; if ( diskCacheSize > 0 || diskCacheFileCount > 0 ) { File individualCacheDir = StorageUtils . getIndividualCacheDirectory ( context ) ; try { return new LruDiskCache ( individualCacheDir , reserveCacheDir , diskCacheFileNameGenerator , diskCacheSize , diskCacheFileCount ) ; } catch ( IOException e ) { L . e ( e ) ; // continue and create unlimited cache } } File cacheDir = StorageUtils . getCacheDirectory ( context ) ; return new UnlimitedDiskCache ( cacheDir , reserveCacheDir , diskCacheFileNameGenerator ) ;
public class MergeRequestApi { /** * Get a Pager of the participants of merge request . * < pre > < code > GitLab Endpoint : GET / projects / : id / merge _ requests / : merge _ request _ iid / participants < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param mergeRequestIid the IID of the merge request to get * @ param itemsPerPage the number of Participant instances that will be fetched per page * @ return a Pager containing all participants for the specified merge request * @ throws GitLabApiException if any exception occurs */ public Pager < Participant > getParticipants ( Object projectIdOrPath , Integer mergeRequestIid , int itemsPerPage ) throws GitLabApiException { } }
return new Pager < Participant > ( this , Participant . class , itemsPerPage , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "merge_requests" , mergeRequestIid , "participants" ) ;
public class M4AReader {
    /**
     * Finds the index of the frame whose byte offset equals {@code pos}.
     *
     * @param pos the byte offset to look up
     * @return the index of the matching frame, or 1 when no frame matches.
     *         NOTE(review): a fallback of 1 (rather than 0 or -1) looks
     *         suspicious for an index lookup — confirm callers expect this.
     */
    private int getFrame(long pos) {
        int sample = 1;
        int len = frames.size();
        MP4Frame frame = null;
        for (int f = 0; f < len; f++) {
            frame = frames.get(f);
            // An exact offset match identifies the frame.
            if (pos == frame.getOffset()) {
                sample = f;
                break;
            }
        }
        return sample;
    }
}
public class Document { /** * 获取复杂句子列表 , 句子中的每个单词有可能是复合词 , 有可能是简单词 * @ return */ public List < List < IWord > > getComplexSentenceList ( ) { } }
List < List < IWord > > complexList = new LinkedList < List < IWord > > ( ) ; for ( Sentence sentence : sentenceList ) { complexList . add ( sentence . wordList ) ; } return complexList ;
public class KeyGroupRangeOffsets { /** * Returns a key - group range with offsets which is the intersection of the internal key - group range with the given * key - group range . * @ param keyGroupRange Key - group range to intersect with the internal key - group range . * @ return The key - group range with offsets for the intersection of the internal key - group range with the given * key - group range . */ public KeyGroupRangeOffsets getIntersection ( KeyGroupRange keyGroupRange ) { } }
Preconditions . checkNotNull ( keyGroupRange ) ; KeyGroupRange intersection = this . keyGroupRange . getIntersection ( keyGroupRange ) ; long [ ] subOffsets = new long [ intersection . getNumberOfKeyGroups ( ) ] ; if ( subOffsets . length > 0 ) { System . arraycopy ( offsets , computeKeyGroupIndex ( intersection . getStartKeyGroup ( ) ) , subOffsets , 0 , subOffsets . length ) ; } return new KeyGroupRangeOffsets ( intersection , subOffsets ) ;
public class Chronology { /** * Casts the { @ code Temporal } to { @ code ChronoLocalDateTime } with the same chronology . * @ param temporal a date - time to cast , not null * @ return the date - time checked and cast to { @ code ChronoLocalDateTime } , not null * @ throws ClassCastException if the date - time cannot be cast to ChronoLocalDateTimeImpl * or the chronology is not equal this Chrono */ < D extends ChronoLocalDate > ChronoLocalDateTimeImpl < D > ensureChronoLocalDateTime ( Temporal temporal ) { } }
@ SuppressWarnings ( "unchecked" ) ChronoLocalDateTimeImpl < D > other = ( ChronoLocalDateTimeImpl < D > ) temporal ; if ( this . equals ( other . toLocalDate ( ) . getChronology ( ) ) == false ) { throw new ClassCastException ( "Chrono mismatch, required: " + getId ( ) + ", supplied: " + other . toLocalDate ( ) . getChronology ( ) . getId ( ) ) ; } return other ;
public class XPathUtil {

    /**
     * Creates a string representation of the DOM node.
     *
     * NOTE: The string can be formatted and indented with a specified indent size, but be
     * aware that the indent-amount property is depending on a Xalan implementation of the
     * XSLT library.
     *
     * @param node the DOM node to serialize
     * @param indentXml whether to pretty-print with indentation; when false, whitespace is
     *        stripped via the bundled remove-whitespace.xsl stylesheet
     * @param indentSize number of spaces per indent level (only used when indentXml is true)
     * @return the serialized XML string
     * @throws RuntimeException wrapping any transformer failure
     */
    public static String getXml(Node node, boolean indentXml, int indentSize) {
        try {
            TransformerFactory tf = TransformerFactory.newInstance();
            Transformer transformer;
            if (indentXml) {
                transformer = tf.newTransformer();
                transformer.setOutputProperty(OutputKeys.INDENT, "yes");
                // Xalan-specific output property controlling the indent width.
                // Integer.toString replaces the deprecated 'new Integer(...)' boxing ctor.
                transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", Integer.toString(indentSize));
            } else {
                // Strip insignificant whitespace via the bundled stylesheet.
                transformer = tf.newTransformer(new StreamSource(XPathUtil.class.getResourceAsStream("remove-whitespace.xsl")));
            }
            // Serialize into an in-memory writer rather than a file.
            StreamResult result = new StreamResult(new StringWriter());
            DOMSource source = new DOMSource(node);
            transformer.transform(source, result);
            return result.getWriter().toString();
        } catch (Exception e) {
            // Preserve the original cause for diagnostics.
            throw new RuntimeException(e);
        }
    }
}
public class CmsLogFileApp { /** * Adds the download button . * @ param view layout which displays the log file */ private void addDownloadButton ( final CmsLogFileView view ) { } }
Button button = CmsToolBar . createButton ( FontOpenCms . DOWNLOAD , CmsVaadinUtils . getMessageText ( Messages . GUI_LOGFILE_DOWNLOAD_0 ) ) ; button . addClickListener ( new ClickListener ( ) { private static final long serialVersionUID = 1L ; public void buttonClick ( ClickEvent event ) { Window window = CmsBasicDialog . prepareWindow ( CmsBasicDialog . DialogWidth . wide ) ; window . setCaption ( CmsVaadinUtils . getMessageText ( Messages . GUI_LOGFILE_DOWNLOAD_0 ) ) ; window . setContent ( new CmsLogDownloadDialog ( window , view . getCurrentFile ( ) ) ) ; A_CmsUI . get ( ) . addWindow ( window ) ; } } ) ; m_uiContext . addToolbarButton ( button ) ;
public class Expression {

    /**
     * Execute an expression in the XPath runtime context, and return the
     * result of the expression.
     *
     * This overload exists so callers that already know the current node, its DTM,
     * and its expanded type can pass them in; this default implementation ignores
     * those extra arguments and simply delegates to {@code execute(xctxt)} —
     * subclasses that can exploit the extra context are expected to override it.
     *
     * @param xctxt The XPath runtime context.
     * @param currentNode The currentNode (unused by this default implementation).
     * @param dtm The DTM of the current node (unused by this default implementation).
     * @param expType The expanded type ID of the current node (unused by this default implementation).
     * @return The result of the expression in the form of a <code>XObject</code>.
     * @throws javax.xml.transform.TransformerException if a runtime exception occurs.
     */
    public XObject execute(XPathContext xctxt, int currentNode, DTM dtm, int expType) throws javax.xml.transform.TransformerException {
        // For now, the current node is already pushed.
        return execute(xctxt);
    }
}