signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class PlatformDescription { /** * The programming languages supported by the platform .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setProgrammingLanguages ( java . util . Collection ) } or { @ link # withProgrammingLanguages ( java . util . Collection ) }
* if you want to override the existing values .
* @ param programmingLanguages
* The programming languages supported by the platform .
* @ return Returns a reference to this object so that method calls can be chained together . */
public PlatformDescription withProgrammingLanguages ( PlatformProgrammingLanguage ... programmingLanguages ) { } } | if ( this . programmingLanguages == null ) { setProgrammingLanguages ( new com . amazonaws . internal . SdkInternalList < PlatformProgrammingLanguage > ( programmingLanguages . length ) ) ; } for ( PlatformProgrammingLanguage ele : programmingLanguages ) { this . programmingLanguages . add ( ele ) ; } return this ; |
public class KeyedDataStorage { /** * Atomically gets the existing data object if any , or if it doesn ' t exist
* { @ link # create ( String , Object ) create } it and return it .
* @ return
* Item with the specified { @ code key } .
* @ param createParams
* Additional parameters needed to create a new data object . Can be null .
* @ throws IOException Loading error */
public @ Nonnull T getOrCreate ( String key , P createParams ) throws IOException { } } | return get ( key , true , createParams ) ; |
public class EdgeList { /** * Check whether the edge type of the { @ link DataSet } is { @ link NullValue } .
* @ param edges data set for introspection
* @ param < T > graph ID type
* @ param < ET > edge value type
* @ return whether the edge type of the { @ link DataSet } is { @ link NullValue } */
private static < T , ET > boolean hasNullValueEdges ( DataSet < Edge < T , ET > > edges ) { } } | TypeInformation < ? > genericTypeInfo = edges . getType ( ) ; @ SuppressWarnings ( "unchecked" ) TupleTypeInfo < Tuple3 < T , T , ET > > tupleTypeInfo = ( TupleTypeInfo < Tuple3 < T , T , ET > > ) genericTypeInfo ; return tupleTypeInfo . getTypeAt ( 2 ) . equals ( ValueTypeInfo . NULL_VALUE_TYPE_INFO ) ; |
public class CommonG {
    /**
     * Checks whether a string is a valid date/timestamp for the given format.
     * Parsing is strict (non-lenient), so out-of-range values such as "2020-02-30"
     * are rejected rather than rolled over.
     *
     * @param dateToValidate the candidate date string; may be null
     * @param dateFormat the {@link SimpleDateFormat} pattern to validate against
     * @return true if the string parses exactly under the pattern, false otherwise
     */
    private boolean isThisDateValid(String dateToValidate, String dateFormat) {
        if (dateToValidate == null) {
            return false;
        }
        SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
        // Strict parsing: do not silently adjust invalid field values.
        sdf.setLenient(false);
        try {
            // The parsed Date itself is not needed; only parseability matters.
            sdf.parse(dateToValidate);
            return true;
        } catch (ParseException e) {
            // An unparseable value is the expected "invalid" outcome, not an error
            // worth dumping a stack trace for (the old printStackTrace was noise).
            return false;
        }
    }
}
public class LogObjectPrinter { /** * Gets join point input params description string .
* @ param joinPoint aspect join point
* @ param includeParamNames input parameters names to be printed . NOTE ! can be overridden with @ { @ link
* LoggingAspectConfig }
* @ return join point input params description string */
public static String printInputParams ( JoinPoint joinPoint , String ... includeParamNames ) { } } | try { if ( joinPoint == null ) { return "joinPoint is null" ; } Signature signature = joinPoint . getSignature ( ) ; if ( ! ( signature instanceof MethodSignature ) ) { return PRINT_EMPTY_LIST ; } Optional < LoggingAspectConfig > config = AopAnnotationUtils . getConfigAnnotation ( joinPoint ) ; String [ ] includeParams = includeParamNames ; String [ ] excludeParams = EMPTY_ARRAY ; boolean inputCollectionAware = LoggingAspectConfig . DEFAULT_INPUT_COLLECTION_AWARE ; if ( config . isPresent ( ) ) { if ( ! config . get ( ) . inputDetails ( ) ) { return PRINT_HIDDEN ; } inputCollectionAware = config . get ( ) . inputCollectionAware ( ) ; if ( ArrayUtils . isNotEmpty ( config . get ( ) . inputIncludeParams ( ) ) ) { includeParams = config . get ( ) . inputIncludeParams ( ) ; } if ( ArrayUtils . isEmpty ( includeParams ) && ArrayUtils . isNotEmpty ( config . get ( ) . inputExcludeParams ( ) ) ) { excludeParams = config . get ( ) . inputExcludeParams ( ) ; } } MethodSignature ms = ( MethodSignature ) signature ; String [ ] params = ms . getParameterNames ( ) ; return ArrayUtils . isNotEmpty ( params ) ? renderParams ( joinPoint , params , includeParams , excludeParams , inputCollectionAware ) : PRINT_EMPTY_LIST ; } catch ( IndexOutOfBoundsException | IllegalArgumentException e ) { log . warn ( "Error while print params: {}, params = {}" , e , joinPoint . getArgs ( ) ) ; return "printerror: " + e ; } |
public class SecurityPhaseListener { /** * Enforce any security annotations applicable to the InvokeApplication phase
* @ param event */
public void observeInvokeApplication ( @ Observes @ Before @ InvokeApplication PhaseEvent event ) { } } | log . debug ( "Before Render Response event" ) ; performObservation ( event , PhaseIdType . INVOKE_APPLICATION ) ; |
public class DescribeRulesPackagesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeRulesPackagesRequest describeRulesPackagesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeRulesPackagesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeRulesPackagesRequest . getRulesPackageArns ( ) , RULESPACKAGEARNS_BINDING ) ; protocolMarshaller . marshall ( describeRulesPackagesRequest . getLocale ( ) , LOCALE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ModelSqlUtils { /** * 从对象中获取delete语句
* @ param po
* @ return
* @ throws Exception */
public static SqlParamsPairs getDeleteFromObject ( Object po ) throws Exception { } } | // 用来存放insert语句
StringBuffer deleteSql = new StringBuffer ( ) ; // 用来存储id
Object idValue = null ; // 分析表名
String tableName = getTableName ( po . getClass ( ) ) ; deleteSql . append ( "delete from " + tableName + " where " ) ; Class clazz = po . getClass ( ) ; // 分析列
Field [ ] fields = clazz . getDeclaredFields ( ) ; // 用于寻找id字段
Id idAnno = null ; for ( int i = 0 ; i < fields . length ; i ++ ) { Field f = fields [ i ] ; // 找id字段
Method getter = getGetter ( clazz , f ) ; if ( getter == null ) { // 没有get方法直接跳过
continue ; } // 看是不是主键
idAnno = getter . getAnnotation ( Id . class ) ; if ( idAnno == null ) { continue ; } // 看有没有定义column
String columnName = getColumnNameFromGetter ( getter , f ) ; deleteSql . append ( columnName + " = ?" ) ; idValue = getter . invoke ( po , new Object [ ] { } ) ; break ; } // 全部遍历完如果找不到主键就抛异常
if ( idAnno == null ) { throw new NoIdAnnotationFoundException ( clazz ) ; } SqlParamsPairs sqlAndParams = new SqlParamsPairs ( deleteSql . toString ( ) , new Object [ ] { idValue } ) ; logger . debug ( sqlAndParams . toString ( ) ) ; return sqlAndParams ; |
public class InstanceTarget { /** * The lifecycle events of the deployment to this target instance .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLifecycleEvents ( java . util . Collection ) } or { @ link # withLifecycleEvents ( java . util . Collection ) } if you
* want to override the existing values .
* @ param lifecycleEvents
* The lifecycle events of the deployment to this target instance .
* @ return Returns a reference to this object so that method calls can be chained together . */
public InstanceTarget withLifecycleEvents ( LifecycleEvent ... lifecycleEvents ) { } } | if ( this . lifecycleEvents == null ) { setLifecycleEvents ( new com . amazonaws . internal . SdkInternalList < LifecycleEvent > ( lifecycleEvents . length ) ) ; } for ( LifecycleEvent ele : lifecycleEvents ) { this . lifecycleEvents . add ( ele ) ; } return this ; |
public class Utils { /** * Returns the host part of the URL .
* @ param url a URL
* @ return just the host */
public static String getHostFromURL ( String url ) { } } | URL u = toURL ( url ) ; String host = ( u == null ) ? "" : u . getHost ( ) ; return host ; |
public class MavenUtil { /** * get maven settings */
static MavenSettings getMavenSettings ( ) { } } | try { String homeDir = System . getProperty ( "user.home" ) ; return parseMavenSettings ( new File ( homeDir , ".m2/settings.xml" ) ) ; } catch ( Exception e ) { log ( e ) ; } return new MavenSettings ( ) ; |
public class KuduDBSchemaManager { /** * Alter column .
* @ param alterTableOptions
* the alter table options
* @ param schema
* the schema
* @ param columnInfo
* the column info
* @ param updated
* the updated */
private void alterColumn ( AlterTableOptions alterTableOptions , Schema schema , ColumnInfo columnInfo , AtomicBoolean updated ) { } } | if ( ! KuduDBDataHandler . hasColumn ( schema , columnInfo . getColumnName ( ) ) ) { // add if column is not in schema
alterTableOptions . addNullableColumn ( columnInfo . getColumnName ( ) , KuduDBValidationClassMapper . getValidTypeForClass ( columnInfo . getType ( ) ) ) ; updated . set ( true ) ; } else { // check for type , drop and add if not consistent TODO : throw
// exception or override ?
if ( ! schema . getColumn ( columnInfo . getColumnName ( ) ) . getType ( ) . equals ( KuduDBValidationClassMapper . getValidTypeForClass ( columnInfo . getType ( ) ) ) ) { alterTableOptions . dropColumn ( columnInfo . getColumnName ( ) ) ; alterTableOptions . addNullableColumn ( columnInfo . getColumnName ( ) , KuduDBValidationClassMapper . getValidTypeForClass ( columnInfo . getType ( ) ) ) ; updated . set ( true ) ; } } |
public class FailOverListenerPriorityComparator { /** * / * ( non - Javadoc )
* @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */
public int compare ( FailOverListener o1 , FailOverListener o2 ) { } } | if ( o1 . equals ( o2 ) ) { return 0 ; } else { if ( o1 . getPriority ( ) > o2 . getPriority ( ) ) { return - 1 ; } else { return 1 ; } } |
public class Enforcer { /** * deletePermissionForUser deletes a permission for a user or role .
* Returns false if the user or role does not have the permission ( aka not affected ) .
* @ param user the user .
* @ param permission the permission , usually be ( obj , act ) . It is actually the rule without the subject .
* @ return succeeds or not . */
public boolean deletePermissionForUser ( String user , List < String > permission ) { } } | return deletePermissionForUser ( user , permission . toArray ( new String [ 0 ] ) ) ; |
public class ns_ns_ip { /** * Use this API to fetch filtered set of ns _ ns _ ip resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static ns_ns_ip [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | ns_ns_ip obj = new ns_ns_ip ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; ns_ns_ip [ ] response = ( ns_ns_ip [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class ArrayFile { /** * Resets this ArrayFile with the specified long array .
* @ param longArray - the long array .
* @ throws IOException */
public synchronized void reset ( MemoryLongArray longArray ) throws IOException { } } | _writer . flush ( ) ; _writer . position ( DATA_START_POSITION ) ; for ( int i = 0 , cnt = longArray . length ( ) ; i < cnt ; i ++ ) { _writer . writeLong ( longArray . get ( i ) ) ; } _writer . flush ( ) ; |
public class Vector4f { /** * / * ( non - Javadoc )
* @ see org . joml . Vector4fc # negate ( org . joml . Vector4f ) */
public Vector4f negate ( Vector4f dest ) { } } | dest . x = - x ; dest . y = - y ; dest . z = - z ; dest . w = - w ; return dest ; |
public class CaffeineConfiguration { /** * Set the { @ link Factory } for the { @ link Weigher } .
* @ param factory the { @ link Weigher } { @ link Factory } */
@ SuppressWarnings ( "unchecked" ) public void setWeigherFactory ( Optional < Factory < ? extends Weigher < K , V > > > factory ) { } } | weigherFactory = ( Factory < Weigher < K , V > > ) factory . orElse ( null ) ; |
public class CmsDomUtil { /** * This method will create an { @ link com . google . gwt . dom . client . Element } for the given HTML .
* The HTML should have a single root tag , if not , the first tag will be used and all others discarded . < p >
* Script - tags will be removed . < p >
* @ param html the HTML to use for the element
* @ return the created element
* @ throws Exception if something goes wrong */
public static Element createElement ( String html ) throws Exception { } } | Element wrapperDiv = DOM . createDiv ( ) ; wrapperDiv . setInnerHTML ( html ) ; Element elementRoot = wrapperDiv . getFirstChildElement ( ) ; wrapperDiv . removeChild ( elementRoot ) ; // just in case we have a script tag outside the root HTML - tag
while ( ( elementRoot != null ) && ( elementRoot . getTagName ( ) . toLowerCase ( ) . equals ( Tag . script . name ( ) ) ) ) { elementRoot = wrapperDiv . getFirstChildElement ( ) ; wrapperDiv . removeChild ( elementRoot ) ; } if ( elementRoot == null ) { CmsDebugLog . getInstance ( ) . printLine ( "Could not create element as the given HTML has no appropriate root element" ) ; throw new IllegalArgumentException ( "Could not create element as the given HTML has no appropriate root element" ) ; } return elementRoot ; |
public class ImgCompressUtils { /** * 根据宽或者高和指定压缩质量进行等比例压缩 , 注意如果指定的宽或者高大于源图片的高或者宽 ,
* 那么压缩图片将直接使用源图片的宽高 。
* 注 : 若isForceWh为true , 不论如何均按照指定宽高进行等比例压缩
* @ param srcStream 源图片输入流
* @ param base 指定压缩后图片的宽或者高
* @ param wh 此参数用于指定base参数是宽还是高 , 该参数应由 { @ link ImgCompressUtils } 里的
* 静态常量指定
* @ param quality 指定压缩质量 , 范围 [ 0.0,1.0 ] , 如果指定为null则按照默认值
* @ param isForceWh 指定是否强制使用指定宽高进行等比例压缩 , true代表强制 , false反之
* @ return 返回压缩后的图像对象 */
public static BufferedImage imgCompressByScale ( InputStream srcStream , double base , int wh , Float quality , boolean isForceWh ) { } } | BufferedImage bufferedImage = null ; try { bufferedImage = ImgCompressUtils . imgCompressByScale ( ImageIO . read ( srcStream ) , base , wh , quality , isForceWh ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } return bufferedImage ; |
public class DimensionProperties { /** * Gets the levelOfDetail value for this DimensionProperties .
* @ return levelOfDetail */
public com . google . api . ads . adwords . axis . v201809 . cm . LevelOfDetail getLevelOfDetail ( ) { } } | return levelOfDetail ; |
public class AWSStorageGatewayClient { /** * Configures one or more gateway local disks as upload buffer for a specified gateway . This operation is supported
* for the stored volume , cached volume and tape gateway types .
* In the request , you specify the gateway Amazon Resource Name ( ARN ) to which you want to add upload buffer , and
* one or more disk IDs that you want to configure as upload buffer .
* @ param addUploadBufferRequest
* @ return Result of the AddUploadBuffer operation returned by the service .
* @ throws InvalidGatewayRequestException
* An exception occurred because an invalid gateway request was issued to the service . For more information ,
* see the error and message fields .
* @ throws InternalServerErrorException
* An internal server error has occurred during the request . For more information , see the error and message
* fields .
* @ sample AWSStorageGateway . AddUploadBuffer
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / AddUploadBuffer " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public AddUploadBufferResult addUploadBuffer ( AddUploadBufferRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeAddUploadBuffer ( request ) ; |
public class KbRuntimeException { /** * Converts a Throwable to a KbRuntimeException . If the Throwable is a
* KbRuntimeException , it will be passed through unmodified ; otherwise , it will be wrapped
* in a new KbRuntimeException .
* @ param cause the Throwable to convert
* @ return a KbRuntimeException */
public static KbRuntimeException fromThrowable ( Throwable cause ) { } } | return ( cause instanceof KbRuntimeException ) ? ( KbRuntimeException ) cause : new KbRuntimeException ( cause ) ; |
public class XsdAnnotationProcessor { /** * { @ inheritDoc } */
@ Override public boolean accept ( final Node aNode ) { } } | // Only deal with Element nodes with " name " attributes .
if ( ! DomHelper . isNamedElement ( aNode ) ) { return false ; } /* < xs : complexType name = " somewhatNamedPerson " >
< ! - - ClassLocation JavaDocData insertion point - - >
< xs : sequence >
< ! - - FieldLocation or MethodLocation JavaDocData insertion point ( within child ) - - >
< xs : element name = " firstName " type = " xs : string " nillable = " true " minOccurs = " 0 " / >
< ! - - FieldLocation or MethodLocation JavaDocData insertion point ( within child ) - - >
< xs : element name = " lastName " type = " xs : string " / >
< / xs : sequence >
< ! - - FieldLocation or MethodLocation JavaDocData insertion point ( within child ) - - >
< xs : attribute name = " age " type = " xs : int " use = " required " / >
< / xs : complexType > */
// Only process nodes corresponding to Types we have any JavaDoc for .
// TODO : How should we handle PackageLocations and package documentation ?
boolean toReturn = false ; if ( DomHelper . getMethodLocation ( aNode , methodJavaDocs . keySet ( ) ) != null ) { toReturn = true ; } else if ( DomHelper . getFieldLocation ( aNode , fieldJavaDocs . keySet ( ) ) != null ) { toReturn = true ; } else if ( DomHelper . getClassLocation ( aNode , classJavaDocs . keySet ( ) ) != null ) { toReturn = true ; } // All done .
return toReturn ; |
public class CryptoPrimitives { /** * addCACertificateToTrustStore adds a CA cert to the set of certificates used for signature validation
* @ param caCert an X . 509 certificate
* @ param alias an alias associated with the certificate . Used as shorthand for the certificate during crypto operations
* @ throws CryptoException
* @ throws InvalidArgumentException */
void addCACertificateToTrustStore ( Certificate caCert , String alias ) throws InvalidArgumentException , CryptoException { } } | if ( alias == null || alias . isEmpty ( ) ) { throw new InvalidArgumentException ( "You must assign an alias to a certificate when adding to the trust store." ) ; } if ( caCert == null ) { throw new InvalidArgumentException ( "Certificate cannot be null." ) ; } try { if ( config . extraLogLevel ( 10 ) ) { if ( null != diagnosticFileDumper ) { logger . trace ( format ( "Adding cert to trust store. alias: %s. certificate:" , alias ) + diagnosticFileDumper . createDiagnosticFile ( alias + "cert: " + caCert . toString ( ) ) ) ; } } synchronized ( certificateSet ) { if ( certificateSet . contains ( alias ) ) { return ; } getTrustStore ( ) . setCertificateEntry ( alias , caCert ) ; certificateSet . add ( alias ) ; } } catch ( KeyStoreException e ) { String emsg = "Unable to add CA certificate to trust store. Error: " + e . getMessage ( ) ; logger . error ( emsg , e ) ; throw new CryptoException ( emsg , e ) ; } |
public class ClassUtils { /** * Resolves the { @ link Class } type of { @ link Type } .
* @ param type { @ link Type } to resolve as a { @ link Class } .
* @ return the resolved { @ link Class } type { @ link Type } .
* @ throws IllegalArgumentException if the given { @ link Type } cannot be resolved as a { @ link Class } type .
* @ see java . lang . reflect . ParameterizedType
* @ see java . lang . reflect . Type */
@ NullSafe public static Class < ? > toRawType ( Type type ) { } } | Type resolvedType = type instanceof ParameterizedType ? ( ( ParameterizedType ) type ) . getRawType ( ) : type instanceof TypeVariable ? safeGetValue ( ( ) -> loadClass ( ( ( TypeVariable ) type ) . getName ( ) ) , Object . class ) : type ; return Class . class . cast ( Optional . ofNullable ( resolvedType ) . filter ( it -> it instanceof Class ) . orElseThrow ( ( ) -> newIllegalArgumentException ( "[%1$s] is not resolvable as a %2$s" , type , Class . class . getName ( ) ) ) ) ; |
public class CommerceCurrencyPersistenceImpl { /** * Returns the number of commerce currencies where groupId = & # 63 ; and primary = & # 63 ; and active = & # 63 ; .
* @ param groupId the group ID
* @ param primary the primary
* @ param active the active
* @ return the number of matching commerce currencies */
@ Override public int countByG_P_A ( long groupId , boolean primary , boolean active ) { } } | FinderPath finderPath = FINDER_PATH_COUNT_BY_G_P_A ; Object [ ] finderArgs = new Object [ ] { groupId , primary , active } ; Long count = ( Long ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( count == null ) { StringBundler query = new StringBundler ( 4 ) ; query . append ( _SQL_COUNT_COMMERCECURRENCY_WHERE ) ; query . append ( _FINDER_COLUMN_G_P_A_GROUPID_2 ) ; query . append ( _FINDER_COLUMN_G_P_A_PRIMARY_2 ) ; query . append ( _FINDER_COLUMN_G_P_A_ACTIVE_2 ) ; String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; qPos . add ( groupId ) ; qPos . add ( primary ) ; qPos . add ( active ) ; count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( finderPath , finderArgs , count ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ; |
public class XmlUtil { /** * Runs the given xpath and returns a { @ link String } result . */
public static String xpathExtract ( Node document , String xpathExpression , Map < String , String > namespaceMapping ) throws XPathException , MarshallingException { } } | return ( String ) executeXPath ( document , xpathExpression , namespaceMapping , XPathConstants . STRING ) ; |
public class BlueGreenDeploymentConfigurationMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BlueGreenDeploymentConfiguration blueGreenDeploymentConfiguration , ProtocolMarshaller protocolMarshaller ) { } } | if ( blueGreenDeploymentConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( blueGreenDeploymentConfiguration . getTerminateBlueInstancesOnDeploymentSuccess ( ) , TERMINATEBLUEINSTANCESONDEPLOYMENTSUCCESS_BINDING ) ; protocolMarshaller . marshall ( blueGreenDeploymentConfiguration . getDeploymentReadyOption ( ) , DEPLOYMENTREADYOPTION_BINDING ) ; protocolMarshaller . marshall ( blueGreenDeploymentConfiguration . getGreenFleetProvisioningOption ( ) , GREENFLEETPROVISIONINGOPTION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ListInputSecurityGroupsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListInputSecurityGroupsRequest listInputSecurityGroupsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listInputSecurityGroupsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listInputSecurityGroupsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listInputSecurityGroupsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class FlowPath { /** * This method returns TRUE if specified node was already executed during current pass , FALSE otherwise
* @ param nodeName
* @ return */
public boolean wasExecuted ( @ NonNull String nodeName ) { } } | ensureNodeStateExists ( nodeName ) ; return states . get ( nodeName ) . isExecuted ( ) ; |
public class AbstractDocBuilder { /** * get the modification history of the method . */
protected ModificationHistory getModificationHistory ( MethodDoc methodDoc ) { } } | ModificationHistory history = new ModificationHistory ( ) ; history . addModificationRecords ( this . parseModificationRecords ( methodDoc . tags ( ) ) ) ; return history ; |
public class CreateProposalLineItemsFromPackages { /** * Runs the example .
* @ param adManagerServices the services factory .
* @ param session the session .
* @ param packageId the ID of the package to create line items from .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session , long packageId ) throws RemoteException { } } | // Get the PackageService .
PackageServiceInterface packageService = adManagerServices . get ( session , PackageServiceInterface . class ) ; // Create a statement to select a single package .
StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "id = :id" ) . orderBy ( "id ASC" ) . limit ( 1 ) . withBindVariableValue ( "id" , packageId ) ; // Get the package .
PackagePage page = packageService . getPackagesByStatement ( statementBuilder . toStatement ( ) ) ; Package pkg = Iterables . getOnlyElement ( Arrays . asList ( page . getResults ( ) ) ) ; System . out . printf ( "Package with ID %d will create proposal line items using" + " product package with ID %d.%n" , pkg . getId ( ) , pkg . getProductPackageId ( ) ) ; // Remove limit and offset from statement .
statementBuilder . removeLimitAndOffset ( ) ; // Create action to activate packages .
com . google . api . ads . admanager . axis . v201808 . CreateProposalLineItemsFromPackages action = new com . google . api . ads . admanager . axis . v201808 . CreateProposalLineItemsFromPackages ( ) ; // Perform action .
UpdateResult result = packageService . performPackageAction ( action , statementBuilder . toStatement ( ) ) ; if ( result != null && result . getNumChanges ( ) > 0 ) { System . out . printf ( "Number of packages proposal line items were created for: %d%n" , result . getNumChanges ( ) ) ; } else { System . out . println ( "No proposal line items were created." ) ; } |
public class SynthesisReportGui { /** * Present data in javax . swing . table . DefaultTableModel form .
* @ param model { @ link ObjectTableModel }
* @ param formats Array of { @ link Format } array can contain null formatters in this case value is added as is
* @ param columns Columns headers
* @ return data in table form */
public static DefaultTableModel getAllDataAsTable ( ObjectTableModel model , Format [ ] formats , String [ ] columns ) { } } | final List < List < Object > > table = getAllTableData ( model , formats ) ; final DefaultTableModel tableModel = new DefaultTableModel ( ) ; for ( String header : columns ) { tableModel . addColumn ( header ) ; } for ( List < Object > row : table ) { tableModel . addRow ( new Vector ( row ) ) ; } return tableModel ; |
public class DescribeServicesRequest { /** * A JSON - formatted list of service codes available for AWS services .
* @ return A JSON - formatted list of service codes available for AWS services . */
public java . util . List < String > getServiceCodeList ( ) { } } | if ( serviceCodeList == null ) { serviceCodeList = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return serviceCodeList ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PolygonPatchType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link PolygonPatchType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "PolygonPatch" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_SurfacePatch" ) public JAXBElement < PolygonPatchType > createPolygonPatch ( PolygonPatchType value ) { } } | return new JAXBElement < PolygonPatchType > ( _PolygonPatch_QNAME , PolygonPatchType . class , null , value ) ; |
public class WDataListServletExample { /** * Adds a field to the example ' s layout .
* @ param input the input field to add .
* @ param labelText the label text for the field .
* @ param labelHint the optional label hint for the field . */
private void addField ( final WComponent input , final String labelText , final String labelHint ) { } } | WLabel label = new WLabel ( labelText , input ) ; if ( labelHint != null ) { label . setHint ( labelHint ) ; } layout . addField ( label , input ) ; |
public class BsThumbnailQueueCA { public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsThumbnailQueueCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsThumbnailQueueCA > aggsLambda ) { } } | ThumbnailQueueCQ cq = new ThumbnailQueueCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; } FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; } if ( aggsLambda != null ) { ThumbnailQueueCA ca = new ThumbnailQueueCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; } |
public class SimpleMutableDateTime { /** * Creates SimpleMutableDateTime and initializes it using given clock .
* @ param clock
* @ return */
public static final SimpleMutableDateTime now ( Clock clock ) { } } | ZonedDateTime zdt = ZonedDateTime . now ( clock ) ; SimpleMutableDateTime smt = SimpleMutableDateTime . from ( zdt ) ; return smt ; |
public class Crossing {
    /**
     * Sorts a bound array in place. The array is a packed sequence of 4-double records;
     * records are ordered ascending by their first component using selection sort.
     *
     * @param bound the packed records to sort
     * @param bc the number of doubles in use (a multiple of 4)
     */
    protected static void sortBound(double[] bound, int bc) {
        for (int recStart = 0; recStart < bc - 4; recStart += 4) {
            // Find the record with the smallest key in the unsorted tail.
            int minIndex = recStart;
            for (int probe = recStart + 4; probe < bc; probe += 4) {
                if (bound[minIndex] > bound[probe]) {
                    minIndex = probe;
                }
            }
            if (minIndex != recStart) {
                // Swap all four doubles of the two records.
                for (int offset = 0; offset < 4; offset++) {
                    double tmp = bound[recStart + offset];
                    bound[recStart + offset] = bound[minIndex + offset];
                    bound[minIndex + offset] = tmp;
                }
            }
        }
    }
}
public class StorageWriter { /** * Flushes eligible operations to Storage , if necessary . Does not perform any mergers . */
private CompletableFuture < Void > flush ( Void ignored ) { } } | checkRunning ( ) ; long traceId = LoggerHelpers . traceEnterWithContext ( log , this . traceObjectId , "flush" ) ; // Flush everything we can flush .
val flushFutures = this . processors . values ( ) . stream ( ) . filter ( ProcessorCollection :: mustFlush ) . map ( a -> a . flush ( this . config . getFlushTimeout ( ) ) ) . collect ( Collectors . toList ( ) ) ; return Futures . allOfWithResults ( flushFutures ) . thenAcceptAsync ( flushResults -> { FlushStageResult result = new FlushStageResult ( ) ; flushResults . forEach ( result :: withFlushResult ) ; if ( result . getFlushedBytes ( ) + result . getMergedBytes ( ) + result . count > 0 ) { logStageEvent ( "Flush" , result ) ; } LoggerHelpers . traceLeave ( log , this . traceObjectId , "flush" , traceId ) ; } , this . executor ) ; |
public class UUtils { /** * Enable the specified debug level messages to be output . Note that both
* this Logger and whatever Handler you use , have to be set to enable the
* required log level for the handler to output the messages . If this UBench
* code is logging ' stand alone ' then this method will also change the
* output level of the log handlers .
* @ param level The log level to activate for future log levels . */
public static void setLogLevel ( Level level ) { } } | // all other ubench loggers inherit from here .
LOGGER . finer ( "Changing logging from " + LOGGER . getLevel ( ) ) ; LOGGER . setLevel ( level ) ; if ( ! LOGGER . getUseParentHandlers ( ) ) { LOGGER . setLevel ( level ) ; Stream . of ( LOGGER . getHandlers ( ) ) . forEach ( h -> h . setLevel ( level ) ) ; } LOGGER . finer ( "Changed logging to " + LOGGER . getLevel ( ) ) ; |
public class ViewHandler { /** * Clean up the query states after all rows have been consumed . */
private void cleanupViewStates ( ) { } } | completeRequestSpan ( currentRequest ( ) ) ; finishedDecoding ( ) ; viewInfoObservable = null ; viewRowObservable = null ; viewErrorObservable = null ; viewParsingState = QUERY_STATE_INITIAL ; |
public class JavadocResolver { /** * Return JavaDoc URL for class .
* @ param className Class name to find JavaDoc for .
* @ return JavaDoc URL for class . */
private String resolveDocByClass ( String className ) { } } | String urlBase = doc . getProperty ( className ) ; if ( urlBase != null ) { return urlBase + className . replace ( "." , "/" ) + ".html" ; } return null ; |
/**
 * Removes all of the mappings from this map.
 * The map will be empty after this call returns.
 */
public void clear() {
    // Drain the stale-reference queue up front. We don't need to expunge
    // entries individually since the whole table is getting cleared.
    while (queue.poll() != null)
        ;
    modCount++; // structural modification, invalidates iterators
    Arrays.fill(table, null);
    size = 0;
    // Clearing may have caused GC, which may have caused additional
    // entries to go stale. Removing these entries from the reference
    // queue will make them eligible for reclamation.
    while (queue.poll() != null)
        ;
}
public class SqsManager { /** * Check whether < code > ctSource < / code > contains CloudTrail log files .
* @ param ctSource a { @ link CloudTrailSource } .
* @ return < code > true < / code > if contains CloudTrail log files , < code > false < / code > otherwise . */
private boolean containsCloudTrailLogs ( CloudTrailSource ctSource ) { } } | SourceType sourceType = SourceType . valueOf ( ctSource . getSourceAttributes ( ) . get ( SourceAttributeKeys . SOURCE_TYPE . getAttributeKey ( ) ) ) ; switch ( sourceType ) { case CloudTrailLog : return true ; case CloudTrailValidationMessage : logger . warn ( String . format ( "Delete CloudTrail validation message: %s." , ctSource . toString ( ) ) ) ; return false ; case Other : default : logger . info ( String . format ( "Skip Non CloudTrail Log File: %s." , ctSource . toString ( ) ) ) ; return false ; } |
public class SuggestedFixes { /** * Returns a fix that adds a { @ code @ SuppressWarnings ( warningToSuppress ) } to the closest
* suppressible element to the node pointed at by { @ code state . getPath ( ) } , optionally suffixing
* the suppression with a comment suffix ( e . g . a reason for the suppression ) .
* < p > If the closest suppressible element already has a @ SuppressWarning annotation ,
* warningToSuppress will be added to the value in { @ code @ SuppressWarnings } instead .
* < p > In the event that a suppressible element couldn ' t be found ( e . g . : the state is pointing at a
* CompilationUnit , or some other internal inconsistency has occurred ) , or the enclosing
* suppressible element already has a { @ code @ SuppressWarnings } annotation with { @ code
* warningToSuppress } , this method will return null . */
@ Nullable public static Fix addSuppressWarnings ( VisitorState state , String warningToSuppress , @ Nullable String lineComment ) { } } | Builder fixBuilder = SuggestedFix . builder ( ) ; addSuppressWarnings ( fixBuilder , state , warningToSuppress , lineComment ) ; return fixBuilder . isEmpty ( ) ? null : fixBuilder . build ( ) ; |
public class TimeDuration { /** * Returns the difference between this duration and the given .
* @ param duration another duration
* @ return a new duration */
public TimeDuration minus ( TimeDuration duration ) { } } | return createWithCarry ( sec - duration . getSec ( ) , nanoSec - duration . getNanoSec ( ) ) ; |
public class Javalin { /** * Adds a request handler for the specified handlerType and path to the instance .
* This is the method that all the verb - methods ( get / post / put / etc ) call .
* @ see < a href = " https : / / javalin . io / documentation # handlers " > Handlers in docs < / a > */
public Javalin addHandler ( @ NotNull HandlerType httpMethod , @ NotNull String path , @ NotNull Handler handler ) { } } | return addHandler ( httpMethod , path , handler , new HashSet < > ( ) ) ; // no roles set for this route ( open to everyone with default access manager ) |
public class AbstractCache { /** * Helper method to send event when a cache entry is modified .
* @ param event the event to send . */
protected void sendEntryModifiedEvent ( CacheEntryEvent < T > event ) { } } | for ( org . xwiki . cache . event . CacheEntryListener < T > listener : this . cacheEntryListeners . getListeners ( org . xwiki . cache . event . CacheEntryListener . class ) ) { listener . cacheEntryModified ( event ) ; } |
public class FactoryBuilderSupport { /** * A hook to allow nodes to be processed once they have had all of their
* children applied and allows the actual node object that represents the
* Markup element to be changed . < br >
* It will call any registered postNodeCompletionDelegates , if you override
* this method be sure to call this impl at the end of your code .
* @ param node the current node being processed
* @ param parent the parent of the node being processed
* @ return the node , possibly new , that represents the markup element */
protected Object postNodeCompletion ( Object parent , Object node ) { } } | for ( Closure postNodeCompletionDelegate : getProxyBuilder ( ) . getPostNodeCompletionDelegates ( ) ) { ( postNodeCompletionDelegate ) . call ( new Object [ ] { this , parent , node } ) ; } return node ; |
public class EqualsBuilder { /** * Test if two Objects are equal using their equals method .
* @ param objectFieldValue
* the value of a field of the object
* @ param otherFieldValue
* the value of a field of the other object
* @ return used to chain calls */
public EqualsBuilder append ( Object objectFieldValue , Object otherFieldValue ) { } } | if ( equals && ! same ) { delegate . append ( objectFieldValue , otherFieldValue ) ; } return this ; |
public class SQLiteDatabase { /** * Collect statistics about all open databases in the current process .
* Used by bug report . */
static ArrayList < DbStats > getDbStats ( ) { } } | ArrayList < DbStats > dbStatsList = new ArrayList < DbStats > ( ) ; for ( SQLiteDatabase db : getActiveDatabases ( ) ) { db . collectDbStats ( dbStatsList ) ; } return dbStatsList ; |
public class DomainObject { /** * Returns the index of the first occurrence of the specified value
* in the list field , or - 1 if the list field does not contain the value .
* @ param fieldName
* @ param value
* @ return */
public int getIndexOfValue ( String fieldName , Object value ) { } } | int ret = - 1 ; Object val = value ; if ( value instanceof DomainObject ) val = ( ( DomainObject ) value ) . getRawObject ( ) ; Object lst = getFieldValue ( fieldName , true ) ; // internal
if ( lst instanceof List < ? > ) { List < ? > list = ( List < ? > ) lst ; for ( int i = 0 ; i < list . size ( ) ; i ++ ) { Object obj = list . get ( i ) ; if ( obj . equals ( val ) ) { ret = i ; break ; } } } else if ( lst != null && lst . getClass ( ) . isArray ( ) ) { int len = Array . getLength ( lst ) ; for ( int i = 0 ; i < len ; i ++ ) { Object obj = Array . get ( lst , i ) ; if ( obj . equals ( val ) ) { ret = i ; break ; } } } else { if ( ! getDomainObjectType ( ) . getFieldByName ( fieldName ) . isListOrArray ( ) ) throw new RuntimeException ( "field: [" + fieldName + "] is neither list nor array" ) ; if ( lst == null ) throw new RuntimeException ( "list or array field: [" + fieldName + "] is null" ) ; } return ret ; |
/**
 * Resolves the JAX-WS handler classes configured for the EJB endpoint
 * identified by {@code j2eeName}, or {@code null} when no JAX-WS metadata,
 * module info, or matching endpoint can be found in the current context.
 *
 * @see com.ibm.wsspi.ejbcontainer.WSEJBHandlerResolver#retrieveJAXWSHandlers(com.ibm.websphere.csi.J2EEName)
 */
@Override
public List<Class<?>> retrieveJAXWSHandlers(J2EEName j2eeName) {
    // Resolve the JAX-WS module metadata from the current invocation context.
    JaxWsModuleMetaData jaxWsModuleMetaData = JaxWsMetaDataManager.getJaxWsModuleMetaData();
    if (jaxWsModuleMetaData == null) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Unable to get the JaxWsModuleMetaData from current invocation context while querying EJBHandler");
        }
        return null;
    }
    // Adapt the module container to its JAX-WS module info; failure here is
    // a container-level inconsistency, hence IllegalStateException.
    JaxWsModuleInfo jaxWsModuleInfo = null;
    try {
        Container containerToAdapt = jaxWsModuleMetaData.getModuleContainer();
        jaxWsModuleInfo = containerToAdapt.adapt(JaxWsModuleInfo.class);
    } catch (UnableToAdaptException e) {
        throw new IllegalStateException(e);
    }
    if (jaxWsModuleInfo == null) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Unable to get the JaxWsModuleInfo from current module {0} while querying EJBHandler", jaxWsModuleMetaData.getModuleInfo().getName());
        }
        return null;
    }
    // Map the EJB's J2EEName to the endpoint registered for this module.
    JaxWsServerMetaData jaxwsServerMetaData = jaxWsModuleMetaData.getServerMetaData();
    String endpointName = jaxwsServerMetaData.retrieveEndpointName(j2eeName);
    if (endpointName == null) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "No endpoint with j2eeName {0} exists in module {1}", j2eeName, jaxWsModuleMetaData.getModuleInfo().getName());
        }
        return null;
    }
    EndpointInfo endpointInfo = jaxWsModuleInfo.getEndpointInfo(endpointName);
    if (endpointInfo == null) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "No endpoint with endpoint name {0} exists in module {1}", endpointName, jaxWsModuleMetaData.getModuleInfo().getName());
        }
        return null;
    }
    // Load every configured handler class with the application's context
    // class loader; classes that cannot be found are logged and skipped
    // rather than failing the whole chain.
    ClassLoader appContextClassLoader = jaxWsModuleMetaData.getAppContextClassLoader();
    List<HandlerInfo> handlerInfos = endpointInfo.getHandlerChainsInfo().getAllHandlerInfos();
    List<Class<?>> handlerClasses = new ArrayList<Class<?>>(handlerInfos.size());
    for (HandlerInfo handlerInfo : handlerInfos) {
        String handlerClassName = handlerInfo.getHandlerClass();
        try {
            Class<?> handlerClass = appContextClassLoader.loadClass(handlerClassName);
            handlerClasses.add(handlerClass);
        } catch (ClassNotFoundException e) {
            Tr.warning(tc, "warn.could.not.find.handler", handlerClassName, e.getMessage());
        }
    }
    return handlerClasses;
}
/**
 * Setter for TimexValueLE.
 *
 * @generated
 * @param v value to set into the feature
 */
public void setTimexValueLE(String v) {
    // JCas type-system sanity check (standard generated UIMA accessor pattern):
    // fail with a descriptive error if the feature is missing from the type system.
    if (Timex3Interval_Type.featOkTst && ((Timex3Interval_Type) jcasType).casFeat_TimexValueLE == null)
        jcasType.jcas.throwFeatMissing("TimexValueLE", "de.unihd.dbs.uima.types.heideltime.Timex3Interval");
    // Store the value directly into the low-level CAS feature slot.
    jcasType.ll_cas.ll_setStringValue(addr, ((Timex3Interval_Type) jcasType).casFeatCode_TimexValueLE, v);
}
/**
 * Executes a request while tracking BitMEX rate limits.
 * See https://www.bitmex.com/app/restAPI#Request-Rate-Limits
 *
 * <p>Fails fast when the cached limit is exhausted; otherwise runs the
 * supplier, refreshes the cached limits from the X-RateLimit-* response
 * headers (or from 429/403 error responses), and notifies any registered
 * rate-limit listener.
 */
protected <T extends HttpResponseAware> T updateRateLimit(Supplier<T> httpResponseAwareSupplier) {
    // Pre-flight: if we already know the limit is exhausted and the reset
    // time has not passed yet, fail fast instead of hitting the exchange.
    if (rateLimitReset != null) {
        long waitMillis = rateLimitReset * 1000 - System.currentTimeMillis();
        if (rateLimitRemaining <= 0 && waitMillis >= 0) {
            throw new ExchangeException("The request is not executed due to rate limit, please wait for " + (waitMillis / 1000) + " seconds, limit:" + rateLimit + ", reset: " + new Date(rateLimitReset * 1000));
        } else {
            rateLimitRemaining--; // optimistic local bookkeeping until headers arrive
        }
    }
    HttpResponseAware responseAware = null;
    boolean rateLimitsUpdated = false;
    try {
        T result = httpResponseAwareSupplier.get();
        responseAware = result;
        return result;
    } catch (BitmexException e) {
        if (e.getHttpStatusCode() == 429) { // we are warned!
            // Honour Retry-After; best-effort, header may be absent/malformed.
            try {
                Integer retryAfter = Integer.valueOf(e.getResponseHeaders().get("Retry-After").get(0));
                rateLimitRemaining = 0;
                rateLimitReset = System.currentTimeMillis() / 1000 + retryAfter;
                rateLimitsUpdated = true;
            } catch (Throwable ignored) {
            }
        } else if (e.getHttpStatusCode() == 403) { // we are banned now!
            rateLimitRemaining = 0;
            rateLimitReset = System.currentTimeMillis() / 1000 + 5; // lets be quiet for 5 sec
        }
        responseAware = e;
        throw handleError(e);
    } catch (Exception e) {
        throw handleError(e);
    } finally {
        // Refresh the cached limits from the response headers unless the
        // catch block above already set them from 429 handling.
        if (responseAware != null && !rateLimitsUpdated) {
            Map<String, List<String>> responseHeaders = responseAware.getResponseHeaders();
            rateLimit = Integer.valueOf(responseHeaders.get("X-RateLimit-Limit").get(0));
            rateLimitRemaining = Integer.valueOf(responseHeaders.get("X-RateLimit-Remaining").get(0));
            rateLimitReset = Long.valueOf(responseHeaders.get("X-RateLimit-Reset").get(0));
            rateLimitsUpdated = true;
        }
        if (rateLimitsUpdated) {
            RateLimitUpdateListener rateLimitUpdateListener = exchange.getRateLimitUpdateListener();
            if (rateLimitUpdateListener != null) {
                rateLimitUpdateListener.rateLimitUpdate(rateLimit, rateLimitRemaining, rateLimitReset);
            }
        }
    }
}
public class Logger { /** * Issue a formatted log message with a level of INFO .
* @ param t the throwable
* @ param format the format string , as per { @ link String # format ( String , Object . . . ) }
* @ param params the parameters */
public void infof ( Throwable t , String format , Object ... params ) { } } | doLogf ( Level . INFO , FQCN , format , params , t ) ; |
/**
 * Merge all inline CSS and JS elements contained in the source nodes into
 * combined script/style elements.
 *
 * @param aNodes
 *        Source list of nodes. May not be <code>null</code>.
 * @param aOnDocumentReadyProvider
 *        If not <code>null</code> all combined document.ready() scripts are
 *        kept as document.ready() scripts using this provider. If
 *        <code>null</code> all document.ready() scripts are converted to
 *        regular scripts and executed after all other scripts. For AJAX
 *        calls this should be <code>null</code> as there is no
 *        "document ready" callback.
 * @return Target list. Inline JS/CSS that comes before files is first, then
 *         the external CSS/JS and other elements, finally the inline JS/CSS
 *         nodes to be emitted after the files.
 */
@Nonnull
@ReturnsMutableCopy
public static ICommonsList<IHCNode> getMergedInlineCSSAndJSNodes(@Nonnull final Iterable<? extends IHCNode> aNodes,
                                                                 @Nullable final IHCOnDocumentReadyProvider aOnDocumentReadyProvider) {
    ValueEnforcer.notNull(aNodes, "Nodes");
    // Apply all modifiers
    final Iterable<? extends IHCNode> aRealSpecialNodes = applyModifiers(aNodes);
    // Do standard aggregations of CSS and JS: collect inline code into
    // before-files / after-files buckets, keeping everything else as-is.
    final ICommonsList<IHCNode> ret = new CommonsArrayList<>();
    final CollectingJSCodeProvider aJSOnDocumentReadyBefore = new CollectingJSCodeProvider();
    final CollectingJSCodeProvider aJSOnDocumentReadyAfter = new CollectingJSCodeProvider();
    final CollectingJSCodeProvider aJSInlineBefore = new CollectingJSCodeProvider();
    final CollectingJSCodeProvider aJSInlineAfter = new CollectingJSCodeProvider();
    final InlineCSSList aCSSInlineBefore = new InlineCSSList();
    final InlineCSSList aCSSInlineAfter = new InlineCSSList();
    for (final IHCNode aNode : aRealSpecialNodes) {
        // Note: do not unwrap the node, because it is not allowed to merge JS/CSS
        // with a conditional comment with JS/CSS without a conditional comment!
        if (HCJSNodeDetector.isDirectJSInlineNode(aNode)) {
            // Check HCScriptInlineOnDocumentReady first, because it is a subclass
            // of IHCScriptInline
            if (aNode instanceof HCScriptInlineOnDocumentReady) {
                // Inline JS to run on document.ready()
                final HCScriptInlineOnDocumentReady aScript = (HCScriptInlineOnDocumentReady) aNode;
                (aScript.isEmitAfterFiles() ? aJSOnDocumentReadyAfter : aJSOnDocumentReadyBefore).appendFlattened(aScript.getOnDocumentReadyCode());
            } else {
                // Regular inline JS
                final IHCScriptInline<?> aScript = (IHCScriptInline<?>) aNode;
                (aScript.isEmitAfterFiles() ? aJSInlineAfter : aJSInlineBefore).appendFlattened(aScript.getJSCodeProvider());
            }
        } else if (HCCSSNodeDetector.isDirectCSSInlineNode(aNode)) {
            // Inline CSS, grouped later by media list
            final HCStyle aStyle = (HCStyle) aNode;
            (aStyle.isEmitAfterFiles() ? aCSSInlineAfter : aCSSInlineBefore).addInlineCSS(aStyle.getMedia(), aStyle.getStyleContent());
        } else {
            // Expected here: HCLink, HCScriptFile, HCConditionalCommentNode
            if (!(aNode instanceof HCLink) && !(aNode instanceof HCScriptFile) && !(aNode instanceof IHCConditionalCommentNode))
                LOGGER.warn("Found unexpected node to merge inline CSS/JS: " + aNode);
            // Add always!
            // These nodes are either file based nodes or conditional comment nodes
            ret.add(aNode);
        }
    }
    // on-document-ready JS always as last inline JS!
    if (!aJSOnDocumentReadyBefore.isEmpty())
        if (aOnDocumentReadyProvider != null)
            aJSInlineBefore.append(aOnDocumentReadyProvider.createOnDocumentReady(aJSOnDocumentReadyBefore));
        else
            aJSInlineBefore.append(aJSOnDocumentReadyBefore);
    if (!aJSOnDocumentReadyAfter.isEmpty())
        if (aOnDocumentReadyProvider != null)
            aJSInlineAfter.append(aOnDocumentReadyProvider.createOnDocumentReady(aJSOnDocumentReadyAfter));
        else
            aJSInlineAfter.append(aJSOnDocumentReadyAfter);
    // Finally add the merged inline JS
    if (!aJSInlineBefore.isEmpty()) {
        // Add at the beginning
        final HCScriptInline aScript = new HCScriptInline(aJSInlineBefore).setEmitAfterFiles(false);
        aScript.internalSetNodeState(EHCNodeState.RESOURCES_REGISTERED);
        ret.add(0, aScript);
    }
    if (!aJSInlineAfter.isEmpty()) {
        // Add at the end
        final HCScriptInline aScript = new HCScriptInline(aJSInlineAfter).setEmitAfterFiles(true);
        aScript.internalSetNodeState(EHCNodeState.RESOURCES_REGISTERED);
        ret.add(aScript);
    }
    // Add all merged inline CSSs grouped by their media list
    if (aCSSInlineBefore.isNotEmpty()) {
        // Add at the beginning, preserving group order
        int nIndex = 0;
        for (final ICSSCodeProvider aEntry : aCSSInlineBefore.getAll()) {
            final HCStyle aStyle = new HCStyle(aEntry.getCSSCode()).setMedia(aEntry.getMediaList()).setEmitAfterFiles(false);
            aStyle.internalSetNodeState(EHCNodeState.RESOURCES_REGISTERED);
            ret.add(nIndex, aStyle);
            ++nIndex;
        }
    }
    if (aCSSInlineAfter.isNotEmpty()) {
        // Add at the end
        for (final ICSSCodeProvider aEntry : aCSSInlineAfter.getAll()) {
            final HCStyle aStyle = new HCStyle(aEntry.getCSSCode()).setMedia(aEntry.getMediaList()).setEmitAfterFiles(true);
            aStyle.internalSetNodeState(EHCNodeState.RESOURCES_REGISTERED);
            ret.add(aStyle);
        }
    }
    return ret;
}
/**
 * Sets the ObjCName attribute and fires an EMF SET notification.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setObjCName(String newObjCName) {
    String oldObjCName = objCName;
    objCName = newObjCName;
    // Standard EMF notification: only fire when someone is listening.
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.EOC__OBJ_CNAME, oldObjCName, objCName));
}
public class BridgeState { /** * if there are exactly 2 members in the bridge , return a BridgeEvent
* @ param event
* @ return */
ManagerEvent addMember ( BridgeEnterEvent event ) { } } | List < BridgeEnterEvent > remaining = null ; synchronized ( members ) { if ( members . put ( event . getChannel ( ) , event ) == null && members . size ( ) == 2 ) { remaining = new ArrayList < > ( members . values ( ) ) ; } } if ( remaining == null ) { return null ; } logger . info ( "Members size " + remaining . size ( ) + " " + event ) ; BridgeEvent bridgeEvent = buildBridgeEvent ( BridgeEvent . BRIDGE_STATE_LINK , remaining ) ; logger . info ( "Bridge " + bridgeEvent . getChannel1 ( ) + " " + bridgeEvent . getChannel2 ( ) ) ; return bridgeEvent ; |
public class QDate { /** * XXX : buggy ( Because cal is buggy ) , may have to implement the sdf */
public String printLocaleDate ( ) { } } | _date . setTime ( _localTimeOfEpoch ) ; // SimpleDateFormat sdf = new SimpleDateFormat ( ) ;
// System . out . println ( " " + sdf . toPattern ( ) ) ;
if ( _dateFormat == null ) _dateFormat = DateFormat . getInstance ( ) ; return _dateFormat . format ( _date ) ; |
public class SecurityUtils { /** * Encodes / obfuscates a password . Although this does not prevent actual
* hacking of password , it does remove the obvious threats of having
* passwords stored as clear text .
* @ param password
* @ return a String containing the encoded password */
public static String encodePassword ( final char [ ] password ) { } } | if ( password == null ) { return null ; } final EncodedStringConverter converter = new EncodedStringConverter ( ) ; return converter . toString ( new String ( password ) ) ; |
public class DefaultConfigurationProvider { /** * Loading a map from preferences , using a prefix for the prefs keys
* @ since 5.6.5
* @ param pPrefs
* @ param pMap
* @ param pPrefix */
private static void load ( final SharedPreferences pPrefs , final Map < String , String > pMap , final String pPrefix ) { } } | // potential fix for # 1079 https : / / github . com / osmdroid / osmdroid / issues / 1079
if ( pPrefix == null || pMap == null ) return ; pMap . clear ( ) ; for ( final String key : pPrefs . getAll ( ) . keySet ( ) ) { if ( key != null && key . startsWith ( pPrefix ) ) { pMap . put ( key . substring ( pPrefix . length ( ) ) , pPrefs . getString ( key , null ) ) ; } } |
public class ReservoirLongsUnion { /** * Present this union with a long .
* @ param datum The given long datum . */
public void update ( final long datum ) { } } | if ( gadget_ == null ) { gadget_ = ReservoirLongsSketch . newInstance ( maxK_ ) ; } gadget_ . update ( datum ) ; |
public class NewJFrame { /** * Validate and set the datetime field on the screen given a date .
* @ param dateTime The datetime object */
public void setDateTime ( Date dateTime ) { } } | String dateTimeString = "" ; if ( dateTime != null ) dateTimeString = dateTimeFormat . format ( dateTime ) ; jTextField4 . setText ( dateTimeString ) ; jCalendarButton2 . setTargetDate ( dateTime ) ; jTimeButton2 . setTargetDate ( dateTime ) ; |
public class Try { /** * Retry a transformation if it fails . Default settings are to retry up to 7
* times , with an doubling backoff period starting @ 2 seconds delay before
* retry .
* @ param fn
* Function to retry if fails */
public < R > Try < R , Throwable > retry ( final Function < ? super T , ? extends R > fn ) { } } | return retry ( fn , 7 , 2 , TimeUnit . SECONDS ) ; |
public class CodedConstant { /** * Write object .
* @ param out the out
* @ param order the order
* @ param type the type
* @ param o the o
* @ param list the list
* @ throws IOException Signals that an I / O exception has occurred . */
public static void writeObject ( CodedOutputStream out , int order , FieldType type , Object o , boolean list ) throws IOException { } } | writeObject ( out , order , type , o , list , true ) ; |
public class AWSGlueClient { /** * Deletes a specified version of a table .
* @ param deleteTableVersionRequest
* @ return Result of the DeleteTableVersion operation returned by the service .
* @ throws EntityNotFoundException
* A specified entity does not exist
* @ throws InvalidInputException
* The input provided was not valid .
* @ throws InternalServiceException
* An internal service error occurred .
* @ throws OperationTimeoutException
* The operation timed out .
* @ sample AWSGlue . DeleteTableVersion
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / DeleteTableVersion " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DeleteTableVersionResult deleteTableVersion ( DeleteTableVersionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteTableVersion ( request ) ; |
public class PropertyConfigLoader { /** * Get from config defined property files in classpath .
* @ param filePath property file path in classpath
* @ param configKey configkey
* @ return config value defined in property file */
public static Object get ( String filePath , String configKey ) { } } | if ( propertiesMap . containsKey ( filePath ) == false ) { loadProperty ( filePath ) ; } Properties properties = propertiesMap . get ( filePath ) ; if ( properties == null ) { return null ; } Object resultConfig = properties . getProperty ( configKey ) ; return resultConfig ; |
public class DecimalMinValidator { /** * { @ inheritDoc } */
@ Override public boolean isValid ( T value ) { } } | return value == null || value . doubleValue ( ) >= minValue . doubleValue ( ) ; |
public class StringParser { /** * Parse the given { @ link String } as { @ link Long } with radix
* { @ link # DEFAULT _ RADIX } .
* @ param sStr
* The string to parse . May be < code > null < / code > .
* @ param aDefault
* The default value to be returned if the passed string could not be
* converted to a valid value . May be < code > null < / code > .
* @ return < code > aDefault < / code > if the string does not represent a valid
* value . */
@ Nullable public static Long parseLongObj ( @ Nullable final String sStr , @ Nullable final Long aDefault ) { } } | return parseLongObj ( sStr , DEFAULT_RADIX , aDefault ) ; |
public class CmsResourceHistoryTable { /** * Adds a table column . < p > */
private void addSizeColumn ( ) { } } | Column < CmsHistoryResourceBean , ? > col = new TextColumn < CmsHistoryResourceBean > ( ) { @ Override public String getValue ( CmsHistoryResourceBean historyRes ) { return "" + historyRes . getSize ( ) ; } } ; addColumn ( col , CmsHistoryMessages . columnSize ( ) ) ; setColumnWidth ( col , 100 , Unit . PX ) ; |
public class Gauge { /** * Defines the color that will be used to colorize the subTitle
* of the gauge .
* @ param COLOR */
public void setSubTitleColor ( final Color COLOR ) { } } | if ( null == subTitleColor ) { _subTitleColor = COLOR ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { subTitleColor . set ( COLOR ) ; } |
/**
 * Marshall the given parameter object.
 *
 * @throws SdkClientException when the request is null or marshalling fails
 */
public void marshall(DetectKeyPhrasesRequest detectKeyPhrasesRequest, ProtocolMarshaller protocolMarshaller) {
    if (detectKeyPhrasesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Marshall each member against its pre-built binding descriptor.
        protocolMarshaller.marshall(detectKeyPhrasesRequest.getText(), TEXT_BINDING);
        protocolMarshaller.marshall(detectKeyPhrasesRequest.getLanguageCode(), LANGUAGECODE_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
public class AgreementsInner { /** * Creates or updates an integration account agreement .
* @ param resourceGroupName The resource group name .
* @ param integrationAccountName The integration account name .
* @ param agreementName The integration account agreement name .
* @ param agreement The integration account agreement .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the IntegrationAccountAgreementInner object */
public Observable < IntegrationAccountAgreementInner > createOrUpdateAsync ( String resourceGroupName , String integrationAccountName , String agreementName , IntegrationAccountAgreementInner agreement ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , integrationAccountName , agreementName , agreement ) . map ( new Func1 < ServiceResponse < IntegrationAccountAgreementInner > , IntegrationAccountAgreementInner > ( ) { @ Override public IntegrationAccountAgreementInner call ( ServiceResponse < IntegrationAccountAgreementInner > response ) { return response . body ( ) ; } } ) ; |
public class ParserRegistry { /** * Serializes a single configuration to an OutputStream
* @ param os
* @ param name
* @ param configuration */
public void serialize ( OutputStream os , String name , Configuration configuration ) throws XMLStreamException { } } | serialize ( os , null , Collections . singletonMap ( name , configuration ) ) ; |
/**
 * {@inheritDoc}
 */
public DynamicType.Builder<T> visit(AsmVisitorWrapper asmVisitorWrapper) {
    // Immutable builder pattern: return a copy whose visitor wrapper is the
    // composition of the existing wrapper and the supplied one; every other
    // piece of configuration is carried over unchanged (argument order matters).
    return new DecoratingDynamicTypeBuilder<T>(instrumentedType,
            typeAttributeAppender,
            new AsmVisitorWrapper.Compound(this.asmVisitorWrapper, asmVisitorWrapper),
            classFileVersion,
            auxiliaryTypeNamingStrategy,
            annotationValueFilterFactory,
            annotationRetention,
            implementationContextFactory,
            methodGraphCompiler,
            typeValidation,
            classWriterStrategy,
            ignoredMethods,
            auxiliaryTypes,
            classFileLocator);
}
public class ServletUtil { /** * 从多级反向代理中获得第一个非unknown IP地址
* @ param ip 获得的IP地址
* @ return 第一个非unknown IP地址 */
private static String getMultistageReverseProxyIp ( String ip ) { } } | // 多级反向代理检测
if ( ip != null && ip . indexOf ( "," ) > 0 ) { final String [ ] ips = ip . trim ( ) . split ( "," ) ; for ( String subIp : ips ) { if ( false == isUnknow ( subIp ) ) { ip = subIp ; break ; } } } return ip ; |
public class GeneralFactory { /** * 获取带负载均衡的ThriftClient对象 */
@ Override @ SneakyThrows @ SuppressWarnings ( "rawtypes" ) public ThriftClient getThriftClient ( List < ServerInfo > serverInfoList , LoadBalance loadBalance , String param ) { } } | Class [ ] paramTypes = { List . class , String . class } ; Object [ ] params = { serverInfoList , param } ; return new LoadBalanceThriftClientImpl ( ( LoadBalance ) loadBalance . getClass ( ) . getConstructor ( paramTypes ) . newInstance ( params ) , thriftClientConfiguration ) ; |
/**
 * Reads a GIF image from a byte array.
 *
 * @param data byte array containing the GIF file; tolerated as {@code null}
 * @return read status code (0 = no errors)
 */
synchronized int read(byte[] data) {
    // Parse the header (logical screen descriptor, global color table) first;
    // the parser tolerates null data and records the failure in its status.
    this.header = getHeaderParser().setData(data).parseHeader();
    if (data != null) {
        // Only wire up frame data when actual bytes were supplied.
        setData(header, data);
    }
    return status;
}
public class PatternBeanDefinitionParser { /** * / * static helper */
static Tuple2 < String , String > doParseAndGetPattern ( Element element , ParserContext parserContext , BeanDefinitionBuilder builder ) { } } | String id = Check . ifNullOrBlank ( element . getAttribute ( "id" ) , UUID . randomUUID ( ) . toString ( ) ) ; String pattern = element . getTextContent ( ) ; Sitemap . registerPattern ( id , pattern , parserContext ) ; return new Tuple2 < String , String > ( id , pattern ) ; |
public class AbstractRedirectResourceDetails { /** * Extract a redirect uri from the resource and / or the current request .
* @ param request the current { @ link DefaultAccessTokenRequest }
* @ return a redirect uri if one can be established */
public String getRedirectUri ( AccessTokenRequest request ) { } } | String redirectUri = request . getFirst ( "redirect_uri" ) ; if ( redirectUri == null && request . getCurrentUri ( ) != null && useCurrentUri ) { redirectUri = request . getCurrentUri ( ) ; } if ( redirectUri == null && getPreEstablishedRedirectUri ( ) != null ) { // Override the redirect _ uri if it is pre - registered
redirectUri = getPreEstablishedRedirectUri ( ) ; } return redirectUri ; |
public class MessageProcessorControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPMessageProcessorControllable # getForeignDestinationIterator ( ) */
public SIMPIterator getForeignDestinationIterator ( ) { } } | DestinationTypeFilter filter = new DestinationTypeFilter ( ) ; filter . FOREIGN_DESTINATION = Boolean . TRUE ; SIMPIterator destItr = destinationIndex . iterator ( filter ) ; return new ControllableIterator ( destItr ) ; |
/**
 * Import accounts from a flatfile.
 *
 * @param sender The command sender so we can send back messages.
 * @return True if the convert is done. Else false.
 */
private boolean importFlatFile(String sender) {
    boolean result = false;
    try {
        // Read the whole flat file into memory, one account record per line.
        List<String> file = new ArrayList<>();
        String str;
        while ((str = flatFileReader.readLine()) != null) {
            file.add(str);
        }
        flatFileReader.close();
        List<User> userList = new ArrayList<>();
        for (String aFile : file) {
            // Expected record shape: "<name> <key>:<balance>" (space-separated);
            // malformed lines are logged and skipped, not fatal.
            String[] info = aFile.split(" ");
            try {
                double balance = Double.parseDouble(info[1].split(":")[1]);
                userList.add(new User(info[0], balance));
            } catch (NumberFormatException e) {
                Common.getInstance().sendConsoleMessage(Level.SEVERE, "User " + info[0] + " have a invalid balance" + info[1]);
            } catch (ArrayIndexOutOfBoundsException e) {
                Common.getInstance().sendConsoleMessage(Level.WARNING, "Line not formatted correctly. I read:" + Arrays.toString(info));
            }
        }
        addAccountToString(sender, userList);
        result = true;
    } catch (IOException e) {
        Common.getInstance().getLogger().severe("A error occured while reading the iConomy database file! Message: " + e.getMessage());
    }
    return result;
}
public class POEditorClient { /** * Uploads a translation file . For the moment it only takes terms into account .
* @ param projectId id of the project
* @ param translationFile terms file to upload
* @ param allTags - for the all the imported terms
* @ param newTags - for the terms which aren ' t already in the project
* @ param obsoleteTags - for the terms which are in the project but not in the imported file and " overwritten _ translations "
* @ return UploadDetails */
public UploadDetails uploadTerms ( String projectId , File translationFile , String [ ] allTags , String [ ] newTags , String [ ] obsoleteTags ) { } } | Map < String , String [ ] > tags = new HashMap < String , String [ ] > ( ) ; if ( allTags != null ) { tags . put ( "all" , allTags ) ; } if ( newTags != null ) { tags . put ( "new" , newTags ) ; } if ( obsoleteTags != null ) { tags . put ( "obsolete" , obsoleteTags ) ; } String tagsStr = new Gson ( ) . toJson ( tags ) ; TypedFile typedFile = new TypedFile ( "application/xml" , translationFile ) ; UploadResponse ur = service . upload ( "upload" , apiKey , projectId , "terms" , typedFile , null , "0" , tagsStr ) ; ApiUtils . checkResponse ( ur . response ) ; return ur . details ; |
public class AWSGlueClient { /** * Resets a bookmark entry .
* @ param resetJobBookmarkRequest
* @ return Result of the ResetJobBookmark operation returned by the service .
* @ throws EntityNotFoundException
* A specified entity does not exist
* @ throws InvalidInputException
* The input provided was not valid .
* @ throws InternalServiceException
* An internal service error occurred .
* @ throws OperationTimeoutException
* The operation timed out .
* @ sample AWSGlue . ResetJobBookmark
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / ResetJobBookmark " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ResetJobBookmarkResult resetJobBookmark ( ResetJobBookmarkRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeResetJobBookmark ( request ) ; |
public class Ingest {

    /**
     * Continue to ingest transaction logs until the currentState is no longer
     * INGEST. If lastScan is set to true, then we process till the end of the
     * file and return.
     *
     * Outer loop: (re)open/refresh the input stream when needed. Inner loop:
     * read one transaction at a time under the namesystem write lock, verify
     * txids, replay into the local edit log, and handle segment open/close
     * markers. Statement order here is load-bearing — do not reorder.
     *
     * @return total number of transactions consumed
     * @throws IOException if a final scan ends on an incomplete transaction
     *                     (proceeding would corrupt the image)
     */
    int ingestFSEdits () throws IOException {
        FSDirectory fsDir = fsNamesys.dir;
        int numEdits = 0;
        // Offsets of recently seen opcodes, used to build a helpful error message.
        long recentOpcodeOffsets[] = new long[2];
        Arrays.fill (recentOpcodeOffsets, -1);
        EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts =
            new EnumMap<FSEditLogOpCodes, Holder<Integer>> (FSEditLogOpCodes.class);
        boolean error = false;
        boolean reopen = false;
        boolean quitAfterScan = false;
        long sharedLogTxId = FSEditLogLoader.TXID_IGNORE;
        long localLogTxId = FSEditLogLoader.TXID_IGNORE;
        FSEditLogOp op = null;
        FSEditLog localEditLog = fsDir.fsImage.getEditLog ();
        while (running && !quitAfterScan) {
            // If the application requested that we make a final pass over the
            // transaction log, then we remember it here. We close and reopen the
            // file to ensure that we can see all the data in the file, one reason
            // being that NFS has open-to-close cache coherency and the edit log
            // could be stored in NFS.
            if (reopen || lastScan) {
                inputEditStream.close ();
                inputEditStream = standby.setupIngestStreamWithRetries (startTxId);
                if (lastScan) {
                    // QUIESCE requested by Standby thread
                    LOG.info ("Ingest: Starting last scan of transaction log: " + this.toString ());
                    quitAfterScan = true;
                }
                // Discard older buffers and start a fresh one.
                inputEditStream.refresh (currentPosition, localEditLog.getLastWrittenTxId ());
                setCatchingUp ();
                reopen = false;
            }
            // Process all existing transactions till end of file.
            while (running) {
                if (lastScan && !quitAfterScan) {
                    // Standby thread informed the ingest to quiesce:
                    // we should refresh the input stream as soon as possible,
                    // then quitAfterScan will be true.
                    break;
                }
                // Record the current file offset.
                currentPosition = inputEditStream.getPosition ();
                InjectionHandler.processEvent (InjectionEvent.INGEST_BEFORE_LOAD_EDIT);
                fsNamesys.writeLock ();
                try {
                    error = false;
                    op = ingestFSEdit (inputEditStream);
                    /*
                     * In the case of segments recovered on primary namenode
                     * startup, we have segments that are finalized (by name) but
                     * not containing the ending transaction. Without this check,
                     * we will keep looping until the next checkpoint to discover
                     * this situation.
                     */
                    if (!inputEditStream.isInProgress ()
                            && standby.getLastCorrectTxId () == inputEditStream.getLastTxId ()) {
                        // This is a correct segment with no end-segment transaction.
                        LOG.info ("Ingest: Reached finalized log segment end with no end marker. " + this.toString ());
                        tearDown (localEditLog, false, true);
                        break;
                    }
                    if (op == null) {
                        FSNamesystem.LOG.debug ("Ingest: Invalid opcode, reached end of log "
                            + "Number of transactions found " + numEdits);
                        break; // No more transactions.
                    }
                    sharedLogTxId = op.txid;
                    // Verify transaction ids match.
                    localLogTxId = localEditLog.getLastWrittenTxId () + 1;
                    // Fatal error only when the log contains transactions from the
                    // future; we allow processing a transaction with a smaller txid
                    // than local — we will simply skip it later after reading from
                    // the ingest edits.
                    if (localLogTxId < sharedLogTxId
                            || InjectionHandler.falseCondition (InjectionEvent.INGEST_TXID_CHECK)) {
                        String message = "The transaction id in the edit log : " + sharedLogTxId
                            + " does not match the transaction id inferred" + " from FSIMAGE : " + localLogTxId;
                        LOG.fatal (message);
                        throw new RuntimeException (message);
                    }
                    // Skip previously loaded transactions.
                    if (!canApplyTransaction (sharedLogTxId, localLogTxId, op))
                        continue;
                    // For recovery, we do not want to re-load transactions,
                    // but we want to populate the local log with them.
                    if (shouldLoad (sharedLogTxId)) {
                        FSEditLogLoader.loadEditRecord (logVersion, inputEditStream,
                            recentOpcodeOffsets, opCounts, fsNamesys, fsDir, numEdits, op);
                    }
                    LOG.info ("Ingest: " + this.toString ()
                        + ", size: " + inputEditStream.length ()
                        + ", processing transaction at offset: " + currentPosition
                        + ", txid: " + op.txid
                        + ", opcode: " + op.opCode);
                    if (op.opCode == FSEditLogOpCodes.OP_START_LOG_SEGMENT) {
                        LOG.info ("Ingest: Opening log segment: " + this.toString ());
                        localEditLog.open ();
                    } else if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) {
                        InjectionHandler.processEventIO (InjectionEvent.INGEST_CLEAR_STANDBY_STATE);
                        LOG.info ("Ingest: Closing log segment: " + this.toString ());
                        tearDown (localEditLog, true, true);
                        numEdits++;
                        LOG.info ("Ingest: Reached log segment end. " + this.toString ());
                        break;
                    } else {
                        localEditLog.logEdit (op);
                        if (inputEditStream.getReadChecksum () != FSEditLog.getChecksumForWrite ().getValue ()) {
                            throw new IOException ("Ingest: mismatched r/w checksums for transaction #" + numEdits);
                        }
                    }
                    numEdits++;
                    standby.setLastCorrectTxId (op.txid);
                } catch (ChecksumException cex) {
                    LOG.info ("Checksum error reading the transaction #" + numEdits + " reopening the file");
                    reopen = true;
                    break;
                } catch (IOException e) {
                    LOG.info ("Encountered error reading transaction", e);
                    error = true; // If we haven't reached eof, then error.
                    break;
                } finally {
                    if (localEditLog.isOpen ()) {
                        localEditLog.logSyncIfNeeded ();
                    }
                    fsNamesys.writeUnlock ();
                }
            } // end inner while (running) — all breaks come here

            // If we failed to read the entire transaction from disk, then roll
            // back to the offset where there was a last good read, sleep for some
            // time for a new transaction to appear in the file, and continue.
            if (error || running) {
                // Discard older buffers and start a fresh one.
                inputEditStream.refresh (currentPosition, localEditLog.getLastWrittenTxId ());
                setCatchingUp ();
                if (error) {
                    LOG.info ("Ingest: Incomplete transaction record at offset " + inputEditStream.getPosition ()
                        + " but the file is of size " + inputEditStream.length ()
                        + ". Continuing....");
                }
                if (running && !lastScan) {
                    try {
                        Thread.sleep (100); // sleep for a second
                    } catch (InterruptedException e) {
                        // Break out of waiting if we receive an interrupt.
                    }
                }
            }
        } // end outer while (running)

        ///// FINAL ACTIONS /////

        // This was the last scan of the file but we could not read a full
        // transaction from disk. If we proceed this will corrupt the image.
        if (error) {
            String errorMessage = FSEditLogLoader.getErrorMessage (recentOpcodeOffsets, currentPosition);
            LOG.error (errorMessage);
            throw new IOException ("Failed to read the edits log. " + "Incomplete transaction at " + currentPosition);
        }
        // If the last scan was completed, then stop the Ingest thread.
        if (lastScan && quitAfterScan) {
            LOG.info ("Ingest: lastScan completed. " + this.toString ());
            running = false;
            if (localEditLog.isOpen ()) {
                // Quiesced non-finalized segment.
                LOG.info ("Ingest: Reached non-finalized log segment end. " + this.toString ());
                tearDown (localEditLog, false, localLogTxId != startTxId);
            }
        }
        FSEditLogLoader.dumpOpCounts (opCounts);
        return numEdits; // total transactions consumed
    }
}
public class NetUtils { /** * Util method to build socket addr from either :
* < host >
* < host > : < post >
* < fs > : / / < host > : < port > / < path > */
public static InetSocketAddress createSocketAddr ( String target , int defaultPort ) { } } | // Format [ IPv6 Address ] : Port
int colonIndex = target . indexOf ( ':' ) ; if ( colonIndex < 0 && defaultPort == - 1 ) { throw new RuntimeException ( "Not a host:port pair: " + target ) ; } String hostname ; int port = - 1 ; if ( ! target . contains ( "/" ) ) { target = "hdfs://" + target ; } // a new uri
URI addr = new Path ( target ) . toUri ( ) ; hostname = addr . getHost ( ) ; port = addr . getPort ( ) ; if ( port == - 1 ) { port = defaultPort ; } if ( getStaticResolution ( hostname ) != null ) { hostname = getStaticResolution ( hostname ) ; } return InetSocketAddressFactory . createWithResolveRetry ( hostname , port ) ; |
public class DefaultUsageFormatter { /** * Prints the usage to { @ link JCommander # getConsole ( ) } on the underlying commander instance . */
public final void usage ( String commandName ) { } } | StringBuilder sb = new StringBuilder ( ) ; usage ( commandName , sb ) ; commander . getConsole ( ) . println ( sb . toString ( ) ) ; |
public class HtmlColumn { /** * < p > Return the value of the < code > headerClass < / code > property . < / p >
* < p > Contents : Space - separated list of CSS style class ( es ) that will be
* applied to any header generated for this column . */
public java . lang . String getHeaderClass ( ) { } } | return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . headerClass ) ; |
public class RSAUtils {

    /**
     * Builds an RSA public key from a modulus and a public exponent
     * (both given as decimal strings).
     *
     * NOTE (translated from the original): the related cipher code relies on the
     * JDK's default padding (RSA/None/PKCS1Padding); the default differs across
     * runtimes — e.g. Android defaults to RSA/None/NoPadding.
     *
     * NOTE(review): the catch-all swallows every failure (bad algorithm name,
     * malformed numbers, invalid key spec), prints the stack trace and returns
     * null — callers must null-check. Consider narrowing the catch; left as-is
     * because callers may rely on the null-on-failure contract.
     *
     * @param modulus  RSA modulus as a decimal string
     * @param exponent RSA public exponent as a decimal string
     * @return the public key, or {@code null} if construction fails
     */
    public static RSAPublicKey getPublicKey (String modulus, String exponent) {
        try {
            BigInteger b1 = new BigInteger (modulus);
            BigInteger b2 = new BigInteger (exponent);
            // KEY_ALGORITHM is presumably "RSA" — declared elsewhere in this class.
            KeyFactory keyFactory = KeyFactory.getInstance (KEY_ALGORITHM);
            RSAPublicKeySpec keySpec = new RSAPublicKeySpec (b1, b2);
            return (RSAPublicKey) keyFactory.generatePublic (keySpec);
        } catch (Exception e) {
            e.printStackTrace ();
            return null;
        }
    }
}
public class FctBnAccEntitiesProcessors { /** * < p > Get PrcGoodsLossLineSave ( create and put into map ) . < / p >
* @ param pAddParam additional param
* @ return requested PrcGoodsLossLineSave
* @ throws Exception - an exception */
protected final PrcGoodsLossLineSave < RS > lazyGetPrcGoodsLossLineSave ( final Map < String , Object > pAddParam ) throws Exception { } } | @ SuppressWarnings ( "unchecked" ) PrcGoodsLossLineSave < RS > proc = ( PrcGoodsLossLineSave < RS > ) this . processorsMap . get ( PrcGoodsLossLineSave . class . getSimpleName ( ) ) ; if ( proc == null ) { proc = new PrcGoodsLossLineSave < RS > ( ) ; proc . setSrvAccSettings ( getSrvAccSettings ( ) ) ; proc . setSrvOrm ( getSrvOrm ( ) ) ; proc . setSrvWarehouseEntry ( getSrvWarehouseEntry ( ) ) ; proc . setSrvCogsEntry ( getSrvCogsEntry ( ) ) ; proc . setSrvDatabase ( getSrvDatabase ( ) ) ; // assigning fully initialized object :
this . processorsMap . put ( PrcGoodsLossLineSave . class . getSimpleName ( ) , proc ) ; } return proc ; |
public class Session { /** * 在事务中执行操作 , 通过实现 { @ link VoidFunc0 } 接口的call方法执行多条SQL语句从而完成事务
* @ param func 函数抽象 , 在函数中执行多个SQL操作 , 多个操作会被合并为同一事务
* @ throws SQLException
* @ since 3.2.3 */
public void tx ( VoidFunc1 < Session > func ) throws SQLException { } } | try { beginTransaction ( ) ; func . call ( this ) ; commit ( ) ; } catch ( Throwable e ) { quietRollback ( ) ; throw ( e instanceof SQLException ) ? ( SQLException ) e : new SQLException ( e ) ; } |
public class HttpChannelPool { /** * A low - level operation that triggers a new connection attempt . Used only by :
* < ul >
* < li > { @ link # connect ( SessionProtocol , PoolKey , CompletableFuture , ClientConnectionTimingsBuilder ) } -
* The pool has been exhausted . < / li >
* < li > { @ link HttpSessionHandler } - HTTP / 2 upgrade has failed . < / li >
* < / ul > */
void connect ( SocketAddress remoteAddress , SessionProtocol desiredProtocol , Promise < Channel > sessionPromise ) { } } | final Bootstrap bootstrap = getBootstrap ( desiredProtocol ) ; final ChannelFuture connectFuture = bootstrap . connect ( remoteAddress ) ; connectFuture . addListener ( ( ChannelFuture future ) -> { if ( future . isSuccess ( ) ) { initSession ( desiredProtocol , future , sessionPromise ) ; } else { sessionPromise . setFailure ( future . cause ( ) ) ; } } ) ; |
public class ListSchemaExtensionsResult { /** * Information about the schema extensions applied to the directory .
* @ return Information about the schema extensions applied to the directory . */
public java . util . List < SchemaExtensionInfo > getSchemaExtensionsInfo ( ) { } } | if ( schemaExtensionsInfo == null ) { schemaExtensionsInfo = new com . amazonaws . internal . SdkInternalList < SchemaExtensionInfo > ( ) ; } return schemaExtensionsInfo ; |
public class ParquetInputFormat { /** * Generates and returns the read schema based on the projected fields for a given file .
* @ param fileSchema The schema of the given file .
* @ param filePath The path of the given file .
* @ return The read schema based on the given file ' s schema and the projected fields . */
private MessageType getReadSchema ( MessageType fileSchema , Path filePath ) { } } | RowTypeInfo fileTypeInfo = ( RowTypeInfo ) ParquetSchemaConverter . fromParquetType ( fileSchema ) ; List < Type > types = new ArrayList < > ( ) ; for ( int i = 0 ; i < fieldNames . length ; ++ i ) { String readFieldName = fieldNames [ i ] ; TypeInformation < ? > readFieldType = fieldTypes [ i ] ; if ( fileTypeInfo . getFieldIndex ( readFieldName ) < 0 ) { if ( ! skipWrongSchemaFileSplit ) { throw new IllegalArgumentException ( "Field " + readFieldName + " cannot be found in schema of " + " Parquet file: " + filePath + "." ) ; } else { this . skipThisSplit = true ; return fileSchema ; } } if ( ! readFieldType . equals ( fileTypeInfo . getTypeAt ( readFieldName ) ) ) { if ( ! skipWrongSchemaFileSplit ) { throw new IllegalArgumentException ( "Expecting type " + readFieldType + " for field " + readFieldName + " but found type " + fileTypeInfo . getTypeAt ( readFieldName ) + " in Parquet file: " + filePath + "." ) ; } else { this . skipThisSplit = true ; return fileSchema ; } } types . add ( fileSchema . getType ( readFieldName ) ) ; } return new MessageType ( fileSchema . getName ( ) , types ) ; |
public class AnnotationIssues { /** * remove branch targets that have been passed
* @ param pc the current pc */
public void clearBranchTargets ( int pc ) { } } | Iterator < Integer > it = branchTargets . iterator ( ) ; while ( it . hasNext ( ) ) { int target = it . next ( ) . intValue ( ) ; if ( target <= pc ) { it . remove ( ) ; } } |
public class TimeOfDay {

    /**
     * Sets the minute value for this TimeOfDay.
     *
     * @param minute minutes in an hour. Currently, only 0, 15, 30, and 45 are
     *               supported (per the service contract — not validated here).
     *               This field is required.
     */
    public void setMinute (com.google.api.ads.admanager.axis.v201811.MinuteOfHour minute) {
        this.minute = minute;
    }
}
public class VirtualMachineScaleSetsInner {

    /**
     * Power off (stop) one or more virtual machines in a VM scale set. Note that
     * resources are still attached and you are getting charged for the
     * resources. Instead, use deallocate to release resources and avoid charges.
     *
     * Blocking wrapper: delegates to the async service-response variant and
     * waits for its single result.
     *
     * @param resourceGroupName The name of the resource group.
     * @param vmScaleSetName    The name of the VM scale set.
     * @param instanceIds       The virtual machine scale set instance ids.
     *                          Omitting the virtual machine scale set instance
     *                          ids will result in the operation being performed
     *                          on all virtual machines in the virtual machine
     *                          scale set.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException           thrown if the request is rejected by server
     * @throws RuntimeException         all other wrapped checked exceptions if
     *                                  the request fails to be sent
     * @return the OperationStatusResponseInner object if successful.
     */
    public OperationStatusResponseInner beginPowerOff (String resourceGroupName, String vmScaleSetName, List<String> instanceIds) {
        return beginPowerOffWithServiceResponseAsync (resourceGroupName, vmScaleSetName, instanceIds).toBlocking ().single ().body ();
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.