signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConstantPool { /** * Get maximum i such that < tt > start < = i < = end < / tt > and * < tt > s . substring ( start , i ) < / tt > fits JVM UTF string encoding limit . */ int getUtfEncodingLimit ( String s , int start , int end ) { } }
if ( ( end - start ) * 3 <= MAX_UTF_ENCODING_SIZE ) { return end ; } int limit = MAX_UTF_ENCODING_SIZE ; for ( int i = start ; i != end ; i ++ ) { int c = s . charAt ( i ) ; if ( 0 != c && c <= 0x7F ) { -- limit ; } else if ( c < 0x7FF ) { limit -= 2 ; } else { limit -= 3 ; } if ( limit < 0 ) { return i ; } } return end ;
public class ConcurrentLinkedHashMap {
    /**
     * Drains the buffers and applies the pending operations.
     *
     * @param maxToDrain the maximum number of operations to drain
     */
    @GuardedBy("evictionLock")
    void drainBuffers(int maxToDrain) {
        // A mostly strict ordering is achieved by observing that each buffer
        // contains tasks in a weakly sorted order starting from the last drain.
        // The buffers can be merged into a sorted list in O(n) time by using
        // counting sort and chaining on a collision.

        // The output is capped to the expected number of tasks plus additional
        // slack to optimistically handle the concurrent additions to the buffers.
        Task[] tasks = new Task[maxToDrain];

        // Moves the tasks into the output array, applies them, and updates the
        // marker for the starting order of the next drain. The three steps must
        // run in this order: move, run, then record the drain watermark.
        int maxTaskIndex = moveTasksFromBuffers(tasks);
        runTasks(tasks, maxTaskIndex);
        updateDrainedOrder(tasks, maxTaskIndex);
    }
}
public class UniverseApi { /** * Get structure information Returns information on requested structure if * you are on the ACL . Otherwise , returns \ & quot ; Forbidden \ & quot ; for all * inputs . - - - This route is cached for up to 3600 seconds * @ param structureId * An Eve structure ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ return ApiResponse & lt ; StructureResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < StructureResponse > getUniverseStructuresStructureIdWithHttpInfo ( Long structureId , String datasource , String ifNoneMatch , String token ) throws ApiException { } }
com . squareup . okhttp . Call call = getUniverseStructuresStructureIdValidateBeforeCall ( structureId , datasource , ifNoneMatch , token , null ) ; Type localVarReturnType = new TypeToken < StructureResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class TouchAction { /** * Get the mjsonwp parameters for this Action . * @ return A map of parameters for this touch action to pass as part of mjsonwp . */ protected Map < String , List < Object > > getParameters ( ) { } }
List < ActionParameter > actionList = parameterBuilder . build ( ) ; return ImmutableMap . of ( "actions" , actionList . stream ( ) . map ( ActionParameter :: getParameterMap ) . collect ( toList ( ) ) ) ;
public class Nodes {
    /**
     * Loads the nodes from disk.
     *
     * Each node is stored in its own subdirectory of the nodes directory as a
     * {@code config.xml}. Unreadable node directories are logged and skipped
     * (best-effort load). The in-memory map is then swapped under the queue
     * lock, preserving any {@code EphemeralNode}s that exist only in memory.
     *
     * @throws IOException if the nodes could not be deserialized.
     */
    public void load() throws IOException {
        final File nodesDir = getNodesDir();
        // Only subdirectories can contain node definitions.
        final File[] subdirs = nodesDir.listFiles(new FileFilter() {
            public boolean accept(File child) {
                return child.isDirectory();
            }
        });
        final Map<String, Node> newNodes = new TreeMap<>();
        if (subdirs != null) {
            for (File subdir : subdirs) {
                try {
                    XmlFile xmlFile = new XmlFile(Jenkins.XSTREAM, new File(subdir, "config.xml"));
                    if (xmlFile.exists()) {
                        Node node = (Node) xmlFile.read();
                        newNodes.put(node.getNodeName(), node);
                    }
                } catch (IOException e) {
                    // Deliberately best-effort: one corrupt node must not
                    // prevent the rest from loading.
                    Logger.getLogger(Nodes.class.getName()).log(Level.WARNING, "could not load " + subdir, e);
                }
            }
        }
        // Swap the live map under the queue lock so builds cannot be scheduled
        // against a half-updated node list.
        Queue.withLock(new Runnable() {
            @Override
            public void run() {
                // Keep ephemeral nodes (never persisted); replace everything else.
                nodes.entrySet().removeIf(stringNodeEntry -> !(stringNodeEntry.getValue() instanceof EphemeralNode));
                nodes.putAll(newNodes);
                jenkins.updateComputerList();
                jenkins.trimLabels();
            }
        });
    }
}
public class GenericsUtils { /** * for TypeVariableUtils access */ @ SuppressWarnings ( "PMD.AvoidProtectedMethodInFinalClassNotExtending" ) protected static Type [ ] resolveTypeVariables ( final Type [ ] types , final Map < String , Type > generics , final boolean countPreservedVariables ) { } }
if ( types . length == 0 ) { return NO_TYPES ; } final Type [ ] resolved = new Type [ types . length ] ; for ( int i = 0 ; i < types . length ; i ++ ) { resolved [ i ] = resolveTypeVariables ( types [ i ] , generics , countPreservedVariables ) ; } return resolved ;
public class ServerDef {
    /**
     * <pre>
     * The name of the job of which this server is a member.
     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
     * that matches this name.
     * </pre>
     *
     * <code>optional string job_name = 2;</code>
     */
    public java.lang.String getJobName() {
        java.lang.Object ref = jobName_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call: return the cached String.
            return (java.lang.String) ref;
        } else {
            // Field still holds the wire-format ByteString; decode it once and
            // cache the String back into the field (standard protobuf idiom).
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            jobName_ = s;
            return s;
        }
    }
}
public class DbcHelper { /** * Detect database vender info . * @ param conn * @ return * @ throws SQLException */ public static DatabaseVendor detectDbVendor ( Connection conn ) throws SQLException { } }
DatabaseMetaData dmd = conn . getMetaData ( ) ; String dpn = dmd . getDatabaseProductName ( ) ; if ( StringUtils . equalsAnyIgnoreCase ( "MySQL" , dpn ) ) { return DatabaseVendor . MYSQL ; } if ( StringUtils . equalsAnyIgnoreCase ( "PostgreSQL" , dpn ) ) { return DatabaseVendor . POSTGRESQL ; } if ( StringUtils . equalsAnyIgnoreCase ( "Microsoft SQL Server" , dpn ) ) { return DatabaseVendor . MSSQL ; } return DatabaseVendor . UNKNOWN ;
public class MariaDbStatement {
    /**
     * Executes a select query.
     *
     * @param sql the query to send to the server
     * @return a result set; never null — an empty result set is returned when
     *         the statement produced no result set (e.g. a non-select)
     * @throws SQLException if something went wrong
     */
    public ResultSet executeQuery(String sql) throws SQLException {
        // executeInternal returns true when the first result is a ResultSet.
        if (executeInternal(sql, fetchSize, Statement.NO_GENERATED_KEYS)) {
            return results.getResultSet();
        }
        // Keep the JDBC contract of a non-null return.
        return SelectResultSet.createEmptyResultSet();
    }
}
public class Href { /** * Add this path to the URI . * @ param suffix The suffix * @ return New HREF */ public Href path ( final Object suffix ) { } }
return new Href ( URI . create ( new StringBuilder ( Href . TRAILING_SLASH . matcher ( this . uri . toString ( ) ) . replaceAll ( "" ) ) . append ( '/' ) . append ( Href . encode ( suffix . toString ( ) ) ) . toString ( ) ) , this . params , this . fragment ) ;
public class AbstractHttpWriter { /** * Sends request using single thread pool so that it can be easily terminated ( use case : time out ) * { @ inheritDoc } * @ see org . apache . gobblin . writer . http . HttpWriterDecoration # sendRequest ( org . apache . http . client . methods . HttpUriRequest ) */ @ Override public ListenableFuture < CloseableHttpResponse > sendRequest ( final HttpUriRequest request ) throws IOException { } }
return singleThreadPool . submit ( new Callable < CloseableHttpResponse > ( ) { @ Override public CloseableHttpResponse call ( ) throws Exception { return client . execute ( request ) ; } } ) ;
public class ActionDefaultPointcut { protected boolean isCallbackMethodImplementation ( Method method ) { } }
if ( ! ActionHook . class . isAssignableFrom ( method . getDeclaringClass ( ) ) ) { return false ; // not required if statement but for performance } for ( Method callbackMethod : callbackMethodSet ) { if ( isImplementsMethod ( method , callbackMethod ) ) { return true ; } } return false ;
public class DisambiguatedAlchemyEntity { /** * Set the website associated with this concept tag . * @ param website website associated with this concept tag */ public void setWebsite ( String website ) { } }
if ( website != null ) { website = website . trim ( ) ; } this . website = website ;
public class GlobalStateConfigurationBuilder {
    /**
     * Defines the {@link LocalConfigurationStorage} via a supplier. Defaults to
     * {@link org.infinispan.globalstate.impl.VolatileLocalConfigurationStorage}.
     *
     * @param configurationStorageSupplier supplier of the custom storage
     * @return this builder, for chaining
     */
    public GlobalStateConfigurationBuilder configurationStorageSupplier(Supplier<? extends LocalConfigurationStorage> configurationStorageSupplier) {
        // Supplying a custom storage implies CUSTOM storage mode.
        configurationStorage(ConfigurationStorage.CUSTOM);
        attributes.attribute(CONFIGURATION_STORAGE_SUPPLIER).set(configurationStorageSupplier);
        return this;
    }
}
public class UriEscape {
    /**
     * Perform a URI path <strong>unescape</strong> operation on a
     * <tt>String</tt> input using <tt>UTF-8</tt> as encoding, writing results
     * to a <tt>Writer</tt>.
     *
     * This method will unescape every percent-encoded (<tt>%HH</tt>) sequence
     * present in input, even for those characters that do not need to be
     * percent-encoded in this context (unreserved characters can be
     * percent-encoded even if/when this is not required, though it is not
     * generally considered a good practice).
     *
     * This method will use <tt>UTF-8</tt> in order to determine the characters
     * specified in the percent-encoded byte sequences.
     *
     * This method is <strong>thread-safe</strong>.
     *
     * @param text the <tt>String</tt> to be unescaped.
     * @param writer the <tt>java.io.Writer</tt> to which the unescaped result
     *        will be written. Nothing will be written at all to this writer if
     *        input is <tt>null</tt>.
     * @throws IOException if an input/output exception occurs
     * @since 1.1.2
     */
    public static void unescapeUriPath(final String text, final Writer writer) throws IOException {
        // Convenience overload: delegate with the library's default encoding (UTF-8).
        unescapeUriPath(text, writer, DEFAULT_ENCODING);
    }
}
public class StringUtils {
    /**
     * Formats a string with or without line breaks into a string with lines
     * with less than a supplied number of characters per line.
     *
     * @param aString A string to format
     * @param aCount A number of characters to allow per line
     * @return A string formatted using the supplied count
     */
    public static String toCharCount(final String aString, final int aCount) {
        final StringBuilder builder = new StringBuilder();
        // Split on any single whitespace character; words are re-flowed below.
        final String[] words = aString.split("\\s");
        int count = 0; // running character count on the current output line
        for (final String word : words) {
            count += word.length();
            if (count < aCount) {
                builder.append(word);
                // The "+ 1" accounts for the separator following the word.
                if ((count += 1) < aCount) {
                    builder.append(' ');
                } else {
                    // Line is full after this word: break and indent the next line.
                    builder.append(EOL).append(DOUBLE_SPACE);
                    count = 2;
                }
            } else {
                // Word does not fit on the current line: start a new indented
                // line and place the word there.
                builder.append(EOL).append(DOUBLE_SPACE).append(word);
                count = word.length() + 2; // two spaces at start of line
            }
        }
        return builder.toString();
    }
}
public class ImageMemoryCache { /** * Return all bitmaps in memory cache associated with the given image URI . < br / > */ public static List < Bitmap > getCachedBitmapsForImageUri ( String imageUri , ImageMemoryCache memoryCache ) { } }
List < Bitmap > values = new ArrayList < Bitmap > ( ) ; for ( String key : memoryCache . getKeySet ( ) ) { if ( key . startsWith ( imageUri ) ) { values . add ( ( Bitmap ) memoryCache . getObjectFromCache ( key ) ) ; } } return values ;
public class AbstractXmlHttpMessageConverter {
    /**
     * Transforms the given {@code Source} to the {@code Result}.
     *
     * @param source the source to transform from
     * @param result the result to transform to
     * @throws TransformerException in case of transformation errors
     */
    protected void transform(Source source, Result result) throws TransformerException {
        // A fresh Transformer per call: Transformer instances are not
        // thread-safe, while the factory held in this.transformerFactory is.
        this.transformerFactory.newTransformer().transform(source, result);
    }
}
public class EnumLiteralDeclarationImpl {
    /**
     * Sets the containment reference {@code literal}, maintaining EMF inverse
     * references and firing change notifications.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setLiteral(Keyword newLiteral) {
        if (newLiteral != literal) {
            NotificationChain msgs = null;
            // Detach the old literal from this container, if any.
            if (literal != null)
                msgs = ((InternalEObject) literal).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XtextPackage.ENUM_LITERAL_DECLARATION__LITERAL, null, msgs);
            // Attach the new literal to this container, if any.
            if (newLiteral != null)
                msgs = ((InternalEObject) newLiteral).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XtextPackage.ENUM_LITERAL_DECLARATION__LITERAL, null, msgs);
            msgs = basicSetLiteral(newLiteral, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            // Value unchanged: still notify a touch-set if observers require it.
            eNotify(new ENotificationImpl(this, Notification.SET, XtextPackage.ENUM_LITERAL_DECLARATION__LITERAL, newLiteral, newLiteral));
    }
}
public class GrabAnnotationTransformation {
    /**
     * Adds the annotation to the internal target list if a match is found.
     * Each annotation on the node is compared against the fully-qualified name,
     * the optional short name, and any registered aliases for the five Grape
     * annotation kinds (@Grab, @GrabExclude, @GrabConfig, @Grapes, @GrabResolver).
     *
     * @param node the AST node we are processing
     */
    public void visitAnnotations(AnnotatedNode node) {
        super.visitAnnotations(node);
        for (AnnotationNode an : node.getAnnotations()) {
            String name = an.getClassNode().getName();
            // Note: the checks are independent ifs (not else-if) — an alias
            // could in principle match more than one bucket.
            if ((GRAB_CLASS_NAME.equals(name))
                    || (allowShortGrab && GRAB_SHORT_NAME.equals(name))
                    || (grabAliases.contains(name))) {
                grabAnnotations.add(an);
            }
            if ((GRABEXCLUDE_CLASS_NAME.equals(name))
                    || (allowShortGrabExcludes && GRABEXCLUDE_SHORT_NAME.equals(name))
                    || (grabExcludeAliases.contains(name))) {
                grabExcludeAnnotations.add(an);
            }
            if ((GRABCONFIG_CLASS_NAME.equals(name))
                    || (allowShortGrabConfig && GRABCONFIG_SHORT_NAME.equals(name))
                    || (grabConfigAliases.contains(name))) {
                grabConfigAnnotations.add(an);
            }
            if ((GRAPES_CLASS_NAME.equals(name))
                    || (allowShortGrapes && GRAPES_SHORT_NAME.equals(name))
                    || (grapesAliases.contains(name))) {
                grapesAnnotations.add(an);
            }
            if ((GRABRESOLVER_CLASS_NAME.equals(name))
                    || (allowShortGrabResolver && GRABRESOLVER_SHORT_NAME.equals(name))
                    || (grabResolverAliases.contains(name))) {
                grabResolverAnnotations.add(an);
            }
        }
    }
}
public class AmazonWorkDocsClient { /** * Deactivates the specified user , which revokes the user ' s access to Amazon WorkDocs . * @ param deactivateUserRequest * @ return Result of the DeactivateUser operation returned by the service . * @ throws EntityNotExistsException * The resource does not exist . * @ throws UnauthorizedOperationException * The operation is not permitted . * @ throws UnauthorizedResourceAccessException * The caller does not have access to perform the action on the resource . * @ throws FailedDependencyException * The AWS Directory Service cannot reach an on - premises instance . Or a dependency under the control of the * organization is failing , such as a connected Active Directory . * @ throws ServiceUnavailableException * One or more of the dependencies is unavailable . * @ sample AmazonWorkDocs . DeactivateUser * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workdocs - 2016-05-01 / DeactivateUser " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeactivateUserResult deactivateUser ( DeactivateUserRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeactivateUser ( request ) ;
public class TransliteratorIDParser {
    /**
     * Parse a global filter of the form "[f]" or "([f])", depending on
     * 'withParens'.
     *
     * @param id the pattern to parse
     * @param pos INPUT-OUTPUT parameter. On input, the position of the first
     *        character to parse. On output, the position after the last
     *        character parsed.
     * @param dir the direction.
     * @param withParens INPUT-OUTPUT parameter. On entry, if withParens[0] is
     *        0, then parens are disallowed. If it is 1, then parens are
     *        required. If it is -1, then parens are optional, and the return
     *        result will be set to 0 or 1.
     * @param canonID OUTPUT parameter. The pattern for the filter added to the
     *        canonID, either at the end, if dir is FORWARD, or at the start, if
     *        dir is REVERSE. The pattern will be enclosed in parentheses if
     *        appropriate, and will be suffixed with an ID_DELIM character. May
     *        be null.
     * @return a UnicodeSet object or null. A non-null result indicates a
     *         successful parse, regardless of whether the filter applies to the
     *         given direction. The caller should discard it if
     *         withParens != (dir == REVERSE).
     */
    public static UnicodeSet parseGlobalFilter(String id, int[] pos, int dir, int[] withParens, StringBuffer canonID) {
        UnicodeSet filter = null;
        int start = pos[0]; // remember where we began so we can rewind on failure
        // Resolve the "optional parens" case by probing for an opening paren.
        if (withParens[0] == -1) {
            withParens[0] = Utility.parseChar(id, pos, OPEN_REV) ? 1 : 0;
        } else if (withParens[0] == 1) {
            if (!Utility.parseChar(id, pos, OPEN_REV)) {
                pos[0] = start;
                return null;
            }
        }

        pos[0] = PatternProps.skipWhiteSpace(id, pos[0]);

        if (UnicodeSet.resemblesPattern(id, pos[0])) {
            ParsePosition ppos = new ParsePosition(pos[0]);
            try {
                filter = new UnicodeSet(id, ppos, null);
            } catch (IllegalArgumentException e) {
                // Malformed set pattern: rewind and report failure.
                pos[0] = start;
                return null;
            }

            String pattern = id.substring(pos[0], ppos.getIndex());
            pos[0] = ppos.getIndex();

            // If parens were opened, the matching close paren is mandatory.
            if (withParens[0] == 1 && !Utility.parseChar(id, pos, CLOSE_REV)) {
                pos[0] = start;
                return null;
            }

            // In the forward direction, append the pattern to the
            // canonID. In the reverse, insert it at zero, and invert
            // the presence of parens ("A" <-> "(A)").
            if (canonID != null) {
                if (dir == FORWARD) {
                    if (withParens[0] == 1) {
                        pattern = String.valueOf(OPEN_REV) + pattern + CLOSE_REV;
                    }
                    canonID.append(pattern + ID_DELIM);
                } else {
                    if (withParens[0] == 0) {
                        pattern = String.valueOf(OPEN_REV) + pattern + CLOSE_REV;
                    }
                    canonID.insert(0, pattern + ID_DELIM);
                }
            }
        }

        return filter;
    }
}
public class ArmeriaConfigurationUtil {
    /**
     * Adds {@link Port}s to the specified {@link ServerBuilder}.
     *
     * Each port may specify either an IP address or a network interface name
     * (but not both); with neither, the port binds to all interfaces.
     */
    public static void configurePorts(ServerBuilder server, List<Port> ports) {
        requireNonNull(server, "server");
        requireNonNull(ports, "ports");
        ports.forEach(p -> {
            final String ip = p.getIp();
            final String iface = p.getIface();
            final int port = p.getPort();
            final List<SessionProtocol> protocols = p.getProtocols();

            if (ip == null) {
                if (iface == null) {
                    // Neither IP nor iface: bind the port on all interfaces.
                    server.port(new ServerPort(port, protocols));
                } else {
                    // Interface name given: bind the port on every address of
                    // that interface.
                    try {
                        final Enumeration<InetAddress> e = NetworkInterface.getByName(iface).getInetAddresses();
                        while (e.hasMoreElements()) {
                            server.port(new ServerPort(new InetSocketAddress(e.nextElement(), port), protocols));
                        }
                    } catch (SocketException e) {
                        throw new IllegalStateException("Failed to find an iface: " + iface, e);
                    }
                }
            } else if (iface == null) {
                // Explicit IP given: validate then bind on that address only.
                if (NetUtil.isValidIpV4Address(ip) || NetUtil.isValidIpV6Address(ip)) {
                    final byte[] bytes = NetUtil.createByteArrayFromIpAddressString(ip);
                    try {
                        server.port(new ServerPort(new InetSocketAddress(InetAddress.getByAddress(bytes), port), protocols));
                    } catch (UnknownHostException e) {
                        // Should never happen: the address was validated above.
                        throw new Error(e);
                    }
                } else {
                    throw new IllegalStateException("invalid IP address: " + ip);
                }
            } else {
                // IP and iface are mutually exclusive.
                throw new IllegalStateException("A port cannot have both IP and iface: " + p);
            }
        });
    }
}
public class HumanName { /** * syntactic sugar */ public StringType addSuffixElement ( ) { } }
StringType t = new StringType ( ) ; if ( this . suffix == null ) this . suffix = new ArrayList < StringType > ( ) ; this . suffix . add ( t ) ; return t ;
public class BaseSecurityService {
    /**
     * Changes the user's password.
     *
     * @param oldPassword Current password.
     * @param newPassword New password.
     * @return Null or empty if succeeded. Otherwise, displayable reason why
     *         change failed.
     */
    @Override
    public String changePassword(final String oldPassword, final String newPassword) {
        // Delegates to the static Security facade using this service's session.
        return Security.changePassword(brokerSession, oldPassword, newPassword);
    }
}
public class GetDataCatalogEncryptionSettingsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Binds the request's catalog ID to the protocol marshaller; any failure is
     * wrapped in an SdkClientException so callers see a uniform error type.
     */
    public void marshall(GetDataCatalogEncryptionSettingsRequest getDataCatalogEncryptionSettingsRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDataCatalogEncryptionSettingsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getDataCatalogEncryptionSettingsRequest.getCatalogId(), CATALOGID_BINDING);
        } catch (Exception e) {
            // Preserve the cause for diagnostics while normalizing the type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FormValidator {
    /**
     * Validates a single view, searching for it from the activity's root
     * (decor) view.
     *
     * @param activity activity whose decor view is used as the search root
     * @param targetView the view to validate
     * @param callback validation callback
     * @return true if validation succeeded
     */
    public static boolean validateSingleView(Activity activity, View targetView, IValidationCallback callback) {
        // Convenience overload: root defaults to the activity's decor view.
        return validateSingleView(activity, activity.getWindow().getDecorView(), targetView, callback);
    }
}
public class ResultUtil { /** * Convert a timestamp internal value ( scaled number of seconds + fractional * seconds ) into a SFTimestamp . * @ param timestampStr timestamp object * @ param scale timestamp scale * @ param internalColumnType snowflake timestamp type * @ param resultVersion For new result version , timestamp with timezone is formatted as * the seconds since epoch with fractional part in the decimal followed * by time zone index . E . g . : " 123.456 1440 " . Here 123.456 is the * number * of seconds since epoch and 1440 is the timezone index . * @ param sessionTZ session timezone * @ param session session object * @ return converted snowflake timestamp object * @ throws SFException if timestampStr is an invalid timestamp */ static public SFTimestamp getSFTimestamp ( String timestampStr , int scale , int internalColumnType , long resultVersion , TimeZone sessionTZ , SFSession session ) throws SFException { } }
logger . debug ( "public Timestamp getTimestamp(int columnIndex)" ) ; try { TimeUtil . TimestampType tsType = null ; switch ( internalColumnType ) { case Types . TIMESTAMP : tsType = TimeUtil . TimestampType . TIMESTAMP_NTZ ; break ; case SnowflakeUtil . EXTRA_TYPES_TIMESTAMP_TZ : tsType = TimeUtil . TimestampType . TIMESTAMP_TZ ; logger . trace ( "Handle timestamp with timezone {} encoding: {}" , ( resultVersion > 0 ? "new" : "old" ) , timestampStr ) ; break ; case SnowflakeUtil . EXTRA_TYPES_TIMESTAMP_LTZ : tsType = TimeUtil . TimestampType . TIMESTAMP_LTZ ; break ; } // Construct a timestamp return TimeUtil . getSFTimestamp ( timestampStr , scale , tsType , resultVersion , sessionTZ ) ; } catch ( IllegalArgumentException ex ) { throw ( SFException ) IncidentUtil . generateIncidentV2WithException ( session , new SFException ( ErrorCode . IO_ERROR , "Invalid timestamp value: " + timestampStr ) , null , null ) ; }
public class IvyMultiBranchProject { /** * Stapler URL binding used by the configure page to check the location of * alternate settings file . * @ param value file to check * @ return validation of file */ @ SuppressWarnings ( UNUSED ) public FormValidation doCheckIvySettingsFile ( @ QueryParameter String value ) { } }
String v = Util . fixEmpty ( value ) ; if ( ( v == null ) || ( v . length ( ) == 0 ) ) { // Null values are allowed . return FormValidation . ok ( ) ; } if ( v . startsWith ( "/" ) || v . startsWith ( "\\" ) || v . matches ( "^\\w\\:\\\\.*" ) ) { return FormValidation . error ( "Ivy settings file must be a relative path." ) ; } return FormValidation . ok ( ) ;
public class AbstractLinearClassifierFactory { /** * Trains a { @ link Classifier } on a { @ link Dataset } . * @ return A { @ link Classifier } trained on the data . */ public LinearClassifier < L , F > trainClassifier ( GeneralDataset < L , F > data ) { } }
labelIndex = data . labelIndex ( ) ; featureIndex = data . featureIndex ( ) ; double [ ] [ ] weights = trainWeights ( data ) ; return new LinearClassifier < L , F > ( weights , featureIndex , labelIndex ) ;
public class LinearSolverQr_DDRM { /** * Solves for X using the QR decomposition . * @ param B A matrix that is n by m . Not modified . * @ param X An n by m matrix where the solution is written to . Modified . */ @ Override public void solve ( DMatrixRMaj B , DMatrixRMaj X ) { } }
if ( B . numRows != numRows ) throw new IllegalArgumentException ( "Unexpected dimensions for X: X rows = " + X . numRows + " expected = " + numCols ) ; X . reshape ( numCols , B . numCols ) ; int BnumCols = B . numCols ; Y . reshape ( numRows , 1 , false ) ; Z . reshape ( numRows , 1 , false ) ; // solve each column one by one for ( int colB = 0 ; colB < BnumCols ; colB ++ ) { // make a copy of this column in the vector for ( int i = 0 ; i < numRows ; i ++ ) { Y . data [ i ] = B . get ( i , colB ) ; } // Solve Qa = b // a = Q ' b CommonOps_DDRM . multTransA ( Q , Y , Z ) ; // solve for Rx = b using the standard upper triangular solver TriangularSolver_DDRM . solveU ( R . data , Z . data , numCols ) ; // save the results for ( int i = 0 ; i < numCols ; i ++ ) { X . set ( i , colB , Z . data [ i ] ) ; } }
public class QrHelperFunctions_ZDRM {
    /**
     * Extracts a householder vector from the rows of A and stores it in u.
     *
     * @param A Complex matrix with householder vectors stored in the upper
     *        right triangle
     * @param row Row in A
     * @param col0 first row in A (implicitly assumed to be r + i0)
     * @param col1 last row + 1 in A
     * @param u Output array storage
     * @param offsetU first index in U
     */
    public static void extractHouseholderRow(ZMatrixRMaj A, int row, int col0, int col1, double u[], int offsetU) {
        // Interleaved complex storage: each element occupies two doubles
        // (real, imaginary), hence the *2 in all index arithmetic.
        int indexU = (offsetU + col0) * 2;
        // The leading element of a householder vector is implicitly 1 + 0i.
        u[indexU] = 1;
        u[indexU + 1] = 0;

        // Copy the remaining (col1 - col0 - 1) complex elements of the row,
        // starting one column past col0.
        int indexA = (row * A.numCols + (col0 + 1)) * 2;
        System.arraycopy(A.data, indexA, u, indexU + 2, (col1 - col0 - 1) * 2);
    }
}
public class filterpostbodyinjection {
    /**
     * Use this API to unset the properties of filterpostbodyinjection resource.
     * Properties that need to be unset are specified in args array.
     *
     * NOTE(review): the {@code resource} parameter is not read — a fresh empty
     * resource is sent with only the args list. This matches the generated
     * NITRO API pattern for global (singleton) resources, but confirm that no
     * identifying field from {@code resource} needs to be copied over.
     */
    public static base_response unset(nitro_service client, filterpostbodyinjection resource, String[] args) throws Exception {
        filterpostbodyinjection unsetresource = new filterpostbodyinjection();
        return unsetresource.unset_resource(client, args);
    }
}
public class PackageSummaryBuilder { /** * Build the content for the package doc . * @ param node the XML element that specifies which components to document * @ param contentTree the content tree to which the package contents * will be added */ public void buildContent ( XMLNode node , Content contentTree ) { } }
Content packageContentTree = packageWriter . getContentHeader ( ) ; buildChildren ( node , packageContentTree ) ; contentTree . addContent ( packageContentTree ) ;
public class IntUtils { /** * Produces a permutation of the integers from 0 ( inclusive ) to numElements ( exclusive ) which can * then be used to permute other things . * @ deprecated Use { @ link com . bbn . bue . common . math . Permutation # createForNElements ( int , Random ) } * instead , which prevents bugs due to being inconsistent about how to interpret the permutation * array . */ @ Deprecated public static int [ ] permutation ( final int numElements , final Random rng ) { } }
final int [ ] permutation = arange ( numElements ) ; shuffle ( permutation , checkNotNull ( rng ) ) ; return permutation ;
public class DOMHelper {
    /**
     * Figure out whether node2 should be considered as being later in the
     * document than node1, in Document Order as defined by the XPath model.
     * This may not agree with the ordering defined by other XML applications.
     *
     * There are some cases where ordering isn't defined, and neither are the
     * results of this function -- though we'll generally return true.
     *
     * TODO: Make sure this does the right thing with attribute nodes!!!
     *
     * @param node1 DOM Node to perform position comparison on.
     * @param node2 DOM Node to perform position comparison on.
     * @return false if node2 comes before node1, otherwise return true. You can
     *         think of this as
     *         <code>(node1.documentOrderPosition &lt;= node2.documentOrderPosition)</code>.
     */
    public static boolean isNodeAfter(Node node1, Node node2) {
        // Trivial case: a node is not after itself.
        if (node1 == node2 || isNodeTheSame(node1, node2))
            return true;

        // Default return value, if there is no defined ordering
        boolean isNodeAfter = true;

        Node parent1 = getParentOfNode(node1);
        Node parent2 = getParentOfNode(node2);

        // Optimize for most common case
        if (parent1 == parent2 || isNodeTheSame(parent1, parent2)) // then we know they are siblings
        {
            if (null != parent1)
                isNodeAfter = isNodeAfterSibling(parent1, node1, node2);
            else
            {
                // If both parents are null, ordering is not defined.
                // We're returning a value in lieu of throwing an exception.
                // Not a case we expect to arise in XPath, but beware if you
                // try to reuse this method.

                // We can just fall through in this case, which allows us
                // to hit the debugging code at the end of the function.
                // return isNodeAfter;
            }
        }
        else
        {
            // General strategy: Figure out the lengths of the two
            // ancestor chains, reconcile the lengths, and look for
            // the lowest common ancestor. If that ancestor is one of
            // the nodes being compared, it comes before the other.
            // Otherwise perform a sibling compare.
            //
            // NOTE: If no common ancestor is found, ordering is undefined
            // and we return the default value of isNodeAfter.

            // Count parents in each ancestor chain
            int nParents1 = 2, nParents2 = 2; // include node & parent obtained above

            while (parent1 != null)
            {
                nParents1++;
                parent1 = getParentOfNode(parent1);
            }

            while (parent2 != null)
            {
                nParents2++;
                parent2 = getParentOfNode(parent2);
            }

            // Initially assume scan for common ancestor starts with
            // the input nodes.
            Node startNode1 = node1, startNode2 = node2;

            // If one ancestor chain is longer, adjust its start point
            // so we're comparing at the same depths
            if (nParents1 < nParents2)
            {
                // Adjust startNode2 to depth of startNode1
                int adjust = nParents2 - nParents1;

                for (int i = 0; i < adjust; i++)
                {
                    startNode2 = getParentOfNode(startNode2);
                }
            }
            else if (nParents1 > nParents2)
            {
                // adjust startNode1 to depth of startNode2
                int adjust = nParents1 - nParents2;

                for (int i = 0; i < adjust; i++)
                {
                    startNode1 = getParentOfNode(startNode1);
                }
            }

            Node prevChild1 = null, prevChild2 = null; // so we can "back up"

            // Loop up the ancestor chain looking for common parent
            while (null != startNode1)
            {
                if (startNode1 == startNode2 || isNodeTheSame(startNode1, startNode2)) // common parent?
                {
                    if (null == prevChild1) // first time in loop?
                    {
                        // Edge condition: one is the ancestor of the other.
                        isNodeAfter = (nParents1 < nParents2) ? true : false;

                        break; // from while loop
                    }
                    else
                    {
                        // Compare ancestors below lowest-common as siblings
                        isNodeAfter = isNodeAfterSibling(startNode1, prevChild1, prevChild2);

                        break; // from while loop
                    }
                } // end if (startNode1 == startNode2)

                // Move up one level and try again
                prevChild1 = startNode1;
                startNode1 = getParentOfNode(startNode1);
                prevChild2 = startNode2;
                startNode2 = getParentOfNode(startNode2);
            } // end while (parents exist to examine)
        } // end big else (not immediate siblings)

        // WARNING: The following diagnostic won't report the early
        // "same node" case. Fix if/when needed.

        /* -- please do not remove ... very useful for diagnostics --
        System.out.println("node1 = " + node1.getNodeName() + "(" + node1.getNodeType() + ")" +
                ", node2 = " + node2.getNodeName() + "(" + node2.getNodeType() + ")" +
                ", isNodeAfter = " + isNodeAfter); */
        return isNodeAfter;
    }
}
public class RectifyImageOps { /** * Creates a transform that goes from rectified to original distorted pixel coordinates . * Rectification includes removal of lens distortion . Used for rendering rectified images . * @ param param Intrinsic parameters . * @ param rectify Transform for rectifying the image . * @ return Transform from rectified to unrectified pixels */ public static Point2Transform2_F64 transformRectToPixel ( CameraPinholeBrown param , DMatrixRMaj rectify ) { } }
return ImplRectifyImageOps_F64 . transformRectToPixel ( param , rectify ) ;
public class AuditCollectorUtil { /** * Get security audit results */ private static Audit getSecurityAudit ( JSONArray jsonArray , JSONArray global ) { } }
LOGGER . info ( "NFRR Audit Collector auditing STATIC_SECURITY_ANALYSIS" ) ; Audit audit = new Audit ( ) ; audit . setType ( AuditType . STATIC_SECURITY_ANALYSIS ) ; Audit basicAudit ; if ( ( basicAudit = doBasicAuditCheck ( jsonArray , global , AuditType . STATIC_SECURITY_ANALYSIS ) ) != null ) { return basicAudit ; } Set < String > auditStatuses ; for ( Object o : jsonArray ) { JSONArray auditJO = ( JSONArray ) ( ( JSONObject ) o ) . get ( STR_AUDITSTATUSES ) ; Optional < Object > urlOptObj = Optional . ofNullable ( ( ( JSONObject ) o ) . get ( STR_URL ) ) ; urlOptObj . ifPresent ( urlObj -> audit . getUrl ( ) . add ( urlOptObj . get ( ) . toString ( ) ) ) ; auditJO . stream ( ) . forEach ( status -> audit . getAuditStatusCodes ( ) . add ( ( String ) status ) ) ; } auditStatuses = audit . getAuditStatusCodes ( ) ; if ( auditStatuses . contains ( CodeQualityAuditStatus . STATIC_SECURITY_SCAN_FAIL . name ( ) ) || auditStatuses . contains ( CodeQualityAuditStatus . STATIC_SECURITY_SCAN_FOUND_HIGH . name ( ) ) || auditStatuses . contains ( CodeQualityAuditStatus . STATIC_SECURITY_SCAN_FOUND_CRITICAL . name ( ) ) ) { audit . setAuditStatus ( AuditStatus . FAIL ) ; audit . setDataStatus ( DataStatus . OK ) ; } else if ( auditStatuses . contains ( CodeQualityAuditStatus . STATIC_SECURITY_SCAN_OK . name ( ) ) ) { audit . setAuditStatus ( AuditStatus . OK ) ; audit . setDataStatus ( DataStatus . OK ) ; } else { audit . setAuditStatus ( AuditStatus . NA ) ; audit . setDataStatus ( DataStatus . NO_DATA ) ; } return audit ;
public class UpgradeProcess {
  /**
   * Analyzes the difference between a set of files and a set
   * of installed files . Computes which files should be added ,
   * upgraded , deleted and left alone because of collisions .
   * Processing order matters: added / upgraded / deleted paths are detected
   * first, then reconciled against what is actually found on disk.
   * @ return A report describing the result of the analysis .
   * @ throws Exception if required directories are missing or a digest fails
   */
  public UpgradeReport computeUpgrade ( ) throws Exception {
    // Validate required inputs; missing manifests default to empty.
    if ( null == upgradedFilesDir ) { throw new Exception ( "Upgraded files directory must be specified" ) ; }
    if ( null == targetDir ) { throw new Exception ( "Target directory must be specified" ) ; }
    if ( null == installedManifest ) { installedManifest = new FileSetManifest ( ) ; }
    UpgradeReport report = new UpgradeReport ( ) ;
    report . setInstalledFileSetManifest ( installedManifest ) ;
    FileSetManifest upgradedManifest = FileSetManifest . fromDirectory ( upgradedFilesDir ) ;
    report . setUpgradeFileSetManifest ( upgradedManifest ) ;
    FileSetManifest installationFilesManifest = null ;
    if ( null == installationFilesDir ) { installationFilesManifest = new FileSetManifest ( ) ; } else { installationFilesManifest = FileSetManifest . fromDirectory ( installationFilesDir ) ; }
    DigestComputerSha1 digestComputer = new DigestComputerSha1 ( ) ;
    // Detect added and upgraded files
    Set < String > relevantPaths = new HashSet < String > ( ) ;
    for ( FileManifest upgraded : upgradedManifest . getAllFileManifest ( ) ) {
      String path = upgraded . getRelativePath ( ) ;
      relevantPaths . add ( path ) ;
      FileManifest installed = installedManifest . getFileManifest ( path ) ;
      if ( null == installed ) {
        // New file
        report . addAddedPath ( path ) ;
      } else if ( false == upgraded . equals ( installed ) ) {
        // Upgraded file
        report . addUpgradedPath ( path ) ;
      }
    }
    // Detect deleted files
    for ( FileManifest installed : installedManifest . getAllFileManifest ( ) ) {
      String path = installed . getRelativePath ( ) ;
      FileManifest upgraded = upgradedManifest . getFileManifest ( path ) ;
      FileManifest installationManifest = installationFilesManifest . getFileManifest ( path ) ;
      if ( null == upgraded && null != installationManifest ) {
        // File has moved from upgrade - able path to installation directory .
        // Remove from the installation manifest
        report . addPathToAssumeDeleted ( path ) ;
      } else if ( null == upgraded ) {
        // Deleted file
        relevantPaths . add ( path ) ;
        report . addDeletedPath ( path ) ;
      }
    }
    // Construct a relevant manifest of what is found on disk
    FileSetManifest diskManifest = new FileSetManifest ( ) ;
    report . setDiskFileSetManifest ( diskManifest ) ;
    for ( String path : relevantPaths ) {
      File targetFile = new File ( targetDir , path ) ;
      if ( targetFile . exists ( ) ) {
        FileManifest fileManifest = new FileManifest ( ) ;
        fileManifest . setRelativePath ( path ) ;
        if ( targetFile . isDirectory ( ) ) {
          fileManifest . setDirectory ( true ) ;
        } else {
          try {
            String digest = digestComputer . computeDocumentDigest ( targetFile ) ;
            fileManifest . setDigest ( digest ) ;
          } catch ( Exception e ) {
            throw new Exception ( "Error while computing digest on file: " + targetFile . getAbsolutePath ( ) , e ) ;
          }
        }
        diskManifest . addFileManifest ( fileManifest ) ;
      }
    }
    // Only files that were not changed can be deleted
    for ( String path : report . getDeletedPaths ( ) ) {
      FileManifest installed = installedManifest . getFileManifest ( path ) ;
      FileManifest disk = diskManifest . getFileManifest ( path ) ;
      // If file is already deleted , then its OK
      if ( null == disk ) {
        // OK
        report . addPathToAssumeUpgraded ( path ) ;
      } else if ( installed . equals ( disk ) ) {
        // This path can be deleted
        if ( installed . isDirectory ( ) ) {
          report . addDirectoryToBeDeleted ( path ) ;
        } else {
          report . addFileToBeDeleted ( path ) ;
        }
      } else {
        // At this point , the element on disk is different
        // then the element in the installed manifest
        UpgradeCollision collision = new UpgradeCollision ( UpgradeCollision . Type . MODIFIED , path , null , installed , disk ) ;
        report . addCollision ( collision ) ;
      }
    }
    // Only files that are not present can be added , unless they
    // are exactly the same
    for ( String path : report . getAddedPaths ( ) ) {
      FileManifest upgraded = upgradedManifest . getFileManifest ( path ) ;
      FileManifest disk = diskManifest . getFileManifest ( path ) ;
      // Check if it exists on disk
      if ( null == disk ) {
        // Normal case : it is not located on disk
        // Add file or directory
        if ( upgraded . isDirectory ( ) ) {
          report . addDirectoryToBeAdded ( path ) ;
        } else {
          report . addFileToBeAdded ( path ) ;
        }
      } else if ( upgraded . equals ( disk ) ) {
        // What is found on disk is exactly the same
        // as what the upgrade would do . Therefore , simply
        // assume that the file was installed without performing
        // any operations on the element
        report . addPathToAssumeUpgraded ( path ) ;
      } else {
        // Collision
        UpgradeCollision collision = new UpgradeCollision ( UpgradeCollision . Type . BLOCKED , path , upgraded , null , disk ) ;
        report . addCollision ( collision ) ;
      }
    }
    // Upgrade files that were not changed
    for ( String path : report . getUpgradedPaths ( ) ) {
      FileManifest installed = installedManifest . getFileManifest ( path ) ;
      FileManifest upgrade = upgradedManifest . getFileManifest ( path ) ;
      FileManifest disk = diskManifest . getFileManifest ( path ) ;
      // If file was deleted , then we do not need to upgrade
      // This is a special kind of collision since this is a file that was
      // deliberately removed from the atlas
      if ( null == disk ) {
        UpgradeCollision collision = new UpgradeCollision ( UpgradeCollision . Type . DELETED , path , upgrade , null , disk ) ;
        report . addCollision ( collision ) ;
        // Assume the manifest of the upgrade for this file , because this is
        // equivalent as if the user had installed the latest upgrade and then
        // deleted the file
        report . addPathToAssumeUpgraded ( path ) ;
      } else if ( installed . equals ( disk ) ) {
        // User has not changed what was installed
        // Check if type is changed during upgrade
        if ( upgrade . isDirectory ( ) != disk . isDirectory ( ) ) {
          if ( disk . isDirectory ( ) ) {
            report . addDirectoryToBeDeleted ( path ) ;
            report . addFileToBeAdded ( path ) ;
          } else {
            // Disk is file , upgrade is directory
            report . addFileToBeDeleted ( path ) ;
            report . addDirectoryToBeAdded ( path ) ;
          }
        } else {
          // Type was not changed . Therefore , this is a file that
          // needs upgrading
          report . addFileToBeUpgraded ( path ) ;
        }
      } else if ( upgrade . equals ( disk ) ) {
        // The user has modified the disk version , but it is the same as
        // what is found in the upgrade . Therefore , no changes to the element
        // is required . Just assume that the upgrade was installed .
        report . addPathToAssumeUpgraded ( path ) ;
      } else {
        // What is found on disk is not the same as what was found
        // in the install manifest . Furthermore , the upgrade is different
        // than what is found on disk . Therefore , this is a collision .
        UpgradeCollision collision = new UpgradeCollision ( UpgradeCollision . Type . MODIFIED , path , upgrade , installed , disk ) ;
        report . addCollision ( collision ) ;
      }
    }
    return report ;
  }
}
public class Bindable { /** * Create an updated { @ link Bindable } instance with an existing value . * @ param existingValue the existing value * @ return an updated { @ link Bindable } */ public Bindable < T > withExistingValue ( T existingValue ) { } }
Assert . isTrue ( existingValue == null || this . type . isArray ( ) || this . boxedType . resolve ( ) . isInstance ( existingValue ) , ( ) -> "ExistingValue must be an instance of " + this . type ) ; Supplier < T > value = ( existingValue != null ) ? ( ) -> existingValue : null ; return new Bindable < > ( this . type , this . boxedType , value , NO_ANNOTATIONS ) ;
public class GhprbExtensionContext { /** * Overrides global settings for cancelling builds when a PR was updated */ void cancelBuildsOnUpdate ( Runnable closure ) { } }
GhprbCancelBuildsOnUpdateContext context = new GhprbCancelBuildsOnUpdateContext ( ) ; ContextExtensionPoint . executeInContext ( closure , context ) ; extensions . add ( new GhprbCancelBuildsOnUpdate ( context . getOverrideGlobal ( ) ) ) ;
public class LocalDateTime {
  /**
   * Gets the value of the field at the specified index .
   * This method is required to support the < code > ReadablePartial < / code >
   * interface . The supported fields are Year , MonthOfYear , DayOfMonth and MillisOfDay .
   * @ param index the index , zero to three
   * @ return the value
   * @ throws IndexOutOfBoundsException if the index is invalid
   */
  public int getValue ( int index ) {
    switch ( index ) { case YEAR : return getChronology ( ) . year ( ) . get ( getLocalMillis ( ) ) ; case MONTH_OF_YEAR : return getChronology ( ) . monthOfYear ( ) . get ( getLocalMillis ( ) ) ; case DAY_OF_MONTH : return getChronology ( ) . dayOfMonth ( ) . get ( getLocalMillis ( ) ) ; case MILLIS_OF_DAY : return getChronology ( ) . millisOfDay ( ) . get ( getLocalMillis ( ) ) ; default : throw new IndexOutOfBoundsException ( "Invalid index: " + index ) ; }
  }
}
public class ListTopicsDetectionJobsResult { /** * A list containing the properties of each job that is returned . * @ param topicsDetectionJobPropertiesList * A list containing the properties of each job that is returned . */ public void setTopicsDetectionJobPropertiesList ( java . util . Collection < TopicsDetectionJobProperties > topicsDetectionJobPropertiesList ) { } }
if ( topicsDetectionJobPropertiesList == null ) { this . topicsDetectionJobPropertiesList = null ; return ; } this . topicsDetectionJobPropertiesList = new java . util . ArrayList < TopicsDetectionJobProperties > ( topicsDetectionJobPropertiesList ) ;
public class UAgentInfo { /** * Detects a phone ( probably ) running the Firefox OS . * @ return detection of a Firefox OS phone */ public boolean detectFirefoxOSPhone ( ) { } }
// First , let ' s make sure we ' re NOT on another major mobile OS . if ( detectIos ( ) || detectAndroid ( ) || detectSailfish ( ) ) { return false ; } if ( ( userAgent . indexOf ( engineFirefox ) != - 1 ) && ( userAgent . indexOf ( mobile ) != - 1 ) ) { return true ; } return false ;
public class Interpreter {
  /**
   * Blocking call to read a line from the parser .
   * @ return true on EOF or false
   * @ throws ParseException on parser exception
   */
  private boolean readLine ( ) throws ParseException {
    // On a parse failure, yield() first; if the EOF flag is set, report
    // end-of-input instead of an error, otherwise propagate the exception.
    // NOTE(review): yield() appears to be an interpreter method (not
    // Thread.yield) and EOF a field set elsewhere — confirm.
    try { return parser . Line ( ) ; } catch ( ParseException e ) { yield ( ) ; if ( EOF ) return true ; throw e ; }
  }
}
public class AlluxioMaster { /** * Starts the Alluxio master . * @ param args command line arguments , should be empty */ public static void main ( String [ ] args ) { } }
if ( args . length != 0 ) { LOG . info ( "java -cp {} {}" , RuntimeConstants . ALLUXIO_JAR , AlluxioMaster . class . getCanonicalName ( ) ) ; System . exit ( - 1 ) ; } CommonUtils . PROCESS_TYPE . set ( CommonUtils . ProcessType . MASTER ) ; MasterProcess process ; try { process = AlluxioMasterProcess . Factory . create ( ) ; } catch ( Throwable t ) { ProcessUtils . fatalError ( LOG , t , "Failed to create master process" ) ; // fatalError will exit , so we shouldn ' t reach here . throw t ; } // Register a shutdown hook for master , so that master closes the journal files when it // receives SIGTERM . ProcessUtils . stopProcessOnShutdown ( process ) ; ProcessUtils . run ( process ) ;
public class EncodingSupport { /** * Converts a chunk of data into base 64 encoding . * @ param rawData * @ param lineLength optional length of lines in return value * @ return */ public static String encodeBase64 ( byte [ ] rawData , int lineLength ) { } }
if ( rawData == null ) { return "" ; } StringBuffer retval = new StringBuffer ( ) ; int i = 0 ; int n = 0 ; for ( ; i < rawData . length - 2 ; i += 3 ) { if ( lineLength > 0 && i > 0 && i % lineLength == 0 ) { retval . append ( "\n" ) ; } // n is a 32 bit number // shift all the way to left first to get rid of sign // 3 . shift last 8 bits to left 2 . shift next 8 bits left of first 1 . start here with last 8 bits shifted into int n = ( ( ( rawData [ i ] << 24 ) >>> 8 ) + ( ( rawData [ i + 1 ] << 24 ) >>> 16 ) + ( ( rawData [ i + 2 ] << 24 ) >>> 24 ) ) ; // this results in a 24 bit number ( stored in 32 bit int ) // of which 4 chunks of 6 bits are used to pick characters of base64Chars retval . append ( base64Chars . charAt ( ( n >>> 18 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( ( n >>> 12 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( ( n >>> 6 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( n & 63 ) ) ; } // finish according to spec if ( i + 1 == rawData . length ) { n = ( ( rawData [ i ] << 24 ) >>> 8 ) ; retval . append ( base64Chars . charAt ( ( n >>> 18 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( ( n >>> 12 ) & 63 ) ) ; retval . append ( "==" ) ; } if ( i + 2 == rawData . length ) { n = ( ( rawData [ i ] << 24 ) >>> 8 ) + ( ( rawData [ i + 1 ] << 24 ) >>> 16 ) ; retval . append ( base64Chars . charAt ( ( n >>> 18 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( ( n >>> 12 ) & 63 ) ) ; retval . append ( base64Chars . charAt ( ( n >>> 6 ) & 63 ) ) ; retval . append ( '=' ) ; } return retval . toString ( ) ;
public class OracleSpatialUtils {
    /**
     * Generates the SQL to convert the given string to a CLOB. Oracle limits a
     * literal chunk to 4000 characters, so longer input is split into
     * concatenated TO_CLOB('...') pieces.
     * NOTE(review): embedded single quotes are not escaped here; callers appear
     * responsible for quoting — confirm.
     *
     * @param varchar the value to convert.
     * @return the SQL to convert the string to a CLOB.
     */
    public static String convertToClob(final String varchar) {
        final int chunkSize = 4000;
        final StringBuilder sql = new StringBuilder();
        int from = 0;
        // do/while so an empty input still produces one (empty) chunk.
        do {
            final int to = Math.min(from + chunkSize, varchar.length());
            if (from > 0) {
                sql.append(" || ");
            }
            sql.append("TO_CLOB('").append(varchar, from, to).append("')");
            from = to;
        } while (from < varchar.length());
        return sql.toString();
    }
}
public class RelationalOperations {
    /**
     * Returns true if polyline_a overlaps polyline_b.
     * A cheap rasterized test runs first: geometries proven disjoint cannot overlap.
     */
    private static boolean polylineOverlapsPolyline_(Polyline polyline_a, Polyline polyline_b, double tolerance, ProgressTracker progress_tracker) {
        // Short-circuit: only do the exact linear-path test when the rasterized
        // screen did not already prove the inputs disjoint.
        return tryRasterizedContainsOrDisjoint_(polyline_a, polyline_b, tolerance, false) != Relation.disjoint
                && linearPathOverlapsLinearPath_(polyline_a, polyline_b, tolerance);
    }
}
public class PolygonalDomain { /** * Check whether this polygon contains a given point . * @ param point The coordinates specifying the point to check . * @ return < code > true < / code > this { @ link PolygonalDomain } contains this point . */ public boolean containsPoint ( Coordinate point ) { } }
Point p = new GeometryFactory ( ) . createPoint ( point ) ; return this . getGeometry ( ) . contains ( p ) ;
public class FSInputChecker { /** * Seek to the given position in the stream . * The next read ( ) will be from that position . * < p > This method may seek past the end of the file . * This produces no exception and an attempt to read from * the stream will result in - 1 indicating the end of the file . * @ param pos the postion to seek to . * @ exception IOException if an I / O error occurs . * ChecksumException if the chunk to seek to is corrupted */ public synchronized void seek ( long pos ) throws IOException { } }
if ( pos < 0 ) { return ; } // optimize : check if the pos is in the buffer long start = chunkPos - this . count ; if ( pos >= start && pos < chunkPos ) { this . pos = ( int ) ( pos - start ) ; return ; } // reset the current state resetState ( ) ; // seek to a checksum boundary chunkPos = getChunkPosition ( pos ) ; // scan to the desired position int delta = ( int ) ( pos - chunkPos ) ; if ( delta > 0 ) { readFully ( this , new byte [ delta ] , 0 , delta ) ; }
public class ParseBigDecimal { /** * Fixes the symbols in the input String ( currently only decimal separator and grouping separator ) so that the * String can be parsed as a BigDecimal . * @ param s * the String to fix * @ param symbols * the decimal format symbols * @ return the fixed String */ private static String fixSymbols ( final String s , final DecimalFormatSymbols symbols ) { } }
final char groupingSeparator = symbols . getGroupingSeparator ( ) ; final char decimalSeparator = symbols . getDecimalSeparator ( ) ; return s . replace ( String . valueOf ( groupingSeparator ) , "" ) . replace ( decimalSeparator , DEFAULT_DECIMAL_SEPARATOR ) ;
public class AbstractDestinationHandler {
  /**
   * Closes all consumers attached to this destination: first any aliases that
   * target it, then either all matching pub/sub subscriptions (including
   * durable ones made through an alias) or the point-to-point consumer manager .
   * @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # closeConsumers ( )
   */
  public void closeConsumers ( ) throws SIResourceException {
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "closeConsumers" ) ;
    // Tell any destinations that target this destination to close their consumers
    if ( aliasesThatTargetThisDest != null ) {
      synchronized ( aliasesThatTargetThisDest ) {
        Iterator i = aliasesThatTargetThisDest . iterator ( ) ;
        while ( i . hasNext ( ) ) {
          AbstractAliasDestinationHandler abstractAliasDestinationHandler = ( AbstractAliasDestinationHandler ) i . next ( ) ;
          abstractAliasDestinationHandler . closeConsumers ( ) ;
        }
      }
    }
    // Close all consumers
    if ( isPubSub ( ) ) {
      // Only local subscriptions are considered.
      SubscriptionTypeFilter filter = new SubscriptionTypeFilter ( ) ;
      filter . LOCAL = Boolean . TRUE ;
      SIMPIterator itr = getSubscriptionIndex ( ) . iterator ( filter ) ;
      while ( itr . hasNext ( ) ) {
        ControllableSubscription subscription = ( ControllableSubscription ) itr . next ( ) ;
        ConsumerDispatcher cd = ( ConsumerDispatcher ) subscription . getOutputHandler ( ) ;
        // Look for subscriptions made through this destination ( as opposed to
        // through an alias )
        if ( cd . getConsumerDispatcherState ( ) . getTopicSpaceUuid ( ) . equals ( getUuid ( ) ) ) {
          // Close the consumers of the subscription
          cd . closeAllConsumersForDelete ( this ) ;
        }
      }
      // Remove the ProxyHandlers attached to this destination
      // sanjay liberty change
      // messageProcessor . getProxyHandler ( ) . topicSpaceDeletedEvent ( this ) ;
      // Delete any durable subscriptions if they are made through an alias
      // in - case the underlying topicspace is not being deleted
      if ( isAlias ( ) ) {
        itr = getSubscriptionIndex ( ) . iterator ( filter ) ;
        while ( itr . hasNext ( ) ) {
          ControllableSubscription subscription = ( ControllableSubscription ) itr . next ( ) ;
          ConsumerDispatcher cd = ( ConsumerDispatcher ) subscription . getOutputHandler ( ) ;
          ConsumerDispatcherState subState = cd . getConsumerDispatcherState ( ) ;
          // Look for subscriptions made through this alias
          if ( ( subState . getTopicSpaceUuid ( ) . equals ( getUuid ( ) ) ) && ( cd . isDurable ( ) ) ) {
            try {
              deleteDurableSubscription ( subState . getSubscriberID ( ) , subState . getDurableHome ( ) ) ;
            } catch ( SIException e ) {
              // Failure to delete one durable subscription is logged (FFDC)
              // but does not stop processing of the remaining subscriptions.
              FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.AbstractDestinationHandler.closeConsumers" , "1:347:1.57" , this ) ;
              SibTr . exception ( tc , e ) ;
            }
          }
        }
        itr . finished ( ) ;
      }
    } else {
      // Point-to-point: close consumers on the local queue point, if any.
      LocalizationPoint ptoPMessageItemStream = getQueuePoint ( messageProcessor . getMessagingEngineUuid ( ) ) ;
      if ( ptoPMessageItemStream != null ) {
        ConsumerDispatcher consumerManager = ( ConsumerDispatcher ) ptoPMessageItemStream . getConsumerManager ( ) ;
        consumerManager . closeAllConsumersForDelete ( this ) ;
      }
    }
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "closeConsumers" ) ;
    return ;
  }
}
public class AbstractWebApplicationServiceResponseBuilder { /** * Determine response type response . * @ param finalService the final service * @ return the response type */ protected Response . ResponseType getWebApplicationServiceResponseType ( final WebApplicationService finalService ) { } }
val request = HttpRequestUtils . getHttpServletRequestFromRequestAttributes ( ) ; val methodRequest = request != null ? request . getParameter ( CasProtocolConstants . PARAMETER_METHOD ) : null ; final Function < String , String > func = FunctionUtils . doIf ( StringUtils :: isBlank , t -> { val registeredService = this . servicesManager . findServiceBy ( finalService ) ; if ( registeredService != null ) { return registeredService . getResponseType ( ) ; } return null ; } , f -> methodRequest ) ; val method = func . apply ( methodRequest ) ; if ( StringUtils . isBlank ( method ) ) { return Response . ResponseType . REDIRECT ; } return Response . ResponseType . valueOf ( method . toUpperCase ( ) ) ;
public class AttributeCriterionPane {
  /**
   * Builds the three-part criterion form: an attribute chooser, an operator
   * chooser and a value entry field, wired so that picking an attribute
   * enables and populates the other two.
   */
  private void buildUI ( ) {
    // Attribute select :
    attributeSelect = new SelectItem ( "attributeItem" ) ;
    attributeSelect . setWidth ( 140 ) ;
    attributeSelect . setShowTitle ( false ) ;
    attributeSelect . setValueMap ( getSearchableAttributes ( layer ) ) ;
    attributeSelect . setHint ( I18nProvider . getSearch ( ) . gridChooseAttribute ( ) ) ;
    attributeSelect . setShowHintInField ( true ) ;
    attributeSelect . setValidateOnChange ( true ) ;
    attributeSelect . setShowErrorStyle ( true ) ;
    attributeSelect . setRequired ( true ) ;
    // Operator select : disabled until an attribute is chosen.
    operatorSelect = new SelectItem ( "operatorItem" ) ;
    operatorSelect . setDisabled ( true ) ;
    operatorSelect . setWidth ( 140 ) ;
    operatorSelect . setShowTitle ( false ) ;
    operatorSelect . setValidateOnChange ( true ) ;
    operatorSelect . setShowErrorStyle ( true ) ;
    operatorSelect . setRequired ( true ) ;
    // Value form item : disabled until an attribute is chosen.
    valueItem = new AttributeFormItem ( "valueItem" ) ;
    valueItem . setShowTitle ( false ) ;
    valueItem . setDisabled ( true ) ;
    valueItem . setWidth ( 150 ) ;
    // Mechanisms :
    attributeSelect . addChangedHandler ( new ChangedHandler ( ) {
      public void onChanged ( ChangedEvent event ) {
        selectedAttribute = getSelectedAttribute ( ) ;
        if ( selectedAttribute != null ) {
          // Adjust operator value map and enabled :
          operatorSelect . setDisabled ( false ) ;
          String [ ] operators = getOperatorsForAttributeType ( selectedAttribute ) ;
          operatorSelect . setValueMap ( operators ) ;
          operatorSelect . setValue ( operators [ 0 ] ) ;
          // Adjust value form item and enable :
          valueItem . setAttributeInfo ( selectedAttribute ) ;
          valueItem . setDisabled ( false ) ;
          // 290 leaves room for the two 140px selects — NOTE(review): magic
          // number tied to the widths above; confirm before changing either.
          valueItem . setWidth ( form . getWidth ( ) - 290 ) ;
        }
      }
    } ) ;
    // Finalize :
    form = new DynamicForm ( ) ;
    form . setNumCols ( 6 ) ;
    form . setHeight ( 26 ) ;
    form . setWidth100 ( ) ;
    form . setFields ( attributeSelect , operatorSelect , valueItem ) ;
    addChild ( form ) ;
  }
}
public class CommercePriceListPersistenceImpl { /** * Returns the last commerce price list in the ordered set where groupId = & # 63 ; and status = & # 63 ; . * @ param groupId the group ID * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce price list * @ throws NoSuchPriceListException if a matching commerce price list could not be found */ @ Override public CommercePriceList findByG_S_Last ( long groupId , int status , OrderByComparator < CommercePriceList > orderByComparator ) throws NoSuchPriceListException { } }
CommercePriceList commercePriceList = fetchByG_S_Last ( groupId , status , orderByComparator ) ; if ( commercePriceList != null ) { return commercePriceList ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", status=" ) ; msg . append ( status ) ; msg . append ( "}" ) ; throw new NoSuchPriceListException ( msg . toString ( ) ) ;
public class JobExecutionStatusDetailsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( JobExecutionStatusDetails jobExecutionStatusDetails , ProtocolMarshaller protocolMarshaller ) { } }
if ( jobExecutionStatusDetails == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( jobExecutionStatusDetails . getDetailsMap ( ) , DETAILSMAP_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SeekableStreamIndexTaskRunner { /** * Returns true if , given that we want to start reading from recordSequenceNumber and end at endSequenceNumber , there * is more left to read . Used in pre - read checks to determine if there is anything left to read . */ private boolean isMoreToReadBeforeReadingRecord ( final SequenceOffsetType recordSequenceNumber , final SequenceOffsetType endSequenceNumber ) { } }
final int compareToEnd = createSequenceNumber ( recordSequenceNumber ) . compareTo ( createSequenceNumber ( endSequenceNumber ) ) ; return isEndOffsetExclusive ( ) ? compareToEnd < 0 : compareToEnd <= 0 ;
public class LByteSupplierBuilder { /** * Builds the functional interface implementation and if previously provided calls the consumer . */ @ Nonnull public final LByteSupplier build ( ) { } }
final LByteSupplier eventuallyFinal = this . eventually ; LByteSupplier retval ; final Case < LBoolSupplier , LByteSupplier > [ ] casesArray = cases . toArray ( new Case [ cases . size ( ) ] ) ; retval = LByteSupplier . byteSup ( ( ) -> { try { for ( Case < LBoolSupplier , LByteSupplier > aCase : casesArray ) { if ( aCase . casePredicate ( ) . getAsBool ( ) ) { return aCase . caseFunction ( ) . getAsByte ( ) ; } } return eventuallyFinal . getAsByte ( ) ; } catch ( Error e ) { // NOSONAR throw e ; } catch ( Throwable e ) { // NOSONAR throw Handler . handleOrPropagate ( e , handling ) ; } } ) ; if ( consumer != null ) { consumer . accept ( retval ) ; } return retval ;
public class Messager { /** * Print a message . * Part of DocErrorReporter . * @ param pos the position where the error occurs * @ param msg message to print */ public void printNotice ( SourcePosition pos , String msg ) { } }
if ( diagListener != null ) { report ( DiagnosticType . NOTE , pos , msg ) ; return ; } PrintWriter noticeWriter = getWriter ( WriterKind . NOTICE ) ; if ( pos == null ) noticeWriter . println ( msg ) ; else noticeWriter . println ( pos + ": " + msg ) ; noticeWriter . flush ( ) ;
public class VirtualLinkControl {
  /**
   * Returns the control adapter of the foreign bus this link targets, looked
   * up by bus name in the destination manager's foreign bus index .
   * NOTE(review): the trace strings say "getForeignBusControlAdapter" while the
   * method is getForeignBusControllable — looks intentional (the adapter is
   * what is returned) but confirm before renaming either.
   * @ see com . ibm . ws . sib . processor . runtime . SIMPVirtualLinkControllable # getForeignBusControlAdapter ( )
   */
  public SIMPForeignBusControllable getForeignBusControllable ( ) {
    // Entry and exit are traced together because the body is a single expression.
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getForeignBusControlAdapter" ) ; SibTr . exit ( tc , "getForeignBusControlAdapter" ) ; }
    return ( SIMPForeignBusControllable ) _messageProcessor . getDestinationManager ( ) . getForeignBusIndex ( ) . findByName ( _link . getBusName ( ) , null ) . getControlAdapter ( ) ;
  }
}
public class AbstractCanalInstance {
  /**
   * Initializes and starts a single event parser; group handling is not
   * considered here. (Translated from the original Chinese javadoc.)
   * Note: the isGroup parameter is not used in this method body.
   */
  protected void startEventParserInternal ( CanalEventParser eventParser , boolean isGroup ) {
    if ( eventParser instanceof AbstractEventParser ) {
      AbstractEventParser abstractEventParser = ( AbstractEventParser ) eventParser ;
      // Start the log position manager first.
      CanalLogPositionManager logPositionManager = abstractEventParser . getLogPositionManager ( ) ;
      if ( ! logPositionManager . isStart ( ) ) { logPositionManager . start ( ) ; }
    }
    if ( eventParser instanceof MysqlEventParser ) {
      MysqlEventParser mysqlEventParser = ( MysqlEventParser ) eventParser ;
      // Wire the HA controller back to the parser (heartbeat-based failover),
      // then start it if not already running.
      CanalHAController haController = mysqlEventParser . getHaController ( ) ;
      if ( haController instanceof HeartBeatHAController ) { ( ( HeartBeatHAController ) haController ) . setCanalHASwitchable ( mysqlEventParser ) ; }
      if ( ! haController . isStart ( ) ) { haController . start ( ) ; }
    }
  }
}
public class ShardedDistributedMessageQueue { /** * Return the shard for this timestamp * @ param messageTime * @ param modShard * @ return */ private String getShardKey ( long messageTime , int modShard ) { } }
long timePartition ; if ( metadata . getPartitionDuration ( ) != null ) timePartition = ( messageTime / metadata . getPartitionDuration ( ) ) % metadata . getPartitionCount ( ) ; else timePartition = 0 ; return getName ( ) + ":" + timePartition + ":" + modShard ;
public class Controller { /** * Returns the value of a request parameter as a String , or default value if the parameter does not exist . * @ param name a String specifying the name of the parameter * @ param defaultValue a String value be returned when the value of parameter is null * @ return a String representing the single value of the parameter */ public String getPara ( String name , String defaultValue ) { } }
String result = request . getParameter ( name ) ; return result != null && ! "" . equals ( result ) ? result : defaultValue ;
public class ColorNames { /** * Replies the color value for the given color name . * < p > See the documentation of the { @ link # ColorNames } type for obtaining a list of the colors . * @ param colorName the color name . * @ param defaultValue if the given name does not corresponds to a known color , this value is replied . * @ return the color value . */ @ Pure public static int getColorFromName ( String colorName , int defaultValue ) { } }
final Integer value = COLOR_MATCHES . get ( Strings . nullToEmpty ( colorName ) . toLowerCase ( ) ) ; if ( value != null ) { return value . intValue ( ) ; } return defaultValue ;
public class CommercePaymentMethodGroupRelPersistenceImpl {
    /**
     * Returns the last commerce payment method group rel in the ordered set where groupId = &#63; and active = &#63;.
     *
     * @param groupId the group ID
     * @param active the active
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce payment method group rel
     * @throws NoSuchPaymentMethodGroupRelException if a matching commerce payment method group rel could not be found
     */
    @Override
    public CommercePaymentMethodGroupRel findByG_A_Last(long groupId, boolean active,
            OrderByComparator<CommercePaymentMethodGroupRel> orderByComparator)
            throws NoSuchPaymentMethodGroupRelException {

        // Delegate to the fetch variant, which returns null instead of throwing.
        CommercePaymentMethodGroupRel commercePaymentMethodGroupRel = fetchByG_A_Last(groupId, active, orderByComparator);

        if (commercePaymentMethodGroupRel != null) {
            return commercePaymentMethodGroupRel;
        }

        // Not found: build an exception message listing the lookup keys.
        // 6 == number of append() calls below.
        StringBundler msg = new StringBundler(6);

        msg.append(_NO_SUCH_ENTITY_WITH_KEY);

        msg.append("groupId=");
        msg.append(groupId);

        msg.append(", active=");
        msg.append(active);

        msg.append("}");

        throw new NoSuchPaymentMethodGroupRelException(msg.toString());
    }
}
public class RoundRobin { public boolean addAll ( Collection < T > items ) { } }
boolean wasAllAdded = true ; for ( T item : items ) { if ( ! add ( item ) ) wasAllAdded = false ; } return wasAllAdded ;
public class U {
    // Single shared RNG: SecureRandom is thread-safe, and constructing (and
    // seeding) a fresh instance on every call is needlessly expensive.
    private static final java.security.SecureRandom RANDOM = new java.security.SecureRandom();

    /**
     * Documented, #random
     *
     * Returns a uniformly distributed random int in the inclusive range [min, max].
     *
     * @param min lower bound (inclusive)
     * @param max upper bound (inclusive); must be >= min
     * @return a random value in [min, max]
     */
    public static int random(final int min, final int max) {
        // nextInt(bound) throws IllegalArgumentException when bound <= 0,
        // i.e. when max < min -- the same contract as the original per-call version.
        return min + RANDOM.nextInt(max - min + 1);
    }
}
public class BoxApiFile { /** * Gets a request that copies a file * @ param id id of the file to copy * @ param parentId id of the parent folder to copy the file into * @ return request to copy a file */ public BoxRequestsFile . CopyFile getCopyRequest ( String id , String parentId ) { } }
BoxRequestsFile . CopyFile request = new BoxRequestsFile . CopyFile ( id , parentId , getFileCopyUrl ( id ) , mSession ) ; return request ;
public class SimpleGraphRelationshipGenerator {
    /**
     * {@inheritDoc}
     * Returns an edge-set corresponding to a randomly chosen simple graph.
     */
    @Override
    protected List<Pair<Integer, Integer>> doGenerateEdges() {
        List<Pair<Integer, Integer>> edges = new ArrayList<>();
        // Working copy of the configured degree sequence; entries are decremented
        // as edges are placed, until every remaining degree is zero.
        MutableDegreeDistribution distribution = new MutableSimpleDegreeDistribution(getConfiguration().getDegrees());
        while (!distribution.isZeroList()) {
            // int length = distribution.size();
            int index = 0;
            long min = Long.MAX_VALUE;
            // find minimal nonzero element
            for (int i = 0; i < distribution.size(); ++i) {
                long elem = distribution.get(i);
                if (elem != 0 && elem < min) {
                    min = elem;
                    index = i;
                }
            }
            WeightedReservoirSampler sampler = new WeightedReservoirSampler();
            // Obtain a candidate list:
            // Keep drawing candidate partners until one yields a valid (graphical)
            // residual degree sequence.
            while (true) {
                MutableDegreeDistribution temp = new MutableSimpleDegreeDistribution(distribution.getDegrees());
                // Pick a partner node, weighted by remaining degree, excluding 'index' itself.
                int candidateIndex = sampler.randomIndexChoice(temp.getDegrees(), index);
                Pair<Integer, Integer> edgeCandidate = Pair.of(candidateIndex, index);
                // Checks if edge has already been added.
                boolean skip = false;
                for (Pair<Integer, Integer> edge : edges) {
                    if (edge.equals(edgeCandidate)) {
                        skip = true;
                        break;
                    }
                }
                if (skip) {
                    continue;
                }
                // Prepare the candidate set and test if it is graphical
                temp.decrease(index);
                temp.decrease(candidateIndex);
                if (temp.isValid()) {
                    distribution = temp;
                    edges.add(edgeCandidate); // edge is allowed, add it.
                    break;
                }
            }
        }
        return edges;
    }
}
public class CmsDefaultPageEditor {
    /**
     * Performs the delete locale action.<p>
     *
     * Removes the current element locale from the page, persists the modified
     * XML content, switches the editor to the first remaining locale (if any),
     * and re-initializes the editor content.
     *
     * @throws JspException if something goes wrong
     */
    public void actionDeleteElementLocale() throws JspException {
        try {
            Locale loc = getElementLocale();
            m_page.removeLocale(loc);
            // write the modified xml content
            m_file.setContents(m_page.marshal());
            m_file = getCms().writeFile(m_file);
            List<Locale> locales = m_page.getLocales();
            if (locales.size() > 0) {
                // set first locale as new display locale
                Locale newLoc = locales.get(0);
                setParamElementlanguage(newLoc.toString());
                m_elementLocale = newLoc;
            } else {
                // no locales left on the page: only log the condition
                if (LOG.isErrorEnabled()) {
                    LOG.error(Messages.get().getBundle().key(Messages.LOG_GET_LOCALES_1, getParamResource()));
                }
            }
            initContent();
        } catch (CmsXmlException e) {
            // an error occurred while trying to delete the locale, stop action
            showErrorPage(e);
        } catch (CmsException e) {
            // should usually never happen
            if (LOG.isInfoEnabled()) {
                LOG.info(e.getLocalizedMessage(), e);
            }
        }
    }
}
public class AdUnit { /** * Gets the effectiveLabelFrequencyCaps value for this AdUnit . * @ return effectiveLabelFrequencyCaps * Contains the set of labels applied directly to the ad unit * as well as * those inherited from parent ad units . This field * is readonly and is * assigned by Google . */ public com . google . api . ads . admanager . axis . v201811 . LabelFrequencyCap [ ] getEffectiveLabelFrequencyCaps ( ) { } }
return effectiveLabelFrequencyCaps ;
public class Model {
    /**
     * Bulk score the frame <code>fr</code>, producing a Frame result; the 1st Vec is the
     * predicted class, the remaining Vecs are the probability distributions.
     * For Regression (single-class) models, the 1st and only Vec is the
     * prediction value.
     *
     * @param fr frame which should be scored
     * @param adapt a flag enforcing an adaptation of <code>fr</code> to this model. If flag
     *        is <code>false</code> scoring code expect that <code>fr</code> is already adapted.
     * @return a new frame containing a predicted values. For classification it contains a column with
     *         prediction and distribution for all response classes. For regression it contains only
     *         one column with predicted values.
     */
    public final Frame score(Frame fr, boolean adapt) {
        if (isSupervised()) {
            int ridx = fr.find(responseName());
            if (ridx != -1) { // drop the response for scoring!
                // Work on a shallow copy so the caller's frame is untouched.
                fr = new Frame(fr);
                fr.remove(ridx);
            }
        }
        // Adapt the Frame layout - returns adapted frame and frame containing only
        // newly created vectors
        Frame[] adaptFrms = adapt ? adapt(fr, false) : null;
        // Adapted frame containing all columns - mix of original vectors from fr
        // and newly created vectors serving as adaptors
        Frame adaptFrm = adapt ? adaptFrms[0] : fr;
        // Contains only newly created vectors. The frame eases deletion of these vectors.
        Frame onlyAdaptFrm = adapt ? adaptFrms[1] : null;
        // Invoke scoring
        Frame output = scoreImpl(adaptFrm);
        // Be nice to DKV and delete vectors which i created :-)
        if (adapt) onlyAdaptFrm.delete();
        return output;
    }
}
public class StreamExecutionEnvironment { /** * Registers the given type with the serialization stack . If the type is * eventually serialized as a POJO , then the type is registered with the * POJO serializer . If the type ends up being serialized with Kryo , then it * will be registered at Kryo to make sure that only tags are written . * @ param type * The class of the type to register . */ public void registerType ( Class < ? > type ) { } }
if ( type == null ) { throw new NullPointerException ( "Cannot register null type class." ) ; } TypeInformation < ? > typeInfo = TypeExtractor . createTypeInfo ( type ) ; if ( typeInfo instanceof PojoTypeInfo ) { config . registerPojoType ( type ) ; } else { config . registerKryoType ( type ) ; }
public class RoundingParams { /** * Sets the border around the rounded drawable * @ param color of the border * @ param width of the width */ public RoundingParams setBorder ( @ ColorInt int color , float width ) { } }
Preconditions . checkArgument ( width >= 0 , "the border width cannot be < 0" ) ; mBorderWidth = width ; mBorderColor = color ; return this ;
public class Intersection {
    /**
     * Perform intersect set operation on the two given sketch arguments and return the result as an
     * ordered CompactSketch on the heap.
     *
     * @param a The first sketch argument
     * @param b The second sketch argument
     * @return an ordered CompactSketch on the heap
     */
    public CompactSketch intersect(final Sketch a, final Sketch b) {
        // Delegate to the full overload: ordered result (true), heap destination (null memory).
        return intersect(a, b, true, null);
    }
}
public class AtomBlog { /** * { @ inheritDoc } */ @ Override public Iterator < BlogEntry > getEntries ( ) throws BlogClientException { } }
if ( entriesCollection == null ) { throw new BlogClientException ( "No primary entry collection" ) ; } return new AtomEntryIterator ( entriesCollection ) ;
public class WSRdbOnePhaseXaResourceImpl {
    /**
     * Rolls back the one-phase transaction on the underlying JDBC connection
     * and records the state transition. Any failure -- from the driver or from
     * an invalid state transition -- is surfaced as an XAException with the
     * XAER_RMERR return code.
     */
    public void rollback(Xid xid) throws XAException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(this, tc, "rollback", new Object[] { ivManagedConnection, AdapterUtil.toString(xid) });
        // Debug-only: emit the backend correlator plus transaction id for trace.
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            String cId = null;
            try {
                cId = ivManagedConnection.mcf.getCorrelator(ivManagedConnection);
            } catch (SQLException x) {
                // will just log the exception here and ignore it since its in trace
                Tr.debug(this, tc, "got an exception trying to get the correlator in commit, exception is: ", x);
            }
            if (cId != null) {
                StringBuffer stbuf = new StringBuffer(200);
                stbuf.append("Correlator: DB2, ID: ");
                stbuf.append(cId);
                if (xid != null) {
                    stbuf.append("Transaction ID : ");
                    stbuf.append(xid);
                }
                stbuf.append(" ROLLBACK");
                Tr.debug(this, tc, stbuf.toString());
            }
        }
        // Reset so we can deferred enlist in a future global transaction.
        ivManagedConnection.wasLazilyEnlistedInGlobalTran = false;
        try {
            // If no work was done during the transaction, the autoCommit value may still
            // be on. In this case, just no-op, since some drivers like ConnectJDBC 3.1
            // don't allow commit/rollback when autoCommit is on.
            ivSqlConn.rollback();
            ivStateManager.setState(WSStateManager.XA_ROLLBACK);
        } catch (SQLException sqe) {
            FFDCFilter.processException(sqe, "com.ibm.ws.rsadapter.spi.WSRdbOnePhaseXaResourceImpl.rollback", "342", this);
            Tr.error(tc, "DSA_INTERNAL_ERROR", new Object[] { "Exception caught during rollback on the OnePhaseXAResource", sqe });
            XAException xae = new XAException(XAException.XAER_RMERR);
            traceXAException(xae, currClass);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(this, tc, "rollback", "Exception");
            throw xae;
        } catch (TransactionException te) {
            // Exception means setState failed because it was invalid to set the state in this case
            FFDCFilter.processException(te, "com.ibm.ws.rsadapter.spi.WSRdbOnePhaseXaResourceImpl.rollback", "351", this);
            Tr.error(tc, "INVALID_TX_STATE", new Object[] { "OnePhaseXAResource.rollback()", ivManagedConnection.getTransactionStateAsString() });
            XAException xae = new XAException(XAException.XAER_RMERR);
            traceXAException(xae, currClass);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(this, tc, "rollback", "Exception");
            throw xae;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(this, tc, "rollback");
    }
}
public class ArticleFeatureExtractor {
    /**
     * Builds the feature vector describing the given area node: font properties,
     * layout neighborhood, text statistics, relative position, color/contrast
     * and tag support.
     */
    public FeatureVector getFeatureVector(Area node) {
        FeatureVector ret = new FeatureVector();
        String text = node.getText();
        int plen = text.length();
        if (plen == 0) plen = 1; // to avoid division by zero below
        ret.setFontSize(node.getFontSize() / avgfont); // font size relative to the document average
        ret.setWeight(node.getFontWeight());
        ret.setStyle(node.getFontStyle());
        ret.setReplaced(node.isReplaced());
        // counts of neighboring areas in each direction
        ret.setAabove(countAreasAbove(node));
        ret.setAbelow(countAreasBelow(node));
        ret.setAleft(countAreasLeft(node));
        ret.setAright(countAreasRight(node));
        ret.setNlines(getLineCount(node));
        ret.setDepth(node.getDepth() + 1); // + 2 : annotator counts the boxes and their areas as well
        ret.setTlength(text.length());
        // character-class percentages of the node text
        ret.setPdigits(countChars(text, Character.DECIMAL_DIGIT_NUMBER) / (double) plen);
        ret.setPlower(countChars(text, Character.LOWERCASE_LETTER) / (double) plen);
        ret.setPupper(countChars(text, Character.UPPERCASE_LETTER) / (double) plen);
        ret.setPspaces(countChars(text, Character.SPACE_SEPARATOR) / (double) plen);
        ret.setPpunct(countCharsPunct(text) / (double) plen);
        ret.setRelx(getRelX(node));
        ret.setRely(getRelY(node));
        // color / luminosity features
        ret.setTlum(getAverageTextLuminosity(node));
        ret.setBglum(getBackgroundLuminosity(node));
        ret.setContrast(getContrast(node));
        ret.setCperc(ca.getColorPercentage(node));
        ret.setBcperc(bca.getColorPercentage(node));
        ret.setMarkedness(getMarkedness(node));
        Tag t = node.getMostSupportedTag();
        ret.setTagLevel(t == null ? -1 : t.getLevel());
        // TODO remaining properties analogously
        return ret;
    }
}
public class TimestampUtils {
    /**
     * Load date/time information into the provided calendar returning the fractional seconds.
     *
     * Parses a backend-formatted timestamp/date/time string into a ParsedTimestamp.
     *
     * @param str the backend string to parse
     * @return the parsed timestamp fields
     * @throws SQLException when the string is not a valid timestamp/date/time
     */
    private ParsedTimestamp parseBackendTimestamp(String str) throws SQLException {
        char[] s = str.toCharArray();
        int slen = s.length;

        // This is pretty gross..
        ParsedTimestamp result = new ParsedTimestamp();

        // We try to parse these fields in order; all are optional
        // (but some combinations don't make sense, e.g. if you have
        // both date and time then they must be whitespace-separated).
        // At least one of date and time must be present.

        //   leading whitespace
        //   yyyy-mm-dd
        //   whitespace
        //   hh:mm:ss
        //   whitespace
        //   timezone in one of the formats: +hh, -hh, +hh:mm, -hh:mm
        //   whitespace
        //   if date is present, an era specifier: AD or BC
        //   trailing whitespace

        try {
            int start = skipWhitespace(s, 0); // Skip leading whitespace
            int end = firstNonDigit(s, start);
            int num;
            char sep;

            // Possibly read date.
            if (charAt(s, end) == '-') {
                // Date
                result.hasDate = true;

                // year
                result.year = number(s, start, end);
                start = end + 1; // Skip '-'

                // month
                end = firstNonDigit(s, start);
                result.month = number(s, start, end);

                sep = charAt(s, end);
                if (sep != '-') {
                    throw new NumberFormatException("Expected date to be dash-separated, got '" + sep + "'");
                }

                start = end + 1; // Skip '-'

                // day of month
                end = firstNonDigit(s, start);
                result.day = number(s, start, end);

                start = skipWhitespace(s, end); // Skip trailing whitespace
            }

            // Possibly read time.
            if (Character.isDigit(charAt(s, start))) {
                // Time.
                result.hasTime = true;

                // Hours
                end = firstNonDigit(s, start);
                result.hour = number(s, start, end);

                sep = charAt(s, end);
                if (sep != ':') {
                    throw new NumberFormatException("Expected time to be colon-separated, got '" + sep + "'");
                }
                start = end + 1; // Skip ':'

                // minutes
                end = firstNonDigit(s, start);
                result.minute = number(s, start, end);

                sep = charAt(s, end);
                if (sep != ':') {
                    throw new NumberFormatException("Expected time to be colon-separated, got '" + sep + "'");
                }
                start = end + 1; // Skip ':'

                // seconds
                end = firstNonDigit(s, start);
                result.second = number(s, start, end);
                start = end;

                // Fractional seconds.
                if (charAt(s, start) == '.') {
                    end = firstNonDigit(s, start + 1); // Skip '.'
                    num = number(s, start + 1, end);

                    // Pad the parsed digits out to nanosecond precision.
                    for (int numlength = (end - (start + 1)); numlength < 9; ++numlength) {
                        num *= 10;
                    }

                    result.nanos = num;
                    start = end;
                }

                start = skipWhitespace(s, start); // Skip trailing whitespace
            }

            // Possibly read timezone.
            sep = charAt(s, start);
            if (sep == '-' || sep == '+') {
                int tzsign = (sep == '-') ? -1 : 1;
                int tzhr;
                int tzmin;
                int tzsec;

                end = firstNonDigit(s, start + 1); // Skip +/-
                tzhr = number(s, start + 1, end);
                start = end;

                sep = charAt(s, start);
                if (sep == ':') {
                    end = firstNonDigit(s, start + 1); // Skip ':'
                    tzmin = number(s, start + 1, end);
                    start = end;
                } else {
                    tzmin = 0;
                }

                tzsec = 0;
                sep = charAt(s, start);
                if (sep == ':') {
                    end = firstNonDigit(s, start + 1); // Skip ':'
                    tzsec = number(s, start + 1, end);
                    start = end;
                }

                // Setting offset does not seem to work correctly in all
                // cases.. So get a fresh calendar for a synthetic timezone
                // instead
                result.tz = getCalendar(tzsign, tzhr, tzmin, tzsec);

                start = skipWhitespace(s, start); // Skip trailing whitespace
            }

            // Possibly read the era specifier (only meaningful with a date).
            if (result.hasDate && start < slen) {
                String eraString = new String(s, start, slen - start);
                if (eraString.startsWith("AD")) {
                    result.era = GregorianCalendar.AD;
                    start += 2;
                } else if (eraString.startsWith("BC")) {
                    result.era = GregorianCalendar.BC;
                    start += 2;
                }
            }

            if (start < slen) {
                throw new NumberFormatException("Trailing junk on timestamp: '" + new String(s, start, slen - start) + "'");
            }

            if (!result.hasTime && !result.hasDate) {
                throw new NumberFormatException("Timestamp has neither date nor time");
            }
        } catch (NumberFormatException nfe) {
            throw new PSQLException(GT.tr("Bad value for type timestamp/date/time: {1}", str), PSQLState.BAD_DATETIME_FORMAT, nfe);
        }

        return result;
    }
}
public class PropertyValuesHolder { /** * Internal function ( called from ObjectAnimator ) to set up the setter and getter * prior to running the animation . If the setter has not been manually set for this * object , it will be derived automatically given the property name , target object , and * types of values supplied . If no getter has been set , it will be supplied iff any of the * supplied values was null . If there is a null value , then the getter ( supplied or derived ) * will be called to set those null values to the current value of the property * on the target object . * @ param target The object on which the setter ( and possibly getter ) exist . */ void setupSetterAndGetter ( Object target ) { } }
// if ( mProperty ! = null ) { // / / check to make sure that mProperty is on the class of target // try { // Object testValue = mProperty . get ( target ) ; // for ( Keyframe kf : mKeyframeSet . mKeyframes ) { // if ( ! kf . hasValue ( ) ) { // kf . setValue ( mProperty . get ( target ) ) ; // return ; // } catch ( ClassCastException e ) { // Log . e ( " PropertyValuesHolder " , " No such property ( " + mProperty . getName ( ) + // " ) on target object " + target + " . Trying reflection instead " ) ; // mProperty = null ; Class targetClass = target . getClass ( ) ; if ( mSetter == null ) { setupSetter ( targetClass ) ; } for ( Keyframe kf : mKeyframeSet . mKeyframes ) { if ( ! kf . hasValue ( ) ) { if ( mGetter == null ) { setupGetter ( targetClass ) ; } try { kf . setValue ( mGetter . invoke ( target ) ) ; } catch ( InvocationTargetException e ) { Log . e ( "PropertyValuesHolder" , e . toString ( ) ) ; } catch ( IllegalAccessException e ) { Log . e ( "PropertyValuesHolder" , e . toString ( ) ) ; } } }
public class ST_CoordDim { /** * Returns the dimension of the coordinates of the given geometry . * @ param geom Geometry * @ return The dimension of the coordinates of the given geometry * @ throws IOException */ public static Integer getCoordinateDimension ( byte [ ] geom ) throws IOException { } }
if ( geom == null ) { return null ; } return GeometryMetaData . getMetaDataFromWKB ( geom ) . dimension ;
public class TrivialSwap { /** * Swap the elements of two lists at the specified positions . The run time of this method * depends on the implementation of the lists since elements are removed and added in the * lists . * @ param < E > the type of elements in this list . * @ param list1 one of the lists that will have one of its values swapped . * @ param list1Index the index of the first list that will be swapped . * @ param list2 the other list that will have one of its values swapped . * @ param list2Index the index of the second list that will be swapped . */ public static < E > void swap ( List < E > list1 , int list1Index , List < E > list2 , int list2Index ) { } }
if ( list1 . get ( list1Index ) != list2 . get ( list2Index ) ) { E hold = list1 . remove ( list1Index ) ; if ( list1 != list2 || list1Index > list2Index ) { list1 . add ( list1Index , list2 . get ( list2Index ) ) ; } else { list1 . add ( list1Index , list2 . get ( list2Index - 1 ) ) ; } list2 . remove ( list2Index ) ; list2 . add ( list2Index , hold ) ; }
public class InterconnectAttachmentClient { /** * Creates an InterconnectAttachment in the specified project using the data included in the * request . * < p > Sample code : * < pre > < code > * try ( InterconnectAttachmentClient interconnectAttachmentClient = InterconnectAttachmentClient . create ( ) ) { * ProjectRegionName region = ProjectRegionName . of ( " [ PROJECT ] " , " [ REGION ] " ) ; * InterconnectAttachment interconnectAttachmentResource = InterconnectAttachment . newBuilder ( ) . build ( ) ; * Operation response = interconnectAttachmentClient . insertInterconnectAttachment ( region , interconnectAttachmentResource ) ; * < / code > < / pre > * @ param region Name of the region for this request . * @ param interconnectAttachmentResource Represents an InterconnectAttachment ( VLAN attachment ) * resource . For more information , see Creating VLAN Attachments . ( = = resource _ for * beta . interconnectAttachments = = ) ( = = resource _ for v1 . interconnectAttachments = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation insertInterconnectAttachment ( ProjectRegionName region , InterconnectAttachment interconnectAttachmentResource ) { } }
InsertInterconnectAttachmentHttpRequest request = InsertInterconnectAttachmentHttpRequest . newBuilder ( ) . setRegion ( region == null ? null : region . toString ( ) ) . setInterconnectAttachmentResource ( interconnectAttachmentResource ) . build ( ) ; return insertInterconnectAttachment ( request ) ;
public class ListTargetsForPolicyResult { /** * A list of structures , each of which contains details about one of the entities to which the specified policy is * attached . * @ param targets * A list of structures , each of which contains details about one of the entities to which the specified * policy is attached . */ public void setTargets ( java . util . Collection < PolicyTargetSummary > targets ) { } }
if ( targets == null ) { this . targets = null ; return ; } this . targets = new java . util . ArrayList < PolicyTargetSummary > ( targets ) ;
public class SchemaService { /** * Get the given application ' s StorageService option . If none is found , assign and * return the default . Unlike { @ link # getStorageService ( ApplicationDefinition ) } , this * method will not throw an exception if the storage service is unknown or has not * been initialized . * @ param appDef { @ link ApplicationDefinition } of an application . * @ return The application ' s declared or assigned StorageService option . */ public String getStorageServiceOption ( ApplicationDefinition appDef ) { } }
String ssName = appDef . getOption ( CommonDefs . OPT_STORAGE_SERVICE ) ; if ( Utils . isEmpty ( ssName ) ) { ssName = DoradusServer . instance ( ) . getDefaultStorageService ( ) ; appDef . setOption ( CommonDefs . OPT_STORAGE_SERVICE , ssName ) ; } return ssName ;
public class ProgressSource {
    /**
     * Update progress. Records the latest and expected byte counts, advances the
     * connection state, notifies the monitor when progress crosses into a new
     * threshold range, and closes this source once the expected total is reached.
     */
    public void updateProgress(long latestProgress, long expectedProgress) {
        lastProgress = progress;
        progress = latestProgress;
        expected = expectedProgress;
        // First update transitions into CONNECTED; subsequent updates are UPDATEs.
        if (connected() == false)
            state = State.CONNECTED;
        else
            state = State.UPDATE;
        // The threshold effectively divides the progress into
        // different set of ranges:
        //   Range 0:  0..threshold-1,
        //   Range 1:  threshold..2*threshold-1
        //   Range n:  n*threshold..(n+1)*threshold-1
        //
        // To determine which range the progress belongs to, it
        // would be calculated as follow:
        //   range number = progress / threshold
        //
        // Notification should only be triggered when the current
        // progress and the last progress are in different ranges,
        // i.e. they have different range numbers.
        //
        // Using this range scheme, notification will be generated
        // only once when the progress reaches each range.
        if (lastProgress / threshold != progress / threshold) {
            progressMonitor.updateProgress(this);
        }
        // Detect read overrun
        if (expected != -1) {
            if (progress >= expected && progress != 0)
                close();
        }
    }
}
public class TreeElement { /** * Set the target scope for this anchor ' s URI . Any page flow that handles the URI will be made active within the * given scope . Scopes allow multiple page flows to be active within the same user session ; page flows in different * scopes do not in general interact with each other . This attribute is commonly used in conjunction with the * < code > target < / code > attribute to invoke a new page flow in a separate window . * @ param scope a String that identifies the scope in which the target page flow will be made active . * @ netui : attribute required = " false " rtexprvalue = " true " * description = " Set the target scope for this anchor ' s URI . " */ public void setScope ( String scope ) { } }
ExtendedInfo info = getInfo ( scope ) ; if ( info != null ) info . _scope = scope ;
public class HillSlope {
    /**
     * Returns the slope of the network link, computed lazily from the link's
     * start/end elevations and its length. A non-positive computed slope is
     * clamped to 0.001 (very flat terrain / imprecise DEM).
     *
     * @see org.hortonmachine.hmachine.modules.hydrogeomorphology.adige.core.IHillSlope#getLinkSlope()
     */
    public double getLinkSlope() {
        // -1 is the "not yet computed" sentinel value.
        if ((int) linkSlope == -1) {
            // hillslopeFeature.getAttribute(baricenterElevationAttribute);
            double startElev = (Double) linkFeature.getAttribute(NetworkChannel.STARTELEVNAME);
            double endElev = (Double) linkFeature.getAttribute(NetworkChannel.ENDELEVNAME);
            linkSlope = (startElev - endElev) / getLinkLength();
            if (linkSlope <= 0) {
                /*
                 * if <= 0 then probably it is very flat and the dem is not precise. The slope is
                 * set to a small positive minimum.
                 */
                linkSlope = 0.001;
            }
        }
        return linkSlope;
    }
}
public class DynamicLibrariesManager { /** * Open a resource from the given class loader . */ public IO . Readable getResourceFrom ( ClassLoader cl , String path , byte priority ) { } }
IOProvider . Readable provider = new IOProviderFromPathUsingClassloader ( cl ) . get ( path ) ; if ( provider == null ) return null ; try { return provider . provideIOReadable ( priority ) ; } catch ( IOException e ) { return null ; }
public class Transport {
    /**
     * Stops the transport thread, if any: interrupts it and waits up to
     * {@code timeout} milliseconds for it to terminate. When invoked from the
     * transport thread itself, only the thread reference is cleared (a thread
     * cannot join itself).
     *
     * @param timeout maximum milliseconds to wait for the thread to die
     * @throws TransportException if waiting for the thread is interrupted
     */
    private synchronized void cleanupThread(long timeout) throws TransportException {
        Thread t = this.thread;
        if (t != null && Thread.currentThread() != t) {
            this.thread = null;
            try {
                log.debug("Interrupting transport thread");
                t.interrupt();
                log.debug("Joining transport thread");
                t.join(timeout);
                log.debug("Joined transport thread");
            } catch (InterruptedException e) {
                throw new TransportException("Failed to join transport thread", e);
            }
        } else if (t != null) {
            // Called from the transport thread itself: just drop the reference.
            this.thread = null;
        }
    }
}
public class TrieNode { /** * Gets string . * @ param root the root * @ return the string */ public String getString ( TrieNode root ) { } }
if ( this == root ) return "" ; String parentStr = null == getParent ( ) ? "" : getParent ( ) . getString ( root ) ; return parentStr + getToken ( ) ;
public class RTreeIndexCoreExtension {
    /**
     * Create Triggers to Maintain Spatial Index Values. Installs the complete
     * insert/update/delete trigger set that keeps the RTree index in sync with
     * the geometry column.
     *
     * @param tableName table name
     * @param geometryColumnName geometry column name
     * @param idColumnName id column name
     */
    public void createAllTriggers(String tableName, String geometryColumnName, String idColumnName) {
        createInsertTrigger(tableName, geometryColumnName, idColumnName);
        createUpdate1Trigger(tableName, geometryColumnName, idColumnName);
        createUpdate2Trigger(tableName, geometryColumnName, idColumnName);
        createUpdate3Trigger(tableName, geometryColumnName, idColumnName);
        createUpdate4Trigger(tableName, geometryColumnName, idColumnName);
        createDeleteTrigger(tableName, geometryColumnName, idColumnName);
    }
}
public class BaseCondition { /** * Returns the topmost condition , never { @ code null } . * @ return the topmost condition following up the parent chain or { @ code this } if parent is { @ code null } . */ public BaseCondition getRoot ( ) { } }
BaseCondition p = this ; while ( p . getParent ( ) != null ) { p = p . getParent ( ) ; } return p ;
public class EmptyField {
    /**
     * Constructor.
     *
     * <p>NOTE(review): all descriptive parameters appear intentionally unused --
     * the field is initialized with a blank name, zero data length, a blank
     * description and no default, and its data is set to the blank constant.
     * Confirm this is the intended "empty field" behavior.
     *
     * @param record The parent record.
     * @param strName The field name (unused here).
     * @param iDataLength The maximum string length (unused here).
     * @param strDesc The string description (unused here).
     * @param strDefault The default value (unused here).
     */
    public void init(Record record, String strName, int iDataLength, String strDesc, Object strDefault) {
        super.init(record, Constants.BLANK, 0, Constants.BLANK, null);
        m_data = Constants.BLANK;
    }
}
public class HorizontalRecordsProcessor {
    /**
     * Corrects the cell ranges of the workbook's defined names after record
     * rows have been inserted or deleted.
     *
     * @param sheet the sheet being processed
     * @param recordOperation bookkeeping of the record insert/delete operation
     */
    private void correctNameRange(final Sheet sheet, final RecordOperation recordOperation) {
        if (recordOperation.isNotExecuteRecordOperation()) {
            return;
        }
        final Workbook workbook = sheet.getWorkbook();
        final int numName = workbook.getNumberOfNames();
        if (numName == 0) {
            return;
        }
        // Compute the cell range that was not touched by the record operation.
        final CellRangeAddress notOperateRange = new CellRangeAddress(
                recordOperation.getTopLeftPoisitoin().y,
                recordOperation.getBottomRightPosition().y - recordOperation.getCountInsertRecord(),
                recordOperation.getTopLeftPoisitoin().x,
                recordOperation.getBottomRightPosition().x);
        for (int i = 0; i < numName; i++) {
            final Name name = workbook.getNameAt(i);
            if (name.isDeleted() || name.isFunctionName()) {
                // Skip deleted names and function names.
                continue;
            }
            if (!sheet.getSheetName().equals(name.getSheetName())) {
                // Names belonging to another sheet are not corrected.
                continue;
            }
            AreaReference areaRef = new AreaReference(name.getRefersToFormula(), POIUtils.getVersion(sheet));
            CellReference firstCellRef = areaRef.getFirstCell();
            CellReference lastCellRef = areaRef.getLastCell();
            if (notOperateRange.isInRange(firstCellRef.getRow(), firstCellRef.getCol())) {
                // The name starts inside our cell range: widen its row span down
                // to the new bottom of the records.
                lastCellRef = new CellReference(
                        lastCellRef.getSheetName(),
                        recordOperation.getBottomRightPosition().y,
                        lastCellRef.getCol(),
                        lastCellRef.isRowAbsolute(),
                        lastCellRef.isColAbsolute());
                areaRef = new AreaReference(firstCellRef, lastCellRef, sheet.getWorkbook().getSpreadsheetVersion());
                // Re-apply the corrected range to the defined name.
                name.setRefersToFormula(areaRef.formatAsString());
            } else if (notOperateRange.getLastRow() < firstCellRef.getRow()) {
                /*
                 * Defined names whose range lies below our cell range are
                 * adjusted automatically, so no correction is needed here.
                 */
            }
        }
    }
}
public class LogRepositoryListener { /** * ( non - Javadoc ) * @ see org . eclipse . aether . util . listener . AbstractRepositoryListener # metadataInstalled * ( org . eclipse . aether . RepositoryEvent ) */ @ Override public void metadataInstalled ( RepositoryEvent event ) { } }
log . fine ( "Installed " + event . getMetadata ( ) + " to " + event . getFile ( ) ) ;
public class Common {
    /**
     * Reads the given {@link Reader} line by line and returns the lines as an
     * unmodifiable list.
     *
     * @param r the reader to consume; it is closed before this method returns
     * @return an unmodifiable list containing one entry per line read
     * @throws IOException if reading from the underlying reader fails
     */
    public static List<String> readToList(Reader r) throws IOException {
        List<String> lines = new ArrayList<>();
        // try-with-resources guarantees the reader is closed even on failure.
        try (BufferedReader reader = new BufferedReader(r)) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                lines.add(line);
            }
        }
        return Collections.unmodifiableList(lines);
    }
}
public class AwsSecurityFindingFilters { /** * The AWS account ID in which a finding is generated . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAwsAccountId ( java . util . Collection ) } or { @ link # withAwsAccountId ( java . util . Collection ) } if you want to * override the existing values . * @ param awsAccountId * The AWS account ID in which a finding is generated . * @ return Returns a reference to this object so that method calls can be chained together . */ public AwsSecurityFindingFilters withAwsAccountId ( StringFilter ... awsAccountId ) { } }
if ( this . awsAccountId == null ) { setAwsAccountId ( new java . util . ArrayList < StringFilter > ( awsAccountId . length ) ) ; } for ( StringFilter ele : awsAccountId ) { this . awsAccountId . add ( ele ) ; } return this ;
public class DbLoadAction {
    /**
     * Loads a batch of row change events into the target database and returns
     * the records that were processed successfully (carried in the returned
     * {@link DbLoadContext}).
     *
     * @param rowBatch   the batch of change events to load; must not be null
     * @param controller weight-based scheduler used to serialize loads per weight
     * @return the load context holding the successfully processed records
     */
    public DbLoadContext load(RowBatch rowBatch, WeightController controller) {
        Assert.notNull(rowBatch);
        Identity identity = rowBatch.getIdentity();
        DbLoadContext context = buildContext(identity);
        try {
            List<EventData> datas = rowBatch.getDatas();
            context.setPrepareDatas(datas);
            // Apply duplicate-entry data filtering.
            datas = context.getPrepareDatas();
            if (datas == null || datas.size() == 0) {
                logger.info("##no eventdata for load, return");
                return context;
            }
            // All data has already been grouped by DataMediaSource in DbBatchLoader,
            // and each data-source medium is handled by its own DbLoadAction, so the
            // media source can be taken from the first record alone.
            context.setDataMediaSource(ConfigHelper.findDataMedia(context.getPipeline(), datas.get(0).getTableId()).getSource());
            interceptor.prepare(context);
            // Apply duplicate-entry data filtering again, after the interceptor ran.
            datas = context.getPrepareDatas();
            // Handle DDL statements. DDL and DML can never appear in the same batch
            // (canal enforces this). Mainly for DDL idempotency: ideally one DDL per
            // batch, so a failure or rollback affects only that single statement.
            if (isDdlDatas(datas)) {
                doDdl(context, datas);
            } else {
                WeightBuckets<EventData> buckets = buildWeightBuckets(context, datas);
                List<Long> weights = buckets.weights();
                controller.start(weights); // weights may be empty, but start must still be called
                if (CollectionUtils.isEmpty(datas)) {
                    logger.info("##no eventdata for load");
                }
                adjustPoolSize(context); // resize the thread pool according to the manager config
                adjustConfig(context); // tune runtime parameters
                // Build data objects by weight and process them in weight order.
                for (int i = 0; i < weights.size(); i++) {
                    Long weight = weights.get(i);
                    controller.await(weight.intValue());
                    // Process all items sharing this weight.
                    List<EventData> items = buckets.getItems(weight);
                    logger.debug("##start load for weight:" + weight);
                    // Pre-process: merge repeated I/U/D operations on the same primary key.
                    items = DbLoadMerger.merge(items);
                    // Partition the merged items by operation type (I/U/D).
                    DbLoadData loadData = new DbLoadData();
                    doBefore(items, context, loadData);
                    // Execute the actual load.
                    doLoad(context, loadData);
                    controller.single(weight.intValue());
                    logger.debug("##end load for weight:" + weight);
                }
            }
            interceptor.commit(context);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag for callers
            interceptor.error(context);
        } catch (Exception e) {
            interceptor.error(context);
            throw new LoadException(e);
        }
        return context; // the successfully processed records are carried in the context
    }
}