signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JdbcUtil { /** * queryJsonArray . * @ param sql sql * @ param paramList paramList * @ param connection connection * @ param tableName tableName * @ param isDebug the specified debug flag * @ return JSONArray * @ throws SQLException SQLException * @ throws JSONException JSONException * @ throws RepositoryException repositoryException */ public static JSONArray queryJsonArray ( final String sql , final List < Object > paramList , final Connection connection , final String tableName , final boolean isDebug ) throws SQLException , JSONException , RepositoryException { } }
final JSONObject jsonObject = queryJson ( sql , paramList , connection , false , tableName , isDebug ) ; return jsonObject . getJSONArray ( Keys . RESULTS ) ;
public class HtmlAdaptorServlet { /** * Display a MBeans attributes and operations * @ param request The HTTP request * @ param response The HTTP response * @ exception ServletException Thrown if an error occurs * @ exception IOException Thrown if an I / O error occurs */ private void inspectMBean ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
String name = request . getParameter ( "name" ) ; if ( trace ) log . trace ( "inspectMBean, name=" + name ) ; try { MBeanData data = getMBeanData ( name ) ; request . setAttribute ( "mbeanData" , data ) ; RequestDispatcher rd = this . getServletContext ( ) . getRequestDispatcher ( "/inspectmbean.jsp" ) ; rd . forward ( request , response ) ; } catch ( Exception e ) { throw new ServletException ( "Failed to get MBean data" , e ) ; }
public class SynchroData { /** * Extract raw table data from the input stream . * @ param is input stream */ public void process ( InputStream is ) throws Exception { } }
readHeader ( is ) ; readVersion ( is ) ; readTableData ( readTableHeaders ( is ) , is ) ;
public class TracyFutureTask { /** * When called from the parent thread to retrieve callable result , * it will merge the worker TracyThreadContext into the parent TracyThreadContext */ @ Override public V get ( long timeout , TimeUnit unit ) throws InterruptedException , ExecutionException , TimeoutException { } }
V v = super . get ( timeout , unit ) ; this . mergeWorkerTracyTheadContext ( ) ; return v ;
public class GraphBackedMetadataRepository { /** * Deletes a given trait from an existing entity represented by a guid . * @ param guid globally unique identifier for the entity * @ param traitNameToBeDeleted name of the trait * @ throws RepositoryException */ @ Override @ GraphTransaction public void deleteTrait ( String guid , String traitNameToBeDeleted ) throws TraitNotFoundException , EntityNotFoundException , RepositoryException { } }
LOG . debug ( "Deleting trait={} from entity={}" , traitNameToBeDeleted , guid ) ; GraphTransactionInterceptor . lockObjectAndReleasePostCommit ( guid ) ; AtlasVertex instanceVertex = graphHelper . getVertexForGUID ( guid ) ; List < String > traitNames = GraphHelper . getTraitNames ( instanceVertex ) ; if ( ! traitNames . contains ( traitNameToBeDeleted ) ) { throw new TraitNotFoundException ( "Could not find trait=" + traitNameToBeDeleted + " in the repository for entity: " + guid ) ; } try { final String entityTypeName = GraphHelper . getTypeName ( instanceVertex ) ; String relationshipLabel = GraphHelper . getTraitLabel ( entityTypeName , traitNameToBeDeleted ) ; AtlasEdge edge = graphHelper . getEdgeForLabel ( instanceVertex , relationshipLabel ) ; if ( edge != null ) { deleteHandler . deleteEdgeReference ( edge , DataTypes . TypeCategory . TRAIT , false , true ) ; } // update the traits in entity once trait removal is successful traitNames . remove ( traitNameToBeDeleted ) ; updateTraits ( instanceVertex , traitNames ) ; } catch ( Exception e ) { throw new RepositoryException ( e ) ; }
public class LineEndingConversion { /** * Convert line endings of a string to the given type . Default to Unix type . * @ param input * The string containing line endings to be converted . * @ param type * Type of line endings to convert the string into . * @ return * String updated with the new line endings or null if given null . */ public static String convertEOL ( String input , EOLType type ) { } }
if ( null == input || 0 == input . length ( ) ) { return input ; } // Convert line endings to Unix LF , // which also sets up the string for other conversions input = input . replace ( "\r\n" , "\n" ) ; input = input . replace ( "\r" , "\n" ) ; switch ( type ) { case CR : case Mac : // Convert line endings to CR input = input . replace ( "\n" , "\r" ) ; break ; case CRLF : case Windows : // Convert line endings to Windows CR / LF input = input . replace ( "\n" , "\r\n" ) ; break ; default : case LF : case Unix : // Conversion already completed return input ; case LFCR : // Convert line endings to LF / CR input = input . replace ( "\n" , "\n\r" ) ; break ; } return input ;
public class MpInitiatorMailbox { /** * when the MpScheduler needs to log the completion of a transaction to its local repair log */ void deliverToRepairLog ( VoltMessage msg ) { } }
assert ( Thread . currentThread ( ) . getId ( ) == m_taskThreadId ) ; m_repairLog . deliver ( msg ) ;
public class Database { /** * Get a list of all of the columns on a table . * @ param table * The table to check . * @ return A list of all of the columns . * @ throws DatabaseException * If a database error occurs . */ public Collection < String > listColumns ( String table ) throws SQLException { } }
Collection < String > result = new ArrayList < String > ( ) ; DatabaseMetaData dbm = connection . getMetaData ( ) ; ResultSet rs = dbm . getColumns ( null , null , table , null ) ; while ( rs . next ( ) ) { result . add ( rs . getString ( "COLUMN_NAME" ) ) ; } return result ;
public class LifecycleCallbackHelper { /** * Gets the annotated method from the class object . * @ param clazz the Class to be inspected . * @ param annotationClass the annotation class object * @ return a Method object or null if there is no annotated method . */ @ SuppressWarnings ( "rawtypes" ) public Method getAnnotatedMethod ( Class clazz , Class < ? extends Annotation > annotationClass ) { } }
Method m = null ; Method [ ] methods = clazz . getDeclaredMethods ( ) ; for ( int i = 0 ; i < methods . length ; i ++ ) { Annotation [ ] a = methods [ i ] . getAnnotations ( ) ; if ( a != null ) { for ( int j = 0 ; j < a . length ; j ++ ) { if ( a [ j ] . annotationType ( ) == annotationClass ) { if ( m == null ) { m = methods [ i ] ; } else { Tr . warning ( tc , "DUPLICATE_CALLBACK_METHOD_CWWKC2454W" , new Object [ ] { methods [ i ] . getName ( ) , clazz . getName ( ) } ) ; } } } } } return m ;
public class AtomPairs2DFingerprinter { /** * Encodes name for halogen paths * @ param dist * @ param a * @ param b * @ return */ private static String encodeHalPath ( int dist , IAtom a , IAtom b ) { } }
return dist + "_" + ( isHalogen ( a ) ? "X" : a . getSymbol ( ) ) + "_" + ( isHalogen ( b ) ? "X" : b . getSymbol ( ) ) ;
public class JFXSpinnerSkin {

    /**
     * {@inheritDoc}
     *
     * Lays out the spinner: centers the arc pane in the content area, resizes
     * the fill rectangle to match, and for determinate spinners updates the
     * arc length and the percentage label.
     */
    @Override
    protected void layoutChildren(double contentX, double contentY, double contentWidth, double contentHeight) {
        final double strokeWidth = arc.getStrokeWidth();
        // Radius of the arc's center line: half the smaller dimension, pulled
        // in by half the stroke so the stroke stays inside the content box.
        final double radius = Math.min(contentWidth, contentHeight) / 2 - strokeWidth / 2;
        final double arcSize = snapSize(radius * 2 + strokeWidth);
        arcPane.resizeRelocate((contentWidth - arcSize) / 2 + 1, (contentHeight - arcSize) / 2 + 1, arcSize, arcSize);
        updateArcLayout(radius, arcSize);
        fillRect.setWidth(arcSize);
        fillRect.setHeight(arcSize);
        // One-time initialization deferred until the first layout pass.
        if (!isValid) {
            initialize();
            isValid = true;
        }
        if (!getSkinnable().isIndeterminate()) {
            arc.setLength(arcLength);
            if (text.isVisible()) {
                final double progress = control.getProgress();
                int intProgress = (int) Math.round(progress * 100.0);
                Font font = text.getFont();
                // Scale the percentage label with the spinner size.
                text.setFont(Font.font(font.getFamily(), radius / 1.7));
                // Clamp display to 100% when progress overshoots 1.
                text.setText((progress > 1 ? 100 : intProgress) + "%");
                text.relocate((arcSize - text.getLayoutBounds().getWidth()) / 2,
                        (arcSize - text.getLayoutBounds().getHeight()) / 2);
            }
        }
    }
}
public class DescribeMaintenanceWindowExecutionTaskInvocationsRequest { /** * Optional filters used to scope down the returned task invocations . The supported filter key is STATUS with the * corresponding values PENDING , IN _ PROGRESS , SUCCESS , FAILED , TIMED _ OUT , CANCELLING , and CANCELLED . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFilters ( java . util . Collection ) } or { @ link # withFilters ( java . util . Collection ) } if you want to override * the existing values . * @ param filters * Optional filters used to scope down the returned task invocations . The supported filter key is STATUS with * the corresponding values PENDING , IN _ PROGRESS , SUCCESS , FAILED , TIMED _ OUT , CANCELLING , and CANCELLED . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeMaintenanceWindowExecutionTaskInvocationsRequest withFilters ( MaintenanceWindowFilter ... filters ) { } }
if ( this . filters == null ) { setFilters ( new com . amazonaws . internal . SdkInternalList < MaintenanceWindowFilter > ( filters . length ) ) ; } for ( MaintenanceWindowFilter ele : filters ) { this . filters . add ( ele ) ; } return this ;
public class AbstractEDBService { /** * Runs all registered begin commit hooks on the EDBCommit object . Logs exceptions which occurs in the hooks , except * for ServiceUnavailableExceptions and EDBExceptions . If an EDBException occurs , it is thrown and so returned to * the calling instance . */ private void runBeginCommitHooks ( EDBCommit commit ) throws EDBException { } }
for ( EDBBeginCommitHook hook : beginCommitHooks ) { try { hook . onStartCommit ( commit ) ; } catch ( ServiceUnavailableException e ) { // Ignore } catch ( EDBException e ) { throw e ; } catch ( Exception e ) { logger . error ( "Error while performing EDBBeginCommitHook" , e ) ; } }
public class EditText { /** * Sets font feature settings . The format is the same as the CSS * font - feature - settings attribute : * http : / / dev . w3 . org / csswg / css - fonts / # propdef - font - feature - settings * @ param fontFeatureSettings font feature settings represented as CSS compatible string * @ see # getFontFeatureSettings ( ) * @ see android . graphics . Paint # getFontFeatureSettings * @ attr ref android . R . styleable # TextView _ fontFeatureSettings */ @ TargetApi ( Build . VERSION_CODES . LOLLIPOP ) public void setFontFeatureSettings ( String fontFeatureSettings ) { } }
if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . LOLLIPOP ) mInputView . setFontFeatureSettings ( fontFeatureSettings ) ;
public class ReadFileRecordResponse { /** * addResponse - - add a new record response . * @ param response Record response to add */ public void addResponse ( RecordResponse response ) { } }
if ( records == null ) { records = new RecordResponse [ 1 ] ; } else { RecordResponse old [ ] = records ; records = new RecordResponse [ old . length + 1 ] ; System . arraycopy ( old , 0 , records , 0 , old . length ) ; } records [ records . length - 1 ] = response ;
public class LdapHelper {

    /**
     * Returns a sub-entry of an LDAP user / LDAP group.
     *
     * @param cn CN of that entry
     * @param owner DN of the parent node
     * @return a new entry
     */
    public Node getEntry(final String cn, final String owner) {
        // TODO implement me! Currently just wraps the arguments in a new LdapEntry.
        return new LdapEntry(cn, owner);
    }
}
public class ServiceRegistry { /** * 获取所有服务描述 * @ return 所有注册的服务的方法描述 * @ throws IllegalStateException */ public Collection < MethodDescriptor < KEY > > getAllServiceDescriptors ( ) throws IllegalStateException { } }
Preconditions . checkNotNull ( serviceDescriptors , "serviceDescriptors cannot be null" ) ; return serviceDescriptors . values ( ) ;
public class FilterQuery { /** * add a filter with multiple values to build FilterQuery instance * @ param field * @ param valueToFilter */ public void addMultipleValuesFilter ( String field , Set < String > valueToFilter ) { } }
if ( ! valueToFilter . isEmpty ( ) ) { filterQueries . put ( field , new FilterFieldValue ( field , valueToFilter ) ) ; }
public class RedBlackTree {

    /**
     * Restores the red-black tree invariant at node h after an insertion or
     * deletion, returning the (possibly new) subtree root.
     */
    private RedBlackTreeNode<Key, Value> balance(RedBlackTreeNode<Key, Value> h) {
        // assert (h != null);
        // Order matters: fix a right-leaning red link first, then a doubled
        // left-left red pair, then split a 4-node by flipping colors.
        if (isRed(h.getRight())) h = rotateLeft(h);
        if (isRed(h.getLeft()) && isRed(h.getLeft().getLeft())) h = rotateRight(h);
        if (isRed(h.getLeft()) && isRed(h.getRight())) flipColors(h);
        // Recompute the subtree size; rotations may have changed the children.
        h.setSize(size(h.getLeft()) + size(h.getRight()) + 1);
        return h;
    }
}
public class RAMJobStore { /** * Retrieve the given < code > { @ link org . quartz . triggers . Trigger } < / code > . * @ return The desired < code > Trigger < / code > , or null if there is no match . */ @ Override public OperableTrigger retrieveTrigger ( String triggerKey ) { } }
synchronized ( lock ) { TriggerWrapper tw = wrappedTriggersByKey . get ( triggerKey ) ; return ( tw != null ) ? ( OperableTrigger ) tw . getTrigger ( ) . clone ( ) : null ; }
public class HdfsSpout { /** * renames files and returns the new file path */ private Path renameCompletedFile ( Path file ) throws IOException { } }
String fileName = file . toString ( ) ; String fileNameMinusSuffix = fileName . substring ( 0 , fileName . indexOf ( inprogress_suffix ) ) ; String newName = new Path ( fileNameMinusSuffix ) . getName ( ) ; Path newFile = new Path ( archiveDirPath + Path . SEPARATOR + newName ) ; LOG . info ( "Completed consuming file {}" , fileNameMinusSuffix ) ; if ( ! hdfs . rename ( file , newFile ) ) { throw new IOException ( "Rename failed for file: " + file ) ; } LOG . debug ( "Renamed file {} to {} " , file , newFile ) ; return newFile ;
public class ProxyBuilder { /** * Add * < pre > * abstractMethodErrorMessage = method + " cannot be called " ; * abstractMethodError = new AbstractMethodError ( abstractMethodErrorMessage ) ; * throw abstractMethodError ; * < / pre > * to the { @ code code } . * @ param code The code to add to * @ param method The method that is abstract * @ param abstractMethodErrorMessage The { @ link Local } to store the error message * @ param abstractMethodError The { @ link Local } to store the error object */ private static void throwAbstractMethodError ( Code code , Method method , Local < String > abstractMethodErrorMessage , Local < AbstractMethodError > abstractMethodError ) { } }
TypeId < AbstractMethodError > abstractMethodErrorClass = TypeId . get ( AbstractMethodError . class ) ; MethodId < AbstractMethodError , Void > abstractMethodErrorConstructor = abstractMethodErrorClass . getConstructor ( TypeId . STRING ) ; code . loadConstant ( abstractMethodErrorMessage , "'" + method + "' cannot be called" ) ; code . newInstance ( abstractMethodError , abstractMethodErrorConstructor , abstractMethodErrorMessage ) ; code . throwValue ( abstractMethodError ) ;
public class CoverageDataCore { /** * Get the coverage data value for the pixel value * @ param griddedTile * gridded tile * @ param pixelValue * pixel value * @ return coverage data value */ public Double getValue ( GriddedTile griddedTile , float pixelValue ) { } }
Double value = null ; if ( ! isDataNull ( pixelValue ) ) { value = pixelValueToValue ( griddedTile , new Double ( pixelValue ) ) ; } return value ;
public class ReversePurgeItemHashMap {

    /**
     * Increments the value mapped to the key if the key is present in the map.
     * Otherwise, the key is inserted with the adjustAmount as its value.
     *
     * @param key the key of the value to increment
     * @param adjustAmount the amount by which to increment (or initialize) the value
     */
    void adjustOrPutValue(final T key, final long adjustAmount) {
        // keys.length is a power of two, so this mask implements modulo probing.
        final int arrayMask = keys.length - 1;
        int probe = (int) hash(key.hashCode()) & arrayMask;
        int drift = 1;
        // Linear probing: advance until an empty slot (state 0) or the key itself.
        while (states[probe] != 0 && !keys[probe].equals(key)) {
            probe = (probe + 1) & arrayMask;
            drift++;
            // only used for theoretical analysis
            assert (drift < DRIFT_LIMIT) : "drift: " + drift + " >= DRIFT_LIMIT";
        }
        if (states[probe] == 0) {
            // adding the key to the table the value
            assert (numActive <= loadThreshold) : "numActive: " + numActive + " > loadThreshold: " + loadThreshold;
            keys[probe] = key;
            values[probe] = adjustAmount;
            // state records the probe distance (drift) for later purge bookkeeping.
            states[probe] = (short) drift;
            numActive++;
        } else {
            // adjusting the value of an existing key
            assert (keys[probe].equals(key));
            values[probe] += adjustAmount;
        }
    }
}
public class Converter { /** * / / / / Author */ static Author convert ( com . linecorp . centraldogma . common . Author author ) { } }
return AuthorConverter . TO_DATA . convert ( author ) ;
public class RestrictionBuilder { /** * public static BeanRestriction has ( String property ) { * return new Has ( property ) ; */ public static QualifierRestriction in ( int id , Object ... values ) { } }
return new HBaseIn ( id , Arrays . asList ( values ) ) ;
public class MFPInternalPushMessage {

    /**
     * Serializes this message's fields into the parcel.
     *
     * @see com.ibm.mobile.services.push.IBMMessage#writeToParcel(android.os.Parcel, int)
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // NOTE(review): the write order defines the parcel format and must
        // match the corresponding read side (the Parcelable CREATOR) exactly —
        // verify against it before adding, removing, or reordering fields.
        dest.writeString(id);
        dest.writeString(alert);
        dest.writeString(url);
        dest.writeString(payload);
        dest.writeString(mid);
        dest.writeString(sound);
        // bridge is a non-String field serialized through its string form.
        dest.writeString(String.valueOf(bridge));
        dest.writeString(priority);
        dest.writeString(visibility);
        dest.writeString(redact);
        dest.writeString(category);
        dest.writeString(key);
        dest.writeString(gcmStyle);
        dest.writeString(iconName);
        dest.writeInt(notificationId);
        dest.writeString(lights);
        dest.writeString(messageType);
        dest.writeInt(hasTemplate);
    }
}
public class Message {

    /**
     * Parses a message header. Does not yet process the annotations chunks and
     * message data.
     *
     * <p>Header layout (all multi-byte fields big-endian): bytes 0-3 magic
     * "PYRO", 4-5 protocol version, 6-7 message type, 8-9 flags, 10-11
     * sequence number, 12-15 data size, 16-17 serializer id, 18-19 annotations
     * size, 20-21 reserved, 22-23 checksum.
     */
    public static Message from_header(byte[] header) {
        if (header == null || header.length != HEADER_SIZE)
            throw new PyroException("header data size mismatch");
        if (header[0] != 'P' || header[1] != 'Y' || header[2] != 'R' || header[3] != 'O')
            throw new PyroException("invalid message");
        int version = ((header[4] & 0xff) << 8) | (header[5] & 0xff);
        if (version != Config.PROTOCOL_VERSION)
            throw new PyroException("invalid protocol version: " + version);
        int msg_type = ((header[6] & 0xff) << 8) | (header[7] & 0xff);
        int flags = ((header[8] & 0xff) << 8) | (header[9] & 0xff);
        int seq = ((header[10] & 0xff) << 8) | (header[11] & 0xff);
        // 32-bit big-endian data size, assembled byte by byte.
        int data_size = header[12] & 0xff;
        data_size <<= 8;
        data_size |= header[13] & 0xff;
        data_size <<= 8;
        data_size |= header[14] & 0xff;
        data_size <<= 8;
        data_size |= header[15] & 0xff;
        int serializer_id = ((header[16] & 0xff) << 8) | (header[17] & 0xff);
        int annotations_size = ((header[18] & 0xff) << 8) | (header[19] & 0xff);
        // byte 20 and 21 are reserved.
        int checksum = ((header[22] & 0xff) << 8) | (header[23] & 0xff);
        // Checksum is the 16-bit sum of the header fields plus a magic constant.
        int actual_checksum = (msg_type + version + data_size + annotations_size + flags + serializer_id + seq + CHECKSUM_MAGIC) & 0xffff;
        if (checksum != actual_checksum)
            throw new PyroException("header checksum mismatch");
        Message msg = new Message(msg_type, serializer_id, flags, seq);
        msg.data_size = data_size;
        msg.annotations_size = annotations_size;
        return msg;
    }
}
public class PatternCriteria { /** * / * ( non - Javadoc ) * @ see org . talend . esb . sam . server . persistence . criterias . Criteria # parseValue ( java . lang . String ) */ @ Override public Criteria [ ] parseValue ( String attribute ) { } }
PatternCriteria result = new PatternCriteria ( this . name , this . columnName , this . condition ) ; result . pattern = toSQLPattern ( attribute ) ; return new Criteria [ ] { result } ;
public class CommerceDiscountPersistenceImpl { /** * Returns the first commerce discount in the ordered set where displayDate & lt ; & # 63 ; and status = & # 63 ; . * @ param displayDate the display date * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce discount * @ throws NoSuchDiscountException if a matching commerce discount could not be found */ @ Override public CommerceDiscount findByLtD_S_First ( Date displayDate , int status , OrderByComparator < CommerceDiscount > orderByComparator ) throws NoSuchDiscountException { } }
CommerceDiscount commerceDiscount = fetchByLtD_S_First ( displayDate , status , orderByComparator ) ; if ( commerceDiscount != null ) { return commerceDiscount ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "displayDate=" ) ; msg . append ( displayDate ) ; msg . append ( ", status=" ) ; msg . append ( status ) ; msg . append ( "}" ) ; throw new NoSuchDiscountException ( msg . toString ( ) ) ;
public class SeaGlassInternalFrameTitlePane { /** * Set the enable / disabled state for the buttons . */ private void enableActions ( ) { } }
restoreAction . setEnabled ( frame . isMaximum ( ) || frame . isIcon ( ) ) ; maximizeAction . setEnabled ( ( frame . isMaximizable ( ) && ! frame . isMaximum ( ) && ! frame . isIcon ( ) ) || ( frame . isMaximizable ( ) && frame . isIcon ( ) ) ) ; iconifyAction . setEnabled ( frame . isIconifiable ( ) && ! frame . isIcon ( ) ) ; closeAction . setEnabled ( frame . isClosable ( ) ) ; sizeAction . setEnabled ( false ) ; moveAction . setEnabled ( false ) ;
public class DistributedRaidFileSystem {

    /**
     * Initialize a Raid FileSystem: sets up codecs, reads the list of
     * directories exempt from parity checking, and instantiates the
     * underlying file system implementation.
     */
    public void initialize(URI name, Configuration conf) throws IOException {
        this.conf = conf;
        // init the codec from conf.
        Codec.initializeCodecs(conf);
        Class<?> clazz = conf.getClass("fs.raid.underlyingfs.impl", DistributedFileSystem.class);
        if (clazz == null) {
            throw new IOException("No FileSystem for fs.raid.underlyingfs.impl.");
        }
        // Directories listed under this key are exempt from parity checking.
        String ignoredDirectories = conf.get(DIRECTORIES_IGNORE_PARITY_CHECKING_KEY);
        if (ignoredDirectories != null && ignoredDirectories.length() > 0) {
            String[] directories = ignoredDirectories.split(",");
            directoriesIgnoreCheckParity = new ArrayList<String>();
            for (String dir : directories) {
                // Only absolute paths are honored; normalize each entry to a
                // trailing-slash form so prefix matching works consistently.
                if (dir.length() > 0 && dir.startsWith("/")) {
                    if (!dir.endsWith("/")) {
                        dir = dir + "/";
                    }
                    directoriesIgnoreCheckParity.add(dir);
                }
            }
        }
        this.fs = (FileSystem) ReflectionUtils.newInstance(clazz, null);
        super.initialize(name, conf);
    }
}
public class ConditionalObjectResolver { @ Override public Object resolveForObjectAndContext ( Object self , Context context ) { } }
Object result = this . conditionalResolver . resolveForObjectAndContext ( self , context ) ; if ( result == null ) { return "" ; } try { boolean boolResult = ( Boolean ) result ; if ( boolResult ^ this . neg ) { return this . subExpression . renderForContext ( context ) ; } else { return "" ; } } catch ( Throwable t ) { throw new InvalidTextException ( "A conditional must resolve to a boolean" , t ) ; }
public class StyleUtils { /** * Set the icon into the marker options * @ param markerOptions marker options * @ param icon icon row * @ param density display density : { @ link android . util . DisplayMetrics # density } * @ param iconCache icon cache * @ return true if icon was set into the marker options */ public static boolean setIcon ( MarkerOptions markerOptions , IconRow icon , float density , IconCache iconCache ) { } }
boolean iconSet = false ; if ( icon != null ) { Bitmap iconImage = createIcon ( icon , density , iconCache ) ; markerOptions . icon ( BitmapDescriptorFactory . fromBitmap ( iconImage ) ) ; iconSet = true ; double anchorU = icon . getAnchorUOrDefault ( ) ; double anchorV = icon . getAnchorVOrDefault ( ) ; markerOptions . anchor ( ( float ) anchorU , ( float ) anchorV ) ; } return iconSet ;
public class DbfRow { /** * Retrieves the value of the designated field as java . math . BigDecimal . * @ param fieldName the name of the field * @ return the field value , or null ( if the dbf value is NULL ) * @ throws DbfException if there ' s no field with name fieldName */ public BigDecimal getBigDecimal ( String fieldName ) throws DbfException { } }
Object value = get ( fieldName ) ; return value == null ? null : new BigDecimal ( value . toString ( ) ) ;
public class ParaClient { /** * Searches for objects that have a property with a value matching a wildcard query . * @ param < P > type of the object * @ param type the type of object to search for . See { @ link com . erudika . para . core . ParaObject # getType ( ) } * @ param field the property name of an object * @ param wildcard wildcard query string . For example " cat * " . * @ param pager a { @ link com . erudika . para . utils . Pager } * @ return a list of objects found */ public < P extends ParaObject > List < P > findWildcard ( String type , String field , String wildcard , Pager ... pager ) { } }
MultivaluedMap < String , String > params = new MultivaluedHashMap < > ( ) ; params . putSingle ( "field" , field ) ; params . putSingle ( "q" , wildcard ) ; params . putSingle ( Config . _TYPE , type ) ; params . putAll ( pagerToParams ( pager ) ) ; return getItems ( find ( "wildcard" , params ) , pager ) ;
public class CheckParameterizables { /** * Check all supertypes of a class . * @ param cls Class to check . * @ return { @ code true } when at least one supertype is a known parameterizable * type . */ private boolean checkSupertypes ( Class < ? > cls ) { } }
for ( Class < ? > c : knownParameterizables ) { if ( c . isAssignableFrom ( cls ) ) { return true ; } } return false ;
public class RqLive { /** * Builds current read header . * @ param data Current read character * @ param baos Current read header * @ return Read header */ private static Opt < String > newHeader ( final Opt < Integer > data , final ByteArrayOutputStream baos ) { } }
Opt < String > header = new Opt . Empty < > ( ) ; if ( data . get ( ) != ' ' && data . get ( ) != '\t' ) { header = new Opt . Single < > ( new Utf8String ( baos . toByteArray ( ) ) . asString ( ) ) ; baos . reset ( ) ; } return header ;
public class XmlHelper { /** * 针对没有嵌套节点的简单处理 * @ return map集合 */ public Map < String , String > toMap ( ) { } }
Element root = doc . getDocumentElement ( ) ; Map < String , String > params = new HashMap < String , String > ( ) ; // 将节点封装成map形式 NodeList list = root . getChildNodes ( ) ; for ( int i = 0 ; i < list . getLength ( ) ; i ++ ) { Node node = list . item ( i ) ; params . put ( node . getNodeName ( ) , node . getTextContent ( ) ) ; } // 含有空白符会生成一个 # text参数 params . remove ( "#text" ) ; return params ;
public class MalisisFont { /** * Gets the rendering height of strings . * @ return the string height */ public float getStringHeight ( String text , FontOptions options ) { } }
StringWalker walker = new StringWalker ( text , options ) ; walker . walkToEnd ( ) ; return walker . lineHeight ( ) ;
public class ContentStoreImpl { /** * { @ inheritDoc } */ @ Override public Map < String , String > getSpaceProperties ( final String spaceId ) throws ContentStoreException { } }
return execute ( new Retriable ( ) { @ Override public Map < String , String > retry ( ) throws ContentStoreException { // The actual method being executed return doGetSpaceProperties ( spaceId ) ; } } ) ;
public class Line { /** * Marks this line empty . Also sets previous / next line ' s empty attributes . */ public void setEmpty ( ) { } }
this . value = "" ; this . leading = this . trailing = 0 ; this . isEmpty = true ; if ( this . previous != null ) { this . previous . nextEmpty = true ; } if ( this . next != null ) { this . next . prevEmpty = true ; }
public class PaymentService { /** * Returns and refresh data of a specific { @ link Payment } . * @ param payment * A { @ link Payment } with Id . * @ return Refreshed instance of the given { @ link Payment } . */ public Payment get ( Payment payment ) { } }
return RestfulUtils . show ( PaymentService . PATH , payment , Payment . class , super . httpClient ) ;
public class Runner {

    /**
     * Setup the Flink job with the graph input, algorithm, and output.
     *
     * <p>To then execute the job call {@link #execute}.
     *
     * @return this
     * @throws Exception on error
     */
    public Runner run() throws Exception {
        // Set up the execution environment
        env = ExecutionEnvironment.getExecutionEnvironment();
        ExecutionConfig config = env.getConfig();
        // should not have any non-Flink data types
        config.disableForceAvro();
        config.disableForceKryo();
        config.setGlobalJobParameters(parameters);
        parameterize(this);
        // configure local parameters and throw proper exception on error
        try {
            this.configure(parameters);
        } catch (RuntimeException ex) {
            throw new ProgramParametrizationException(ex.getMessage());
        }
        // integration tests run with with object reuse both disabled and enabled
        if (disableObjectReuse.getValue()) {
            config.disableObjectReuse();
        } else {
            config.enableObjectReuse();
        }
        // Usage and configuration
        // algorithm and usage
        if (!parameters.has(ALGORITHM)) {
            throw new ProgramParametrizationException(getAlgorithmsListing());
        }
        String algorithmName = parameters.get(ALGORITHM);
        algorithm = driverFactory.get(algorithmName);
        if (algorithm == null) {
            throw new ProgramParametrizationException("Unknown algorithm name: " + algorithmName);
        }
        // input and usage
        if (!parameters.has(INPUT)) {
            if (!parameters.has(OUTPUT)) {
                // if neither input nor output is given then print algorithm usage
                throw new ProgramParametrizationException(getAlgorithmUsage(algorithmName));
            }
            throw new ProgramParametrizationException("No input given");
        }
        parameterize(algorithm);
        String inputName = parameters.get(INPUT);
        Input input = inputFactory.get(inputName);
        if (input == null) {
            throw new ProgramParametrizationException("Unknown input type: " + inputName);
        }
        parameterize(input);
        // output and usage
        if (!parameters.has(OUTPUT)) {
            throw new ProgramParametrizationException("No output given");
        }
        String outputName = parameters.get(OUTPUT);
        output = outputFactory.get(outputName);
        if (output == null) {
            throw new ProgramParametrizationException("Unknown output type: " + outputName);
        }
        parameterize(output);
        // Create list of input and algorithm transforms
        List<Transform> transforms = new ArrayList<>();
        if (input instanceof Transformable) {
            transforms.addAll(((Transformable) input).getTransformers());
        }
        if (algorithm instanceof Transformable) {
            transforms.addAll(((Transformable) algorithm).getTransformers());
        }
        for (Transform transform : transforms) {
            parameterize(transform);
        }
        // unused parameters
        if (parameters.getUnrequestedParameters().size() > 0) {
            throw new ProgramParametrizationException("Unrequested parameters: " + parameters.getUnrequestedParameters());
        }
        // Execute
        // Create input
        Graph graph = input.create(env);
        // Transform input
        for (Transform transform : transforms) {
            graph = (Graph) transform.transformInput(graph);
        }
        // Run algorithm
        result = algorithm.plan(graph);
        // Output
        executionName = jobName.getValue() != null ? jobName.getValue() + ": " : "";
        executionName += input.getIdentity() + " ⇨ " + algorithmName + " ⇨ " + output.getName();
        if (transforms.size() > 0) {
            // append identifiers to job name
            StringBuffer buffer = new StringBuffer(executionName).append(" [");
            for (Transform transform : transforms) {
                buffer.append(transform.getIdentity());
            }
            executionName = buffer.append("]").toString();
        }
        // NOTE(review): this null check is unreachable — output was already
        // null-checked (and dereferenced) above; candidate for removal.
        if (output == null) {
            throw new ProgramParametrizationException("Unknown output type: " + outputName);
        }
        try {
            output.configure(parameters);
        } catch (RuntimeException ex) {
            throw new ProgramParametrizationException(ex.getMessage());
        }
        if (result != null) {
            // Transform output if algorithm returned result DataSet
            if (transforms.size() > 0) {
                // Result transforms run in reverse order of the input transforms.
                Collections.reverse(transforms);
                for (Transform transform : transforms) {
                    result = (DataSet) transform.transformResult(result);
                }
            }
        }
        return this;
    }
}
public class ClientAsynchEventThreadPool {
    /**
     * This method will send a message to the connection listeners associated
     * with this connection.
     *
     * Dispatches one event to EVERY registered SICoreConnectionListener of the
     * connection. Each listener invocation is bracketed by
     * enterAsyncExceptionCallback()/exitAsyncExceptionCallback() so the
     * AsyncCallbackSynchronizer can serialize application callbacks, and any
     * exception a listener throws is FFDC'd and debugged but never propagated
     * (one misbehaving listener must not starve the others).
     *
     * @param conn      May be null if invoking an async callback
     * @param session   May be null if invoking a connection callback
     * @param exception May be null if invoking a connection callback
     * @param eventId   The event Id (0x0000 is an internal marker for a JFAP
     *                  error, never received across the wire)
     */
    private static void invokeCallback(SICoreConnection conn, ConsumerSession session, // d172528
                                       Exception exception, int eventId) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "invokeCallback", new Object[] { conn, session, exception, eventId });
        if (conn != null) // f174318
        { // f174318
            try {
                final AsyncCallbackSynchronizer asyncCallbackSynchronizer = ((ConnectionProxy) conn).getAsyncCallbackSynchronizer();
                SICoreConnectionListener[] myListeners = conn.getConnectionListeners();
                for (int x = 0; x < myListeners.length; x++) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, "Invoking callback on: " + myListeners[x]);
                    // Obtain permission from the callback synchronizer to call the application
                    asyncCallbackSynchronizer.enterAsyncExceptionCallback(); // start f174318
                    try {
                        switch (eventId) {
                            // This special event ID will not be received across the wire, but will
                            // be used internally when we get notified of a JFAP error.
                            case (0x0000):
                                myListeners[x].commsFailure(conn, (SIConnectionLostException) exception);
                                break;
                            case (CommsConstants.EVENTID_ME_QUIESCING): // f179464
                                myListeners[x].meQuiescing(conn);
                                break;
                            case (CommsConstants.EVENTID_ME_TERMINATED): // f179464
                                myListeners[x].meTerminated(conn); // f179464
                                break; // f179464
                            case (CommsConstants.EVENTID_ASYNC_EXCEPTION): // d172528 / f179464
                                myListeners[x].asynchronousException(session, exception); // d172528
                                break; // d172528
                            default:
                                // Should never happen
                                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                    SibTr.debug(tc, "Invalid event ID: " + eventId);
                                break;
                        }
                    } catch (Exception e) {
                        // A listener blew up: record it for serviceability but keep
                        // delivering the event to the remaining listeners.
                        FFDCFilter.processException(e, CLASS_NAME + ".invokeCallback",
                                                    CommsConstants.CLIENTASYNCHEVENTTHREADPOOL_INVOKE_01,
                                                    new Object[] { myListeners[x], conn, session, exception, "" + eventId });
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "Caught an exception from the callback", e);
                    } finally {
                        // Tell the callback synchronizer that we have completed the exception callback
                        asyncCallbackSynchronizer.exitAsyncExceptionCallback();
                    } // end f174318
                } // f174318
            } catch (SIException e) {
                // No FFDC Code needed
                // We couldn't get hold of the connection listeners for some reason. Not a lot we can
                // do here except debug the failure
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(tc, "Unable to get connection listeners", e);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "invokeCallback");
    }
}
public class AttributeNodeMapImpl { /** * Exclude namespace declaration */ protected int getNumAttr ( ) { } }
int num = element . tree . elemNodeNumAttributes [ element . tree . nodeRepID [ element . node ] ] ; return num >= 0 ? num : 0 ;
public class JobTracker {
    /**
     * Accept and process a new TaskTracker profile. We might
     * have known about the TaskTracker previously, or it might
     * be brand-new. All task-tracker structures have already
     * been updated. Just process the contained tasks and any
     * jobs that might be affected.
     *
     * For each reported task: (1) clear it from the launch-expiry tracker once
     * it is assigned, (2) schedule job/task cleanup on this tracker when the
     * job is unknown or not yet initialized, (3) otherwise fold the status into
     * the JobInProgress and notify listeners on a run-state transition, and
     * (4) relay any 'failed fetch' notifications to the affected map tasks.
     *
     * @param status heartbeat status reported by a task tracker
     */
    void updateTaskStatuses(TaskTrackerStatus status) {
        String trackerName = status.getTrackerName();
        for (TaskStatus report : status.getTaskReports()) {
            report.setTaskTracker(trackerName);
            TaskAttemptID taskId = report.getTaskID();
            // Remove it from the expired task list
            if (report.getRunState() != TaskStatus.State.UNASSIGNED) {
                expireLaunchingTasks.removeTask(taskId);
            }
            JobInProgress job = getJob(taskId.getJobID());
            if (job == null) {
                // Unknown job: remember it so this tracker is told to clean it up.
                // if job is not there in the cleanup list ... add it
                synchronized (trackerToJobsToCleanup) {
                    Set<JobID> jobs = trackerToJobsToCleanup.get(trackerName);
                    if (jobs == null) {
                        jobs = new HashSet<JobID>();
                        trackerToJobsToCleanup.put(trackerName, jobs);
                    }
                    jobs.add(taskId.getJobID());
                }
                continue;
            }
            if (!job.inited()) {
                // if job is not yet initialized ... kill the attempt
                synchronized (trackerToTasksToCleanup) {
                    Set<TaskAttemptID> tasks = trackerToTasksToCleanup.get(trackerName);
                    if (tasks == null) {
                        tasks = new HashSet<TaskAttemptID>();
                        trackerToTasksToCleanup.put(trackerName, tasks);
                    }
                    tasks.add(taskId);
                }
                continue;
            }
            TaskInProgress tip = taskidToTIPMap.get(taskId);
            // Check if the tip is known to the jobtracker. In case of a restarted
            // jt, some tasks might join in later
            if (tip != null) {
                // Update the job and inform the listeners if necessary
                JobStatus prevStatus = (JobStatus) job.getStatus().clone();
                // Clone TaskStatus object here, because JobInProgress
                // or TaskInProgress can modify this object and
                // the changes should not get reflected in TaskTrackerStatus.
                // An old TaskTrackerStatus is used later in countMapTasks, etc.
                job.updateTaskStatus(tip, (TaskStatus) report.clone());
                JobStatus newStatus = (JobStatus) job.getStatus().clone();
                // Update the listeners if an incomplete job completes
                if (prevStatus.getRunState() != newStatus.getRunState()) {
                    JobStatusChangeEvent event = new JobStatusChangeEvent(job, EventType.RUN_STATE_CHANGED, prevStatus, newStatus);
                    updateJobInProgressListeners(event);
                }
            } else {
                LOG.info("Serious problem. While updating status, cannot find taskid " + report.getTaskID());
            }
            // Process 'failed fetch' notifications
            List<TaskAttemptID> failedFetchMaps = report.getFetchFailedMaps();
            if (failedFetchMaps != null) {
                TaskAttemptID reportingAttempt = report.getTaskID();
                for (TaskAttemptID mapTaskId : failedFetchMaps) {
                    TaskInProgress failedFetchMap = taskidToTIPMap.get(mapTaskId);
                    if (failedFetchMap != null) {
                        // Gather information about the map which has to be failed, if need be
                        String failedFetchTrackerName = getAssignedTracker(mapTaskId);
                        if (failedFetchTrackerName == null) {
                            failedFetchTrackerName = "Lost task tracker";
                        }
                        ((JobInProgress) failedFetchMap.getJob()).fetchFailureNotification(reportingAttempt, failedFetchMap, mapTaskId, failedFetchTrackerName);
                    }
                }
            }
        }
    }
}
public class ModelUtil { /** * Calculate a collection of all extending types for the given base types * @ param baseTypes the collection of types to calculate the union of all extending types */ public static Collection < ModelElementType > calculateAllExtendingTypes ( Model model , Collection < ModelElementType > baseTypes ) { } }
Set < ModelElementType > allExtendingTypes = new HashSet < ModelElementType > ( ) ; for ( ModelElementType baseType : baseTypes ) { ModelElementTypeImpl modelElementTypeImpl = ( ModelElementTypeImpl ) model . getType ( baseType . getInstanceType ( ) ) ; modelElementTypeImpl . resolveExtendingTypes ( allExtendingTypes ) ; } return allExtendingTypes ;
public class PrimitiveValue { /** * Return byte array value for this PrimitiveValue given a particular type * @ param type of this value * @ return value expressed as a byte array * @ throws IllegalStateException if not a byte array value representation */ public byte [ ] byteArrayValue ( final PrimitiveType type ) { } }
if ( representation == Representation . BYTE_ARRAY ) { return byteArrayValue ; } else if ( representation == Representation . LONG && size == 1 && type == PrimitiveType . CHAR ) { byteArrayValueForLong [ 0 ] = ( byte ) longValue ; return byteArrayValueForLong ; } throw new IllegalStateException ( "PrimitiveValue is not a byte[] representation" ) ;
public class EventReadWriteLock { /** * Temporarily suspend read locks and wait for an event to be posted . * Block until an event is posted . * N . B . this blocking method may wake up spuriously , and should be called from a loop that tests for the exit condition . * @ return < code > true < / code > if an event was posted , and * < code > false < / code > if the wait returned spuriously or timed out . * @ throws InterruptedException if the thread was interrupted while waiting . * @ throws IllegalStateException if the current thread does not hold a read lock * @ throws IllegalStateException if the current thread holds a write lock */ final boolean waitForEvent ( ) throws InterruptedException { } }
// must get the event count BEFORE releasing the read locks // to avoid a lost update final int oldEventCount = eventLock . getEventCount ( ) ; final int readLockCount = releaseReadLocks ( ) ; if ( readLockCount == 0 ) throw new IllegalStateException ( "Must hold read lock" ) ; if ( getWriteHoldCount ( ) > 0 ) throw new IllegalStateException ( "Must not hold write lock" ) ; try { return eventLock . wait ( oldEventCount ) ; } finally { acquireReadLocks ( readLockCount ) ; }
public class ContextualLoggerFactory { /** * Returns a contextual logger . * @ param name the contextual logger name * @ param context the logger context * @ return the logger */ public static ContextualLogger getLogger ( String name , LoggerContext context ) { } }
return new ContextualLogger ( LoggerFactory . getLogger ( name ) , context ) ;
public class JStormUtils { /** * use launchProcess to execute a command * @ param command command to be executed * @ throws ExecuteException * @ throws IOException */ public static void exec_command ( String command ) throws IOException { } }
launchProcess ( command , new HashMap < String , String > ( ) , false ) ;
public class NutDataInputStream { /** * Read a simple var int up to 32 bits */ public int readVarInt ( ) throws IOException { } }
boolean more ; int result = 0 ; do { int b = in . readUnsignedByte ( ) ; more = ( b & 0x80 ) == 0x80 ; result = 128 * result + ( b & 0x7F ) ; // TODO Check for int overflow } while ( more ) ; return result ;
public class DynamoDBTableMapper { /** * Saves and deletes the objects given using one or more calls to the * batchWriteItem API . * @ param objectsToWrite The objects to write . * @ param objectsToDelete The objects to delete . * @ return The list of failed batches . * @ see com . amazonaws . services . dynamodbv2 . datamodeling . DynamoDBMapper # batchWrite */ public List < DynamoDBMapper . FailedBatch > batchWrite ( Iterable < T > objectsToWrite , Iterable < T > objectsToDelete ) { } }
return mapper . batchWrite ( objectsToWrite , objectsToDelete ) ;
public class DumpProcessingController { /** * Processes the most recent dump of the sites table to extract information * about registered sites . * @ return a Sites objects that contains the extracted information , or null * if no sites dump was available ( typically in offline mode without * having any previously downloaded sites dumps ) * @ throws IOException * if there was a problem accessing the sites table dump or the * dump download directory */ public Sites getSitesInformation ( ) throws IOException { } }
MwDumpFile sitesTableDump = getMostRecentDump ( DumpContentType . SITES ) ; if ( sitesTableDump == null ) { return null ; } // Create a suitable processor for such dumps and process the file : MwSitesDumpFileProcessor sitesDumpFileProcessor = new MwSitesDumpFileProcessor ( ) ; sitesDumpFileProcessor . processDumpFileContents ( sitesTableDump . getDumpFileStream ( ) , sitesTableDump ) ; return sitesDumpFileProcessor . getSites ( ) ;
public class BaasUser { /** * Checks if this user is the currently logged in user on this device . * @ return true if < code > this = = BaasUser . current ( ) < / code > */ public boolean isCurrent ( ) { } }
BaasUser current = current ( ) ; if ( current == null ) return false ; Logger . debug ( "Current username is %s and mine is %s" , current . username , username ) ; return current . username . equals ( username ) ;
public class JaxbPUnit20 {
    /**
     * Gets the value of the sharedCacheMode property.
     *
     * Converts the SharedCacheMode from the class defined in JAXB
     * (com.ibm.ws.jpa.pxml20.PersistenceUnitCachingType) to JPA
     * (javax.persistence.SharedCacheMode). Per the spec, UNSPECIFIED is
     * returned when the element is absent from the xml (null).
     *
     * @return value of the sharedCacheMode property.
     */
    @Override
    public SharedCacheMode getSharedCacheMode() {
        final PersistenceUnitCachingType jaxbMode = ivPUnit.getSharedCacheMode();
        if (jaxbMode == PersistenceUnitCachingType.ALL) {
            return SharedCacheMode.ALL;
        }
        if (jaxbMode == PersistenceUnitCachingType.NONE) {
            return SharedCacheMode.NONE;
        }
        if (jaxbMode == PersistenceUnitCachingType.ENABLE_SELECTIVE) {
            return SharedCacheMode.ENABLE_SELECTIVE;
        }
        if (jaxbMode == PersistenceUnitCachingType.DISABLE_SELECTIVE) {
            return SharedCacheMode.DISABLE_SELECTIVE;
        }
        // Not specified in xml (including null) -> UNSPECIFIED, per the spec.
        return SharedCacheMode.UNSPECIFIED;
    }
}
public class XYMoneyStep { /** * index < 0 means that we emphasize no point at all */ public void emphasizePoint ( int index ) { } }
if ( dots == null || dots . length < ( index - 1 ) ) return ; // impossible ! // if no change , nothing to do if ( emphasizedPoint == index ) return ; // de - emphasize the current emphasized point if ( emphasizedPoint >= 0 ) { dots [ emphasizedPoint ] . attr ( "r" , dotNormalSize ) ; emphasizedPoint = - 1 ; } if ( index >= 0 ) { dots [ index ] . attr ( "r" , dotBigSize ) ; emphasizedPoint = index ; }
public class VcfHeaderParser { /** * Read the VCF header from the specified readable . * @ param readable readable to read from , must not be null * @ return the VCF header read from the specified readable * @ throws IOException if an I / O error occurs */ public static VcfHeader header ( final Readable readable ) throws IOException { } }
checkNotNull ( readable ) ; ParseListener parseListener = new ParseListener ( ) ; VcfParser . parse ( readable , parseListener ) ; return parseListener . getHeader ( ) ;
public class LoadBalancerSupportImpl {
    /**
     * Removes the given IP endpoints from the load balancer by deleting every
     * pool member whose "address" matches one of the requested addresses.
     *
     * @param fromLoadBalancerId the load balancer whose members are scanned
     * @param addresses          the member addresses to remove
     */
    @Override
    public void removeIPEndpoints(@Nonnull String fromLoadBalancerId, @Nonnull String... addresses) throws CloudException, InternalException {
        NovaMethod method = new NovaMethod(getProvider());
        for (JSONObject member : findAllMembers(fromLoadBalancerId)) {
            for (String address : addresses) {
                try {
                    if (address.equals(member.getString("address"))) {
                        method.deleteNetworks(getMembersResource(), member.getString("id"));
                    }
                } catch (JSONException e) {
                    // NOTE(review): a member with malformed JSON is only dumped to
                    // stderr and silently skipped; consider routing this through a
                    // logger or rethrowing as InternalException -- confirm whether
                    // best-effort removal is the intended contract here.
                    e.printStackTrace();
                }
            }
        }
    }
}
public class LongBitSet { /** * this = this XOR other */ void xor ( LongBitSet other ) { } }
assert other . numWords <= numWords : "numWords=" + numWords + ", other.numWords=" + other . numWords ; int pos = Math . min ( numWords , other . numWords ) ; while ( -- pos >= 0 ) { bits [ pos ] ^= other . bits [ pos ] ; }
public class SecurityPolicyClient { /** * Patches a rule at the specified priority . * < p > Sample code : * < pre > < code > * try ( SecurityPolicyClient securityPolicyClient = SecurityPolicyClient . create ( ) ) { * Integer priority = 0; * ProjectGlobalSecurityPolicyName securityPolicy = ProjectGlobalSecurityPolicyName . of ( " [ PROJECT ] " , " [ SECURITY _ POLICY ] " ) ; * SecurityPolicyRule securityPolicyRuleResource = SecurityPolicyRule . newBuilder ( ) . build ( ) ; * Operation response = securityPolicyClient . patchRuleSecurityPolicy ( priority , securityPolicy . toString ( ) , securityPolicyRuleResource ) ; * < / code > < / pre > * @ param priority The priority of the rule to patch . * @ param securityPolicy Name of the security policy to update . * @ param securityPolicyRuleResource Represents a rule that describes one or more match conditions * along with the action to be taken when traffic matches this condition ( allow or deny ) . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation patchRuleSecurityPolicy ( Integer priority , String securityPolicy , SecurityPolicyRule securityPolicyRuleResource ) { } }
PatchRuleSecurityPolicyHttpRequest request = PatchRuleSecurityPolicyHttpRequest . newBuilder ( ) . setPriority ( priority ) . setSecurityPolicy ( securityPolicy ) . setSecurityPolicyRuleResource ( securityPolicyRuleResource ) . build ( ) ; return patchRuleSecurityPolicy ( request ) ;
public class ALU { public static Object urshift ( final Object o1 , final Object o2 ) { } }
requireNonNull ( o1 , o2 ) ; int right = requireNumber ( o2 ) . intValue ( ) ; switch ( getTypeMark ( o1 ) ) { case CHAR : return ( ( Character ) o1 ) >>> right ; case BYTE : return ( ( Byte ) o1 ) >>> right ; case SHORT : return ( ( Short ) o1 ) >>> right ; case INTEGER : return ( ( Integer ) o1 ) >>> right ; case LONG : return ( ( Long ) o1 ) >>> right ; default : throw unsupportedTypeException ( o1 , o2 ) ; }
public class ObjectEnvelopeOrdering { /** * Finds edges base on a specific collection descriptor ( 1 : n and m : n ) * and adds them to the edge map . * @ param vertex the object envelope vertex holding the collection * @ param cds the collection descriptor */ private void addCollectionEdges ( Vertex vertex , CollectionDescriptor cds ) { } }
ObjectEnvelope envelope = vertex . getEnvelope ( ) ; Object col = cds . getPersistentField ( ) . get ( envelope . getRealObject ( ) ) ; Object [ ] refObjects ; if ( col == null || ( ProxyHelper . isCollectionProxy ( col ) && ! ProxyHelper . getCollectionProxy ( col ) . isLoaded ( ) ) ) { refObjects = EMPTY_OBJECT_ARRAY ; } else { refObjects = BrokerHelper . getCollectionArray ( col ) ; } Class refClass = cds . getItemClass ( ) ; for ( int i = 0 ; i < vertices . length ; i ++ ) { Edge edge = null ; Vertex refVertex = vertices [ i ] ; ObjectEnvelope refEnvelope = refVertex . getEnvelope ( ) ; if ( refClass . isInstance ( refEnvelope . getRealObject ( ) ) ) { if ( containsObject ( refEnvelope . getRealObject ( ) , refObjects ) ) { if ( cds . isMtoNRelation ( ) ) { edge = buildConcreteMNEdge ( vertex , refVertex ) ; } else { edge = buildConcrete1NEdge ( vertex , refVertex ) ; } } else { if ( cds . isMtoNRelation ( ) ) { edge = buildPotentialMNEdge ( vertex , refVertex ) ; } else { edge = buildPotential1NEdge ( vertex , refVertex ) ; } } } if ( edge != null ) { if ( ! edgeList . contains ( edge ) ) { edgeList . add ( edge ) ; } else { edge . increaseWeightTo ( edge . getWeight ( ) ) ; } } }
public class Swaption { /** * This method returns the value of the product using a Black - Scholes model for the swap rate * The model is determined by a discount factor curve and a swap rate volatility . * @ param forwardCurve The forward curve on which to value the swap . * @ param swaprateVolatility The Black volatility . * @ return Value of this product */ public double getValue ( ForwardCurveInterface forwardCurve , double swaprateVolatility ) { } }
double swaprate = swaprates [ 0 ] ; for ( double swaprate1 : swaprates ) { if ( swaprate1 != swaprate ) { throw new RuntimeException ( "Uneven swaprates not allows for analytical pricing." ) ; } } double [ ] swapTenor = new double [ fixingDates . length + 1 ] ; System . arraycopy ( fixingDates , 0 , swapTenor , 0 , fixingDates . length ) ; swapTenor [ swapTenor . length - 1 ] = paymentDates [ paymentDates . length - 1 ] ; double forwardSwapRate = Swap . getForwardSwapRate ( new TimeDiscretization ( swapTenor ) , new TimeDiscretization ( swapTenor ) , forwardCurve ) ; double swapAnnuity = SwapAnnuity . getSwapAnnuity ( new TimeDiscretization ( swapTenor ) , forwardCurve ) ; return AnalyticFormulas . blackModelSwaptionValue ( forwardSwapRate , swaprateVolatility , exerciseDate , swaprate , swapAnnuity ) ;
public class AbstractBoottimeAddStepHandler { /** * If { @ link OperationContext # isBooting ( ) } returns { @ code true } , invokes * { @ link # performBoottime ( OperationContext , org . jboss . dmr . ModelNode , org . jboss . as . controller . registry . Resource ) } , * else invokes { @ link OperationContext # reloadRequired ( ) } . * { @ inheritDoc } */ @ Override protected final void performRuntime ( OperationContext context , ModelNode operation , Resource resource ) throws OperationFailedException { } }
if ( context . isBooting ( ) ) { performBoottime ( context , operation , resource ) ; } else { context . reloadRequired ( ) ; }
public class Transform {
    /**
     * Transforms the specified <code>ptSrc</code> and stores the result
     * in <code>ptDst</code>.
     * If <code>ptSrc</code> and <code>ptDst</code> are the same
     * object, the input point is correctly overwritten with
     * the transformed point.
     * <p>
     * Note: this method is <code>void</code> -- the transformed point is
     * delivered solely through <code>ptDst</code>. (The earlier javadoc's
     * claims that <code>ptDst</code> is returned, and that a new
     * <code>Point2D</code> is allocated when <code>ptDst</code> is
     * <code>null</code>, do not match this signature; behavior for a
     * <code>null</code> <code>ptDst</code> depends on the underlying JSO
     * implementation -- verify before relying on it.)
     *
     * @param ptSrc the specified <code>Point2D</code> to be transformed
     * @param ptDst the specified <code>Point2D</code> that stores the
     *              result of transforming <code>ptSrc</code>
     */
    public final void transform(final Point2D ptSrc, final Point2D ptDst) {
        // Delegate to the underlying JavaScript overlay object's transform.
        m_jso.transform(ptSrc.getJSO(), ptDst.getJSO());
    }
}
public class Json { /** * Writes the value as a field on the current JSON object , without writing the actual class . * @ param value May be null . * @ see # writeValue ( String , Object , Class , Class ) */ public void writeValue ( String name , Object value ) { } }
try { writer . name ( name ) ; } catch ( IOException ex ) { throw new JsonException ( ex ) ; } if ( value == null ) writeValue ( value , null , null ) ; else writeValue ( value , value . getClass ( ) , null ) ;
public class ContentMetadataKeyHierarchy {
    /**
     * Sets the hierarchyLevels value for this ContentMetadataKeyHierarchy.
     *
     * @param hierarchyLevels The levels of the {@code ContentMetadataKeyHierarchy}. This
     *                        attribute is readonly and the hierarchy levels must form a continuous set of
     *                        1, 2, ..., N where N is the number of levels in the hierarchy.
     */
    public void setHierarchyLevels(com.google.api.ads.admanager.axis.v201808.ContentMetadataKeyHierarchyLevel[] hierarchyLevels) {
        // Plain generated-bean setter: the level-sequence constraint described
        // above is NOT validated here.
        this.hierarchyLevels = hierarchyLevels;
    }
}
public class AutoConfigurationImportSelector { /** * Handle any invalid excludes that have been specified . * @ param invalidExcludes the list of invalid excludes ( will always have at least one * element ) */ protected void handleInvalidExcludes ( List < String > invalidExcludes ) { } }
StringBuilder message = new StringBuilder ( ) ; for ( String exclude : invalidExcludes ) { message . append ( "\t- " ) . append ( exclude ) . append ( String . format ( "%n" ) ) ; } throw new IllegalStateException ( String . format ( "The following classes could not be excluded because they are" + " not auto-configuration classes:%n%s" , message ) ) ;
public class BaseRpcServlet {
    /**
     * Resolve the RPC protocol from the HTTP request's Content-Type header.
     * (Original javadoc, translated: if it is not set, fall back to the default
     * {@link com.baidu.beidou.navi.constant.NaviCommonConstant#DEFAULT_PROTOCAL_CONTENT_TYPE}.)
     * <p>
     * NOTE(review): the code actually THROWS {@code InvalidRequestException}
     * when the Content-Type is missing/empty, so the documented fallback never
     * happens -- confirm which behavior is intended.
     *
     * @param httpServletRequest the incoming request
     * @return the lower-cased protocol portion of the Content-Type (text before ';')
     */
    protected String getProtocolByHttpContentType(HttpServletRequest httpServletRequest) {
        if (StringUtil.isEmpty(httpServletRequest.getContentType())) {
            throw new InvalidRequestException("Rpc protocol invalid");
        }
        String protocol = httpServletRequest.getContentType().split(";")[0];
        // NOTE(review): String.split never yields null elements, so this branch
        // is unreachable dead code; the default content type below is never applied.
        if (protocol == null) {
            protocol = NaviCommonConstant.DEFAULT_PROTOCAL_CONTENT_TYPE;
            if (LOG.isDebugEnabled()) {
                LOG.debug("Content-type set to default value " + NaviCommonConstant.DEFAULT_PROTOCAL_CONTENT_TYPE);
            }
        } else {
            protocol = protocol.toLowerCase();
        }
        return protocol;
    }
}
public class EJBMDOrchestrator {

    /**
     * Determine the Home's method level metadata.
     * <p>
     * Builds, for every method of the home interface, the per-method attributes
     * (transaction, isolation level, read-only/access intent, activity session,
     * security policy, and CMP 1.x custom-finder flags) and returns them wrapped
     * in a {@code MethodDataWrapper}. Defaults are computed first, then overridden
     * from the supplied WCCM (XML) lists; home methods carry no annotations.
     * Note this method also mutates {@code bmd} (custom-finder flags) and some
     * {@code BeanMetaData} statics (one-time audit messages) as a side effect.
     *
     * @param isEntity true if the bean is an entity bean, false for a session bean
     * @param methodInterface which EJB method-interface type these methods belong to
     * @param homeMethods the reflected methods of the home interface
     * @param sessionHomeNoTxAttrMethods session-home method names forced to TX_NOT_SUPPORTED
     * @param sessionHomeNoTxAttrMethodSignatures their matching signatures
     * @param entityHomeNoTxAttrMethods entity-home method names forced to TX_NOT_SUPPORTED
     * @param entityHomeNoTxAttrMethodSignatures their matching signatures
     * @param accessIntentList WCCM access-intent settings (used for CMP 1.x only)
     * @param isoLevelList WCCM isolation-level settings
     * @param transactionList WCCM container-transaction settings
     * @param securityList WCCM method-permission settings
     * @param excludeList WCCM exclude-list (methods denied to all callers)
     * @param activitySessionList WCCM activity-session settings
     * @param bmd the bean's metadata; read extensively and updated here
     * @return the Home's method level meta data
     */
    private MethodDataWrapper initializeHomeMethodMD(boolean isEntity,
                                                     MethodInterface methodInterface,
                                                     Method homeMethods[],
                                                     String[] sessionHomeNoTxAttrMethods,
                                                     String[] sessionHomeNoTxAttrMethodSignatures, // PQ63130
                                                     String[] entityHomeNoTxAttrMethods,
                                                     String[] entityHomeNoTxAttrMethodSignatures, // PQ63130
                                                     List<?> accessIntentList,
                                                     List<?> isoLevelList,
                                                     List<ContainerTransaction> transactionList,
                                                     List<MethodPermission> securityList,
                                                     ExcludeList excludeList,
                                                     List<ActivitySessionMethod> activitySessionList,
                                                     BeanMetaData bmd) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "initializeHomeMethodMetadata");

        int metaMethodElementKind = methodInterface.getValue(); // d162441

        // Parallel per-method attribute arrays, one slot per home method.
        EJBMethodInfoImpl[] homeMethodInfos = new EJBMethodInfoImpl[homeMethods.length];
        int[] homeIsolationAttrs = new int[homeMethods.length];
        boolean[] homeReadOnlyAttrs = new boolean[homeMethods.length]; // 94756
        String[] homeMethodNames = new String[homeMethods.length];
        Class<?>[][] homeMethodParamTypes = new Class<?>[homeMethods.length][];
        String[] homeMethodSignatures = new String[homeMethods.length];
        String[] jdiHomeMethodSignatures = new String[homeMethods.length];
        boolean[] homeDenyAll = new boolean[homeMethods.length]; // d366845.11.1
        boolean[] homePermitAll = new boolean[homeMethods.length]; // d366845.11.1
        @SuppressWarnings("unchecked")
        ArrayList<String[]>[] homeRoleArrays = new ArrayList[homeMethods.length]; // d366845.11.1
        TransactionAttribute[] homeTransactionAttrs = new TransactionAttribute[homeMethods.length];
        ActivitySessionAttribute[] homeActivitySessionAttrs = new ActivitySessionAttribute[homeMethods.length]; // LIDB441.5

        // Pass 1: capture reflective data and seed every slot with its default value.
        for (int i = 0; i < homeMethods.length; i++) {
            homeMethodNames[i] = homeMethods[i].getName();
            homeMethodParamTypes[i] = homeMethods[i].getParameterTypes();
            homeMethodSignatures[i] = MethodAttribUtils.methodSignatureOnly(homeMethods[i]);
            jdiHomeMethodSignatures[i] = MethodAttribUtils.jdiMethodSignature(homeMethods[i]);

            // Initialize isolationAttrs to TX_NONE (unspecified) by default. Portability layer
            // will do the correct thing to replace Tx_None with the proper value, depending on DB type.
            homeIsolationAttrs[i] = java.sql.Connection.TRANSACTION_NONE;

            // For Bean-managed Tx (session beans only), initialize home method Tx attrs
            // to TX_BEAN_MANAGED. For other cases, initialize to TX_REQUIRED.
            // Home methods are considered 'internal', and the tx attribute cannot be set
            // via annotations, so set the default here; XML may still override.
            homeTransactionAttrs[i] = (bmd.usesBeanManagedTx) ? TransactionAttribute.TX_BEAN_MANAGED : TransactionAttribute.TX_REQUIRED;

            // LIDB441.5 - For Bean-managed AS (session beans only), initialize method AS attrs
            // to AS_BEAN_MANAGED. For other cases, initialize to AS_UNKNOWN.
            homeActivitySessionAttrs[i] = (bmd.usesBeanManagedAS) ? ActivitySessionAttribute.AS_BEAN_MANAGED : ActivitySessionAttribute.AS_UNKNOWN;
        }

        // Process the Security MetaData // 366845.11.1
        // Process methods to exclude via security from WCCM. If there is no excludeList,
        // then any methods to exclude must have been specified via annotations. // PK93643
        if (excludeList != null) {
            // Process all exclude-list elements.
            MethodAttribUtils.getXMLMethodsDenied(homeDenyAll,
                                                  metaMethodElementKind,
                                                  homeMethodNames,
                                                  homeMethodParamTypes,
                                                  excludeList,
                                                  bmd); // PK93643
        }

        // Process all Security Attributes from WCCM. If there is no securityList,
        // then any security attributes must have been specified via annotations. // PK93643
        if (securityList != null) {
            // Process all method-permission elements.
            MethodAttribUtils.getXMLPermissions(homeRoleArrays,
                                                homePermitAll,
                                                homeDenyAll,
                                                metaMethodElementKind,
                                                homeMethodNames,
                                                homeMethodParamTypes,
                                                securityList,
                                                bmd); // PK93643
        }

        // FYI... Homes have no annotations.
        // Only get and check method-level Tx attributes if container is managing Tx.
        if (!bmd.usesBeanManagedTx) {
            // Process all Transaction Attributes from WCCM. If there is no transactionList,
            // assume this is an annotations only configuration scenario.
            if (transactionList != null) {
                MethodAttribUtils.getXMLCMTransactions(homeTransactionAttrs,
                                                       metaMethodElementKind,
                                                       homeMethodNames,
                                                       homeMethodParamTypes,
                                                       transactionList,
                                                       bmd); // PK93643
            }

            // Note that home methods may not have annotations, so no need to call
            // the annotation processing code here... defaults were set above.

            // If bean uses container-managed Tx, override setting to TX_NOT_SUPPORTED for:
            // - All Home interface methods of a session bean
            // - getEJBMetaData and getHomeHandle of the Home interface of an entity bean
            if (isEntity) {
                MethodAttribUtils.checkTxAttrs(homeTransactionAttrs,
                                               homeMethodNames,
                                               homeMethodSignatures, // PQ63130
                                               entityHomeNoTxAttrMethods,
                                               entityHomeNoTxAttrMethodSignatures, // PQ63130
                                               TransactionAttribute.TX_NOT_SUPPORTED);
            } else {
                // SessionBean
                MethodAttribUtils.checkTxAttrs(homeTransactionAttrs,
                                               homeMethodNames,
                                               homeMethodSignatures, // PQ63130
                                               sessionHomeNoTxAttrMethods,
                                               sessionHomeNoTxAttrMethodSignatures, // PQ63130
                                               TransactionAttribute.TX_NOT_SUPPORTED);
            }
        } // if !usesBeanManagedTx

        // Only get method-level Activity Session attributes if container is managing AS.
        if (!bmd.usesBeanManagedAS) {
            MethodAttribUtils.getActivitySessions(homeActivitySessionAttrs,
                                                  metaMethodElementKind,
                                                  homeMethodNames,
                                                  homeMethodParamTypes,
                                                  activitySessionList,
                                                  bmd.enterpriseBeanName,
                                                  bmd.usesBeanManagedAS); // LIDB441.5
        } // if !usesBeanManagedAS

        if (bmd.ivModuleVersion <= BeanMetaData.J2EE_EJB_VERSION_1_1 || bmd.cmpVersion == InternalConstants.CMP_VERSION_1_X) {
            // Need Isolation level for
            // 1) All beans in Java EE 1.2 app
            // 2) CMP11 beans ONLY in Java EE 1.3 app
            // Get user-specified isolation level settings.
            getIsolationLevels(homeIsolationAttrs, // F743-18775
                               metaMethodElementKind,
                               homeMethodNames,
                               homeMethodParamTypes,
                               isoLevelList,
                               bmd.wccm.enterpriseBean);
            if (bmd.cmpVersion == InternalConstants.CMP_VERSION_1_X) {
                // Get user-specified access intent settings.
                getReadOnlyAttributes(homeReadOnlyAttrs, // F743-18775
                                      metaMethodElementKind,
                                      homeMethodNames,
                                      homeMethodParamTypes,
                                      accessIntentList,
                                      bmd.wccm.enterpriseBean);
            }
        }

        // If the user specified the FbpkAlwaysReadOnly system property, set the readonly attribute on
        // findByPrimaryKey methods to True. This is to emulate the default WS3.5 behavior for
        // these methods, which is needed in some cases (4.0 default is false). This behavior is
        // required when the FOR UPDATE clause on the fBPK SQL query is not wanted.
        // Currently it's an all-or-nothing proposition (all EntityBeans or none); this may be enhanced
        // in the future to allow setting on specific bean types.
        // Property access & default now handled by ContainerProperties. 391302
        boolean fbpkReadOnlyOverride = false;
        if (FbpkAlwaysReadOnly) {
            fbpkReadOnlyOverride = true;
            // If the user overrode all beans, only show the message once in the console (rather than
            // for every bean type).
            if (!BeanMetaData.fbpkReadOnlyOverrideAllBeans) {
                BeanMetaData.fbpkReadOnlyOverrideAllBeans = true;
                Tr.audit(tc, "FBPK_READONLY_OVERRIDE_ALL_CNTR0061I");
            }
        }

        // d112604.1 begin — CMP 1.x "custom finder SQL FOR UPDATE" configuration.
        boolean cfHonorAccessIntent = false; // default is disabled unless the customer opts in below
        String cfMethodSignatures[][] = null;
        if (bmd.cmpVersion == InternalConstants.CMP_VERSION_1_X) {
            // Custom finder support only applies to 1_x beans.
            if (!BeanMetaData.allowCustomFinderSQLForUpdateALLBeans) {
                // Static initializer; avoid this if "all" is specified.
                String allowCustomFinderSQLForUpdateStr = AllowCustomFinderSQLForUpdate;
                if (isTraceOn && tc.isDebugEnabled()) {
                    if (allowCustomFinderSQLForUpdateStr != null) {
                        Tr.debug(tc, ContainerConfigConstants.allowCustomFinderSQLForUpdate + " Value : " + allowCustomFinderSQLForUpdateStr);
                    } else {
                        Tr.debug(tc, ContainerConfigConstants.allowCustomFinderSQLForUpdate + " Value : (null)");
                    }
                }
                if (allowCustomFinderSQLForUpdateStr != null) {
                    if (allowCustomFinderSQLForUpdateStr.equalsIgnoreCase("all")) {
                        // "all" enables the override globally (for every bean type, once).
                        BeanMetaData.allowCustomFinderSQLForUpdateALLBeans = true;
                        bmd.allowCustomFinderSQLForUpdateThisBean = true;
                    } else {
                        // Otherwise the property is a ':'-separated list of bean class names.
                        StringTokenizer parser = new StringTokenizer(allowCustomFinderSQLForUpdateStr, ":");
                        int numTokens = parser.countTokens();
                        for (int i = 0; i < numTokens; i++) {
                            String compString = parser.nextToken();
                            if (compString.equals(bmd.enterpriseBeanClassName)) {
                                bmd.allowCustomFinderSQLForUpdateThisBean = true;
                                if (isTraceOn && tc.isDebugEnabled()) {
                                    Tr.debug(tc, "Custom Finder SQL For Update Enabled for : " + bmd.enterpriseBeanClassName);
                                }
                                break;
                            } // if
                        } // for
                    } // else
                } // processing string complete
            } else {
                // Turn on for this bean if enabled across all beans
                // (e.g. a bean before this one set the static initializer).
                bmd.allowCustomFinderSQLForUpdateThisBean = true;
            }

            // This support is to be compatible with 390 applications that expect this
            // behavior to be enabled. The value is set to true to disable; negate to
            // clean up understanding during code flow.
            bmd.allowWAS390CustomFinderAtBeanLevel = !getWAS390CustomFinderBeanLevel(ContainerConfigConstants.allowWAS390CustomFinderAtBeanLevelStr, bmd).booleanValue();

            // Determine if this bean has any per-method overrides.
            String envCustomFinderMethodsStr = getCustomFinderSignatures(ContainerConfigConstants.envCustomFinderMethods, bmd);
            if (envCustomFinderMethodsStr != null) {
                bmd.allowCustomFinderSQLForUpdateMethodLevel = true;
                if (isTraceOn && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Method Level Finders Defined [" + envCustomFinderMethodsStr + "]");
                }
                // Initialize the per-method signature table.
                // (A commented-out per-signature debug trace was removed here for clarity.)
                cfMethodSignatures = bmd.getMethodLevelCustomFinderMethodSignatures(envCustomFinderMethodsStr);
            }

            cfHonorAccessIntent = false; // default is disabled unless customer specifies that
                                         // CMP 11 Custom Finders Access Intent should be honored
            if (bmd.allowCustomFinderSQLForUpdateThisBean || BeanMetaData.allowCustomFinderSQLForUpdateALLBeans) {
                if (bmd.allowWAS390CustomFinderAtBeanLevel) {
                    // 390 previous config can turn off a bean.
                    cfHonorAccessIntent = true; // this will prove true unless a 390 bean-level shut off is detected
                    Tr.audit(tc, "CUSTOMFINDER_SQLFORUPDATE_CNTR0078I", new Object[] { bmd.enterpriseBeanClassName });
                    // Honoring this support regardless of other settings.
                } else {
                    cfHonorAccessIntent = false; // note, a method level override can still control this
                }
            }

            // Custom Finder Access Intents are not honored when optimistic
            // concurrency control is in effect. This avoids server and
            // bean level controls; also need to override method level
            // below in terms of optimisticConcurrencyControl.
            if (bmd.optimisticConcurrencyControl && cfHonorAccessIntent) {
                cfHonorAccessIntent = false;
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "Optimistic Conncurrency Enabled, " + bmd.enterpriseBeanClassName + " Custom Finders will be read-only");
            }

            // Custom finder support not applicable when Option A in effect.
            if (bmd.optionACommitOption && cfHonorAccessIntent) {
                cfHonorAccessIntent = false;
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "Option A Enabled, " + bmd.enterpriseBeanClassName + " Custom Finders will be read-only");
            }
        } // if a CMP 1X bean
        // d112604.1 end

        int slotSize = bmd.container.getEJBRuntime().getMetaDataSlotSize(MethodMetaData.class);

        // Pass 2: build one EJBMethodInfoImpl per home method from the resolved attributes.
        for (int i = 0; i < homeMethods.length; i++) {
            // d112604.1 begin
            boolean finderDetect = false;
            boolean cfForUpdateSupported = false;
            boolean fbpkDetect = false;
            boolean isFBPK = false;

            // Need custom finders; FBPK should be checked subsequently.
            // Spec calls out that home finders must start with "find"; take minor perf
            // hit at this point...
            finderDetect = homeMethodNames[i].startsWith("find"); // d360576

            // Four cases override method level enablement: cmp1x bean, optimistic concurrency enabled,
            // Option A in effect, and if access intent is read only.
            // a) access intent needs to be update before any custom finder access intent
            //    support is enforced
            // b) this will override method level checking
            if (!homeReadOnlyAttrs[i] && // must be a r/w method, r/o only method opts
                !bmd.optimisticConcurrencyControl && // optimistic concurrency not supported case
                !bmd.optionACommitOption && // option A not supported
                bmd.cmpVersion == InternalConstants.CMP_VERSION_1_X) { // must be a 1_X bean (either module type ok)
                // Either enabled at server or bean level, or method-specific approval in effect.
                if (bmd.allowCustomFinderSQLForUpdateMethodLevel || cfHonorAccessIntent) {
                    if (finderDetect) {
                        fbpkDetect = homeMethodNames[i].equals("findByPrimaryKey");
                        if (finderDetect && !fbpkDetect) {
                            if (cfHonorAccessIntent) {
                                // Can avoid doing method compares below.
                                cfForUpdateSupported = true;
                            } else {
                                cfForUpdateSupported = false;
                            }
                            // Ok, check case where method level override in effect and bean & server overrides
                            // not in place; try to avoid as this is performance-intensive string checking.
                            if (bmd.allowCustomFinderSQLForUpdateMethodLevel && !cfForUpdateSupported) {
                                if (bmd.cfMethodSignatureEqual(homeMethodNames[i], MethodAttribUtils.methodSignatureOnly(homeMethods[i]), cfMethodSignatures)) {
                                    cfForUpdateSupported = true;
                                    if (isTraceOn && tc.isDebugEnabled()) {
                                        Tr.debug(tc, "Custom Finder Access Intent Method Level Override in Effect: " + MethodAttribUtils.methodSignature(homeMethods[i]));
                                    }
                                }
                            }
                            // (A commented-out debug trace was removed here for clarity.)
                        } // if method being processed is a custom finder
                    } // if finder
                } // if cfHonorAccessIntent || method level override in effect
            } // if custom finder supported in the static configuration of environment
            // Enforcing the honoring of access intent for custom finders complete...
            // d112604.1 end

            // Optimize fbpk Read Only override: if read-only currently, skip fbpk string comp.
            if (finderDetect) { // d170166
                isFBPK = homeMethodNames[i].equals("findByPrimaryKey");
                if (isFBPK && fbpkReadOnlyOverride) { // d170166
                    homeReadOnlyAttrs[i] = true;
                }
            }
            // d112604.1 end

            String[] homeRolesAllowed = null;
            if ((homeRoleArrays[i] != null) && !(homeRoleArrays[i].isEmpty())) {
                homeRolesAllowed = homeRoleArrays[i].toArray(new String[0]); // d366845.11.2
            }

            EJBMethodInfoImpl methodInfo = bmd.createEJBMethodInfoImpl(slotSize);
            methodInfo.initializeInstanceData(MethodAttribUtils.methodSignature(homeMethods[i]),
                                              homeMethodNames[i],
                                              bmd,
                                              methodInterface,
                                              homeTransactionAttrs[i],
                                              false);
            methodInfo.setMethodDescriptor(jdiHomeMethodSignatures[i]);
            methodInfo.setIsolationLevel(homeIsolationAttrs[i]);
            methodInfo.setActivitySessionAttribute(homeActivitySessionAttrs[i]);
            methodInfo.setReadOnly(homeReadOnlyAttrs[i]);
            methodInfo.setSecurityPolicy(homeDenyAll[i], homePermitAll[i], homeRolesAllowed);
            methodInfo.setCMP11CustomFinderWithForUpdateAI(cfForUpdateSupported);
            methodInfo.setCMP11FBPK(isFBPK);

            homeMethodInfos[i] = methodInfo;
        }

        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "initializeHomeMethodMetadata");

        return new MethodDataWrapper(homeMethodNames,
                                     homeMethodInfos,
                                     homeIsolationAttrs,
                                     homeReadOnlyAttrs); // f111627 End
    }
}
public class HashIntMap { /** * Remove an element with optional checking to see if we should shrink . * When this is called from our iterator , checkShrink = = false to avoid booching the buckets . */ protected Record < V > removeImpl ( int key , boolean checkShrink ) { } }
int index = keyToIndex ( key ) ; // go through the chain looking for a match for ( Record < V > prev = null , rec = _buckets [ index ] ; rec != null ; rec = rec . next ) { if ( rec . key == key ) { if ( prev == null ) { _buckets [ index ] = rec . next ; } else { prev . next = rec . next ; } _size -- ; if ( checkShrink ) { checkShrink ( ) ; } return rec ; } prev = rec ; } return null ;
public class StreamService { /** * { @ inheritDoc } */ public void publish ( Boolean dontStop ) { } }
// null is as good as false according to Boolean . valueOf ( ) so if null , interpret as false if ( dontStop == null || ! dontStop ) { IConnection conn = Red5 . getConnectionLocal ( ) ; if ( conn instanceof IStreamCapableConnection ) { IStreamCapableConnection streamConn = ( IStreamCapableConnection ) conn ; Number streamId = conn . getStreamId ( ) ; IClientStream stream = streamConn . getStreamById ( streamId ) ; if ( stream instanceof IBroadcastStream ) { IBroadcastStream bs = ( IBroadcastStream ) stream ; if ( bs . getPublishedName ( ) != null ) { IBroadcastScope bsScope = getBroadcastScope ( conn . getScope ( ) , bs . getPublishedName ( ) ) ; if ( bsScope != null ) { bsScope . unsubscribe ( bs . getProvider ( ) ) ; if ( conn instanceof BaseConnection ) { ( ( BaseConnection ) conn ) . unregisterBasicScope ( bsScope ) ; } } bs . close ( ) ; streamConn . deleteStreamById ( streamId ) ; } } } }
public class HttpPublishingComponentImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . javaee . ddmodel . wsbnd . HttpPublishing # getWebserviceSecurity ( ) */ @ Override public WebserviceSecurity getWebserviceSecurity ( ) { } }
if ( delegate == null ) { return this . webServiceSecurity ; } else { return this . webServiceSecurity == null ? delegate . getWebserviceSecurity ( ) : this . webServiceSecurity ; }
public class ClientAsynchEventThreadPool { /** * Dispatches a thread which will call the consumerSetChange method on the ConsumerSetChangeCallback passing in the supplied parameters . * @ param consumerSetChangeCallback * @ param isEmpty */ public void dispatchConsumerSetChangeCallbackEvent ( ConsumerSetChangeCallback consumerSetChangeCallback , boolean isEmpty ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "dispatchConsumerSetChangeCallbackEvent" , new Object [ ] { consumerSetChangeCallback , isEmpty } ) ; // Create a new ConsumerSetChangeCallbackThread and dispatch it . final ConsumerSetChangeCallbackThread thread = new ConsumerSetChangeCallbackThread ( consumerSetChangeCallback , isEmpty ) ; dispatchThread ( thread ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "dispatchConsumerSetChangeCallbackEvent" ) ;
public class Graphs { /** * Returns true if { @ code graph } has at least one cycle . A cycle is defined as a non - empty subset * of edges in a graph arranged to form a path ( a sequence of adjacent outgoing edges ) starting * and ending with the same node . * < p > This method will detect any non - empty cycle , including self - loops ( a cycle of length 1 ) . */ public static boolean hasCycle ( Graph < ? > graph ) { } }
int numEdges = graph . edges ( ) . size ( ) ; if ( numEdges == 0 ) { return false ; // An edge - free graph is acyclic by definition . } if ( ! graph . isDirected ( ) && numEdges >= graph . nodes ( ) . size ( ) ) { return true ; // Optimization for the undirected case : at least one cycle must exist . } Map < Object , NodeVisitState > visitedNodes = Maps . newHashMapWithExpectedSize ( graph . nodes ( ) . size ( ) ) ; for ( Object node : graph . nodes ( ) ) { if ( subgraphHasCycle ( graph , visitedNodes , node , null ) ) { return true ; } } return false ;
public class AbstractCompiler {

    /**
     * Crack a command line into individual arguments, honoring single and double
     * quotes (a quoted run becomes one argument, even when empty).
     *
     * @param toProcess the command line to process
     * @return the command line broken into strings. An empty or null toProcess
     *         parameter results in a zero sized array
     * @throws IllegalArgumentException if the line ends inside an open quote
     */
    private String[] parseArgLine(final String toProcess) {
        if (toProcess == null || toProcess.length() == 0) {
            // no command? no strings
            return new String[0];
        }
        // Tokenize so that quote characters and spaces come back as delimiters.
        final StringTokenizer tokenizer = new StringTokenizer(toProcess, "\"\' ", true);
        final List<String> parts = new ArrayList<String>();
        StringBuilder pending = new StringBuilder();
        char quoteChar = 0; // 0 = outside quotes, otherwise the active quote delimiter
        boolean closedQuote = false; // true right after a quoted run ends (keeps "" args)
        while (tokenizer.hasMoreTokens()) {
            final String token = tokenizer.nextToken();
            if (quoteChar != 0) {
                // Inside quotes: only the matching delimiter ends the quoted run.
                if (token.length() == 1 && token.charAt(0) == quoteChar) {
                    closedQuote = true;
                    quoteChar = 0;
                } else {
                    pending.append(token);
                }
            } else if ("\'".equals(token)) {
                quoteChar = '\'';
                closedQuote = false;
            } else if ("\"".equals(token)) {
                quoteChar = '"';
                closedQuote = false;
            } else if (" ".equals(token)) {
                // Space outside quotes terminates the current argument, if any.
                if (closedQuote || pending.length() != 0) {
                    parts.add(pending.toString());
                    pending = new StringBuilder();
                }
                closedQuote = false;
            } else {
                pending.append(token);
                closedQuote = false;
            }
        }
        if (closedQuote || pending.length() != 0) {
            parts.add(pending.toString());
        }
        if (quoteChar != 0) {
            throw new IllegalArgumentException("Unbalanced quotes in " + toProcess);
        }
        return parts.toArray(new String[parts.size()]);
    }
}
public class ReflectionUtils { /** * Silently invoke the specified methodName using reflection . * @ param object * @ param methodName * @ param parameters * @ return true if the invoke was successful */ public static Object invoke ( Object object , String methodName , Object ... parameters ) { } }
Method method = getMethodThatMatchesParameters ( object . getClass ( ) , methodName , parameters ) ; if ( method != null ) { try { return method . invoke ( object , parameters ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Unable to invoke method" , e ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( "Unable to invoke method" , e ) ; } catch ( InvocationTargetException e ) { if ( e . getCause ( ) instanceof RuntimeException ) { throw ( RuntimeException ) e . getCause ( ) ; } throw new RuntimeException ( e . getCause ( ) ) ; } } else { throw new RuntimeException ( "No method that matches [" + methodName + "] for class " + object . getClass ( ) . getSimpleName ( ) ) ; }
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class DivisibleTuples {

    /**
     * Keeps only the tuples whose elements are all divisible by {@code divisor}
     * and renders the survivors as a string.
     * <p>
     * Example: a list containing [6, 24, 12], [7, 9, 6], [12, 18, 21] filtered
     * by 6 yields "[[6, 24, 12]]".
     *
     * @param input_list list of tuples to be filtered
     * @param divisor the number by which all elements in a tuple must be divisible
     *        (a divisor of 0 raises ArithmeticException)
     * @return a stringified list of the tuples whose elements are all divisible
     *         by the divisor
     */
    public static String getDivisibleTuples(List<List<Integer>> input_list, int divisor) {
        List<List<Integer>> matches = new ArrayList<>();
        candidates:
        for (List<Integer> tuple : input_list) {
            for (int value : tuple) {
                if (value % divisor != 0) {
                    continue candidates; // one non-divisible element disqualifies the tuple
                }
            }
            matches.add(tuple);
        }
        return Arrays.toString(matches.toArray());
    }
}
public class ColumnMajorSparseMatrix { /** * Parses { @ link ColumnMajorSparseMatrix } from the given Matrix Market . * @ param is the input stream in Matrix Market format * @ return a parsed matrix * @ exception IOException if an I / O error occurs . */ public static ColumnMajorSparseMatrix fromMatrixMarket ( InputStream is ) throws IOException { } }
return Matrix . fromMatrixMarket ( is ) . to ( Matrices . SPARSE_COLUMN_MAJOR ) ;
public class Greeting { /** * Get the greeting message . * @ return the greeting message . */ public String greeting ( ) { } }
int current = counter . getAndAdd ( 1 ) ; if ( current % 2 != 0 ) { return format ( "Hello Rollbar number %d" , current ) ; } throw new RuntimeException ( "Fatal error at greeting number: " + current ) ;
public class OWLSEffect { /** * TODO * Currently acts as an example of how to add an SWRL expression to the OWL - S result * an instance of this class represents */ public void addExpression ( ) { } }
SWRL expression = getOWLModel ( ) . createSWRLExpression ( null ) ; if ( getOWLValueObject ( ) . owlValue ( ) . isIndividual ( ) ) { SWRLIndividual swrlIndividual = swrl ( ) . wrapIndividual ( getOWLValueObject ( ) . owlValue ( ) . castTo ( OWLIndividual . class ) ) ; Atom atom = swrl ( ) . createSameIndividualAtom ( swrlIndividual , swrlIndividual ) ; expression . setBody ( swrl ( ) . createList ( atom ) ) ; asOWLSResult ( ) . addEffect ( expression ) ; }
public class DefaultGroovyMethods { /** * Selects the minimum value found from the Iterator * using the closure to determine the correct ordering . * The iterator will become * exhausted of elements after this operation . * If the closure has two parameters * it is used like a traditional Comparator . I . e . it should compare * its two parameters for order , returning a negative integer , * zero , or a positive integer when the first parameter is less than , * equal to , or greater than the second respectively . Otherwise , * the Closure is assumed to take a single parameter and return a * Comparable ( typically an Integer ) which is then used for * further comparison . * @ param self an Iterator * @ param closure a Closure used to determine the correct ordering * @ return the minimum value * @ see # min ( java . util . Collection , groovy . lang . Closure ) * @ since 1.5.5 */ public static < T > T min ( Iterator < T > self , @ ClosureParams ( value = FromString . class , options = { } }
"T" , "T,T" } ) Closure closure ) { return min ( ( Iterable < T > ) toList ( self ) , closure ) ;
public class AffectedDeploymentOverlay { /** * It will look for all the deployments under the deploymentsRootAddress with a runtimeName in the specified list of * runtime names and then transform the operation so that every server having those deployments will redeploy the * affected deployments . * @ see # transformOperation * @ param removeOperation * @ param context * @ param deploymentsRootAddress * @ param runtimeNames * @ throws OperationFailedException */ public static void redeployLinksAndTransformOperation ( OperationContext context , ModelNode removeOperation , PathAddress deploymentsRootAddress , Set < String > runtimeNames ) throws OperationFailedException { } }
Set < String > deploymentNames = listDeployments ( context . readResourceFromRoot ( deploymentsRootAddress ) , runtimeNames ) ; Operations . CompositeOperationBuilder opBuilder = Operations . CompositeOperationBuilder . create ( ) ; if ( deploymentNames . isEmpty ( ) ) { for ( String s : runtimeNames ) { ServerLogger . ROOT_LOGGER . debugf ( "We haven't found any deployment for %s in server-group %s" , s , deploymentsRootAddress . getLastElement ( ) . getValue ( ) ) ; } } if ( removeOperation != null ) { opBuilder . addStep ( removeOperation ) ; } for ( String deploymentName : deploymentNames ) { opBuilder . addStep ( addRedeployStep ( deploymentsRootAddress . append ( DEPLOYMENT , deploymentName ) ) ) ; } List < DomainOperationTransmuter > transformers = context . getAttachment ( OperationAttachments . SLAVE_SERVER_OPERATION_TRANSMUTERS ) ; if ( transformers == null ) { context . attach ( OperationAttachments . SLAVE_SERVER_OPERATION_TRANSMUTERS , transformers = new ArrayList < > ( ) ) ; } final ModelNode slave = opBuilder . build ( ) . getOperation ( ) ; transformers . add ( new OverlayOperationTransmuter ( slave , context . getCurrentAddress ( ) ) ) ;
public class ResourceFactory { /** * 将对象以指定资源名注入到资源池中 * @ param < A > 泛型 * @ param autoSync 是否同步已被注入的资源 * @ param name 资源名 * @ param rs 资源对象 * @ return 旧资源对象 */ public < A > A register ( final boolean autoSync , final String name , final A rs ) { } }
checkResourceName ( name ) ; final Class < ? > claz = rs . getClass ( ) ; ResourceType rtype = claz . getAnnotation ( ResourceType . class ) ; if ( rtype == null ) { return ( A ) register ( autoSync , name , claz , rs ) ; } else { A old = null ; A t = ( A ) register ( autoSync , name , rtype . value ( ) , rs ) ; if ( t != null ) old = t ; return old ; }
public class Assertions { /** * Check that object is presented among provided elements and replace the object by equal element from the list . * @ param < T > type of object * @ param obj object to be checked * @ param list list of elements for checking * @ return equal element provided in the list * @ throws AssertionError if object is not found among defined ones * @ since 1.0.2 */ @ SafeVarargs @ Nullable public static < T > T assertAmong ( @ Nullable T obj , @ MayContainNull @ Nonnull final T ... list ) { } }
if ( obj == null ) { for ( final T i : assertNotNull ( list ) ) { if ( i == null ) { return i ; } } } else { final int objHash = obj . hashCode ( ) ; for ( final T i : assertNotNull ( list ) ) { if ( obj == i || ( i != null && objHash == i . hashCode ( ) && obj . equals ( i ) ) ) { return i ; } } } final AssertionError error = new AssertionError ( "Object is not found among elements" ) ; MetaErrorListeners . fireError ( "Asserion error" , error ) ; throw error ;
public class MtasSolrCollectionCache { /** * Empty . */ public void empty ( ) { } }
for ( Entry < String , String > entry : idToVersion . entrySet ( ) ) { expirationVersion . remove ( entry . getValue ( ) ) ; versionToItem . remove ( entry . getValue ( ) ) ; if ( collectionCachePath != null && ! collectionCachePath . resolve ( entry . getValue ( ) ) . toFile ( ) . delete ( ) ) { log . debug ( "couldn't delete " + entry . getValue ( ) ) ; } } idToVersion . clear ( ) ;
public class CacheableWorkspaceDataManager { /** * Try to get the TransactionManager from the cache by calling by reflection * getTransactionManager ( ) on the cache instance , by default it will return null */ private static TransactionManager getTransactionManagerFromCache ( WorkspaceStorageCache cache ) { } }
try { return ( TransactionManager ) cache . getClass ( ) . getMethod ( "getTransactionManager" , ( Class < ? > [ ] ) null ) . invoke ( cache , ( Object [ ] ) null ) ; } catch ( Exception e ) { LOG . debug ( "Could not get the transaction manager from the cache" , e ) ; } return null ;
public class JdbcRow { /** * Returns the column as a long . * @ param index 1 - based * @ return column as a long */ public long getLong ( int index ) { } }
Object value = _values [ index - 1 ] ; if ( value instanceof Long ) { return ( Long ) value ; } else if ( value instanceof Integer ) { return ( Integer ) value ; } else { return Long . valueOf ( value . toString ( ) ) ; }
public class ParticipationStatus { /** * Searches for a parameter value and creates one if it cannot be found . All * objects are guaranteed to be unique , so they can be compared with * { @ code = = } equality . * @ param value the parameter value * @ return the object */ public static ParticipationStatus get ( String value ) { } }
if ( "NEEDS ACTION" . equalsIgnoreCase ( value ) ) { // vCal return NEEDS_ACTION ; } return enums . get ( value ) ;
public class OMVRBTree {

    /**
     * Intended to be called only from OTreeSet.readObject.
     *
     * Rebuilds this tree from {@code iSize} keys read from the stream
     * {@code s}; {@code defaultVal} is forwarded as the value source for the
     * rebuilt entries (presumably used for every entry when non-null — confirm
     * against buildFromSorted).
     */
    void readOTreeSet(int iSize, ObjectInputStream s, V defaultVal)
        throws java.io.IOException, ClassNotFoundException {
        // No iterator (null): keys come straight from the stream.
        buildFromSorted(iSize, null, s, defaultVal);
    }
}
public class PreparedGetObject {

    /**
     * Creates {@link Single} which will perform Get Operation lazily when
     * somebody subscribes to it and send result to observer.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>Operates on {@link StorIOContentResolver#defaultRxScheduler()} if not {@code null}.</dd>
     * </dl>
     *
     * @return non-null {@link Single} which will perform Get Operation
     *         and send result to observer.
     */
    @NonNull
    @CheckResult
    @Override
    public Single<Optional<T>> asRxSingle() {
        // Delegates to the shared RxJava helper; the Single is lazy, so the
        // Get operation only runs once a subscriber arrives.
        return RxJavaUtils.createSingleOptional(storIOContentResolver, this);
    }
}
public class AmazonCloudFrontClient { /** * Return public key configuration informaation * @ param getPublicKeyConfigRequest * @ return Result of the GetPublicKeyConfig operation returned by the service . * @ throws AccessDeniedException * Access denied . * @ throws NoSuchPublicKeyException * The specified public key doesn ' t exist . * @ sample AmazonCloudFront . GetPublicKeyConfig * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / GetPublicKeyConfig " target = " _ top " > AWS * API Documentation < / a > */ @ Override public GetPublicKeyConfigResult getPublicKeyConfig ( GetPublicKeyConfigRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetPublicKeyConfig ( request ) ;
public class TokenFilter { /** * Replaces all current token values with the contents of the given map , * where each map key represents a token name , and each map value * represents a token value . * @ param tokens * A map containing the token names and corresponding values to * assign . */ public void setTokens ( Map < String , String > tokens ) { } }
tokenValues . clear ( ) ; tokenValues . putAll ( tokens ) ;
public class PlayerStatsService {

    /**
     * Retrieve the most played champions for the target player.
     *
     * @param accId the player's account id
     * @param mode  the game mode to check
     * @return champion stats
     */
    public List<ChampionStatInfo> retrieveTopPlayedChampions(long accId, GameMode mode) {
        // Synchronous RPC: blocks until the remote stats service responds.
        // NOTE(review): failure/timeout behavior depends on client.sendRpcAndWait,
        // which is not visible here.
        return client.sendRpcAndWait(SERVICE, "retrieveTopPlayedChampions", accId, mode);
    }
}
public class Canvas { /** * Draws { @ code image } at the specified location { @ code ( x , y ) } . */ public Canvas draw ( Drawable image , float x , float y ) { } }
return draw ( image , x , y , image . width ( ) , image . height ( ) ) ;
public class OAuthProfileCreator { /** * Make a request to get the data of the authenticated user for the provider . * @ param service the OAuth service * @ param accessToken the access token * @ param dataUrl url of the data * @ param verb method used to request data * @ return the user data response */ protected String sendRequestForData ( final S service , final T accessToken , final String dataUrl , Verb verb ) { } }
logger . debug ( "accessToken: {} / dataUrl: {}" , accessToken , dataUrl ) ; final long t0 = System . currentTimeMillis ( ) ; final OAuthRequest request = createOAuthRequest ( dataUrl , verb ) ; signRequest ( service , accessToken , request ) ; final String body ; final int code ; try { Response response = service . execute ( request ) ; code = response . getCode ( ) ; body = response . getBody ( ) ; } catch ( final IOException | InterruptedException | ExecutionException e ) { throw new HttpCommunicationException ( "Error getting body: " + e . getMessage ( ) ) ; } final long t1 = System . currentTimeMillis ( ) ; logger . debug ( "Request took: " + ( t1 - t0 ) + " ms for: " + dataUrl ) ; logger . debug ( "response code: {} / response body: {}" , code , body ) ; if ( code != 200 ) { throw new HttpCommunicationException ( code , body ) ; } return body ;
public class CmsJspStatusBean {

    /**
     * Returns the initialized messages object to read localized messages from.<p>
     *
     * @return the initialized messages object to read localized messages from
     */
    protected CmsMessages getMessages() {
        if (m_messages == null) {
            // lazily initialize the localized messages on first access,
            // using the bundle name and this bean's current locale
            m_messages = new CmsMessages(Messages.get().getBundleName(), getLocale().toString());
        }
        return m_messages;
    }
}
public class ValidatorTag { /** * < p > Create a new instance of the specified { @ link Validator } * class , and register it with the { @ link UIComponent } instance associated * with our most immediately surrounding { @ link UIComponentTag } instance , if * the { @ link UIComponent } instance was created by this execution of the * containing JSP page . < / p > * @ throws JspException if a JSP error occurs */ public int doStartTag ( ) throws JspException { } }
// Locate our parent UIComponentTag UIComponentClassicTagBase tag = UIComponentClassicTagBase . getParentUIComponentClassicTagBase ( pageContext ) ; if ( tag == null ) { // PENDING i18n throw new JspException ( "Not nested in a UIComponentTag Error for tag with handler class:" + this . getClass ( ) . getName ( ) ) ; } // Nothing to do unless this tag created a component if ( ! tag . getCreated ( ) ) { return ( SKIP_BODY ) ; } UIComponent component = tag . getComponentInstance ( ) ; if ( component == null ) { // PENDING i18n throw new JspException ( "Can't create Component from tag." ) ; } if ( ! ( component instanceof EditableValueHolder ) ) { // PENDING i18n throw new JspException ( "Not nested in a tag of proper type. Error for tag with handler class:" + this . getClass ( ) . getName ( ) ) ; } Validator validator = createValidator ( ) ; if ( validator == null ) { // noinspection NonConstantStringShouldBeStringBuffer String validateError = null ; if ( binding != null ) { validateError = binding ; } if ( validatorId != null ) { if ( validateError != null ) { validateError += " or " + validatorId ; } else { validateError = validatorId ; } } // PENDING i18n throw new JspException ( "Can't create class of type:" + "javax.faces.validator.Validator from:" + validateError ) ; } // Register an instance with the appropriate component ( ( EditableValueHolder ) component ) . addValidator ( validator ) ; return ( SKIP_BODY ) ;