signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TimeoutException { /** * Constructs a < tt > TimeoutException < / tt > with the specified detail * message . * @ param message the detail message * @ param cause the original { @ code TimeoutException } */ public static TimeoutException newTimeoutException ( String message , java . util . concurrent . TimeoutException cause ) { } }
return new TimeoutException ( message , cause ) ;
public class CommerceSubscriptionEntryPersistenceImpl { /** * Clears the cache for all commerce subscription entries . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( ) { } }
entityCache . clearCache ( CommerceSubscriptionEntryImpl . class ) ; finderCache . clearCache ( FINDER_CLASS_NAME_ENTITY ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ;
public class DefaultRegisteredServiceUserInterfaceInfo { /** * Gets description . * @ return the description */ public String getDescription ( ) { } }
val items = getDescriptions ( ) ; if ( items . isEmpty ( ) ) { return this . registeredService . getDescription ( ) ; } return StringUtils . collectionToDelimitedString ( items , "." ) ;
public class NetworkVehicleInterface { /** * Add the prefix required to parse with URI if it ' s not already there . */ private static String massageUri ( String uriString ) { } }
if ( ! uriString . startsWith ( SCHEMA_SPECIFIC_PREFIX ) ) { uriString = SCHEMA_SPECIFIC_PREFIX + uriString ; } return uriString ;
public class StrTokenizer { /** * Gets a new tokenizer instance which parses Tab Separated Value strings . * The default for CSV processing will be trim whitespace from both ends * ( which can be overridden with the setTrimmer method ) . * @ param input the string to parse * @ return a new tokenizer instance which parses Tab Separated Value strings . */ public static StrTokenizer getTSVInstance ( final String input ) { } }
final StrTokenizer tok = getTSVClone ( ) ; tok . reset ( input ) ; return tok ;
public class BoxWebHook { /** * Returns iterator over all { @ link BoxWebHook } - s . * @ param api * the API connection to be used by the resource * @ param fields * the fields to retrieve . * @ return existing { @ link BoxWebHook . Info } - s */ public static Iterable < BoxWebHook . Info > all ( final BoxAPIConnection api , String ... fields ) { } }
QueryStringBuilder builder = new QueryStringBuilder ( ) ; if ( fields . length > 0 ) { builder . appendParam ( "fields" , fields ) ; } return new BoxResourceIterable < BoxWebHook . Info > ( api , WEBHOOKS_URL_TEMPLATE . buildWithQuery ( api . getBaseURL ( ) , builder . toString ( ) ) , 64 ) { @ Override protected BoxWebHook . Info factory ( JsonObject jsonObject ) { BoxWebHook webHook = new BoxWebHook ( api , jsonObject . get ( "id" ) . asString ( ) ) ; return webHook . new Info ( jsonObject ) ; } } ;
public class DrizzleConnection { /** * Attempts to change the transaction isolation level for this < code > Connection < / code > object to the one given . The * constants defined in the interface < code > Connection < / code > are the possible transaction isolation levels . * < B > Note : < / B > If this method is called during a transaction , the result is implementation - defined . * @ param level one of the following < code > Connection < / code > constants : < code > Connection . TRANSACTION _ READ _ UNCOMMITTED < / code > , * < code > Connection . TRANSACTION _ READ _ COMMITTED < / code > , < code > Connection . TRANSACTION _ REPEATABLE _ READ < / code > , * or < code > Connection . TRANSACTION _ SERIALIZABLE < / code > . ( Note that < code > Connection . TRANSACTION _ NONE < / code > * cannot be used because it specifies that transactions are not supported . ) * @ throws java . sql . SQLException if a database access error occurs , this method is called on a closed connection or * the given parameter is not one of the < code > Connection < / code > constants * @ see java . sql . DatabaseMetaData # supportsTransactionIsolationLevel * @ see # getTransactionIsolation */ public void setTransactionIsolation ( final int level ) throws SQLException { } }
String query = "SET SESSION TRANSACTION ISOLATION LEVEL" ; switch ( level ) { case Connection . TRANSACTION_READ_UNCOMMITTED : query += " READ UNCOMMITTED" ; break ; case Connection . TRANSACTION_READ_COMMITTED : query += " READ COMMITTED" ; break ; case Connection . TRANSACTION_REPEATABLE_READ : query += " REPEATABLE READ" ; break ; case Connection . TRANSACTION_SERIALIZABLE : query += " SERIALIZABLE" ; break ; default : throw SQLExceptionMapper . getSQLException ( "Unsupported transaction isolation level" ) ; } try { protocol . executeQuery ( queryFactory . createQuery ( query ) ) ; } catch ( QueryException e ) { throw SQLExceptionMapper . get ( e ) ; }
public class TileTOCRGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case AfplibPackage . TILE_TOCRG__XOFFSET : return getXOFFSET ( ) ; case AfplibPackage . TILE_TOCRG__YOFFSET : return getYOFFSET ( ) ; case AfplibPackage . TILE_TOCRG__THSIZE : return getTHSIZE ( ) ; case AfplibPackage . TILE_TOCRG__TVSIZE : return getTVSIZE ( ) ; case AfplibPackage . TILE_TOCRG__RELRES : return getRELRES ( ) ; case AfplibPackage . TILE_TOCRG__COMPR : return getCOMPR ( ) ; case AfplibPackage . TILE_TOCRG__DATAPOS : return getDATAPOS ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class QueryRecord { /** * Is one of the sub - queries a multi - table . * @ return true if this is a query on top of an object query . */ public boolean isComplexQuery ( ) { } }
for ( int i = 0 ; i < this . getRecordlistCount ( ) ; i ++ ) { if ( this . getRecordlistAt ( i ) . getTable ( ) instanceof org . jbundle . base . db . shared . MultiTable ) return true ; } return false ;
public class MenuDrawer { /** * Sets the drawable used as the drawer indicator . * @ param drawable The drawable used as the drawer indicator . */ public void setSlideDrawable ( Drawable drawable ) { } }
mSlideDrawable = new SlideDrawable ( drawable ) ; mSlideDrawable . setIsRtl ( ViewHelper . getLayoutDirection ( this ) == LAYOUT_DIRECTION_RTL ) ; if ( mActionBarHelper != null ) { mActionBarHelper . setDisplayShowHomeAsUpEnabled ( true ) ; if ( mDrawerIndicatorEnabled ) { mActionBarHelper . setActionBarUpIndicator ( mSlideDrawable , isMenuVisible ( ) ? mDrawerOpenContentDesc : mDrawerClosedContentDesc ) ; } }
public class EsIndex { /** * Appends specified value for the given column and related pseudo columns * into list of properties . * @ param doc list of properties in json format * @ param column colum definition * @ param value column ' s value . */ private void putValue ( EsRequest doc , EsIndexColumn column , Object value ) { } }
Object columnValue = column . columnValue ( value ) ; String stringValue = column . stringValue ( value ) ; doc . put ( column . getName ( ) , columnValue ) ; if ( ! ( value instanceof ModeShapeDateTime || value instanceof Long || value instanceof Boolean ) ) { doc . put ( column . getLowerCaseFieldName ( ) , stringValue . toLowerCase ( ) ) ; doc . put ( column . getUpperCaseFieldName ( ) , stringValue . toUpperCase ( ) ) ; } doc . put ( column . getLengthFieldName ( ) , stringValue . length ( ) ) ;
public class LogConditionalObjectiveFunction { /** * This function is used to comme up with an estimate of the value / gradient based on only a small * portion of the data ( refered to as the batchSize for lack of a better term . In this case batch does * not mean All ! ! It should be thought of in the sense of " a small batch of the data " . */ @ Override public void calculateStochastic ( double [ ] x , double [ ] v , int [ ] batch ) { } }
if ( method . calculatesHessianVectorProduct ( ) && v != null ) { // This is used for Stochastic Methods that involve second order information ( SMD for example ) if ( method . equals ( StochasticCalculateMethods . AlgorithmicDifferentiation ) ) { calculateStochasticAlgorithmicDifferentiation ( x , v , batch ) ; } else if ( method . equals ( StochasticCalculateMethods . IncorporatedFiniteDifference ) ) { calculateStochasticFiniteDifference ( x , v , finiteDifferenceStepSize , batch ) ; } } else { // This is used for Stochastic Methods that don ' t need anything but the gradient ( SGD ) calculateStochasticGradientOnly ( x , batch ) ; }
public class QueryRpc { /** * Parse the " percentile " section of the query string and returns an list of * float that contains the percentile calculation paramters * the format of the section : percentile [ xx , yy , zz ] * xx , yy , zz are the floats * @ param spec * @ return */ public static final List < Float > parsePercentiles ( final String spec ) { } }
List < Float > rs = new ArrayList < Float > ( ) ; int start_pos = spec . indexOf ( '[' ) ; int end_pos = spec . indexOf ( ']' ) ; if ( start_pos == - 1 || end_pos == - 1 ) { throw new BadRequestException ( "Malformated percentile query paramater: " + spec ) ; } String [ ] floats = Tags . splitString ( spec . substring ( start_pos + 1 , end_pos ) , ',' ) ; for ( String s : floats ) { String trimed = s . trim ( ) ; rs . add ( Float . valueOf ( trimed ) ) ; } return rs ;
public class LogRepositoryComponent { /** * 16890.20771 */ private static synchronized LogRecordHandler getBinaryHandler ( ) { } }
if ( binaryHandler == null ) { binaryHandler = new LogRecordHandler ( TRACE_THRESHOLD , LogRepositoryManagerImpl . KNOWN_FORMATTERS [ 0 ] ) ; } return binaryHandler ;
public class ZonalOffset { /** * / * [ deutsch ] * < p > Konstruiert eine Verschiebung der lokalen Zeit relativ zur * UTC - Zeitzone in integralen oder fraktionalen Sekunden . < / p > * < p > Hinweis : Fraktionale Verschiebungen werden im Zeitzonenkontext * nicht verwendet , sondern nur dann , wenn ein { @ code PlainTimestamp } * zu einem { @ code Moment } oder zur & uuml ; ck konvertiert wird . < / p > * @ param total total shift in seconds defined in range * { @ code - 18 * 3600 < = total < = 18 * 3600} * @ param fraction fraction of second * @ return zonal offset in ( sub - ) second precision * @ throws IllegalArgumentException if any arguments are out of range * or have different signs * @ see # getIntegralAmount ( ) * @ see # getFractionalAmount ( ) */ public static ZonalOffset ofTotalSeconds ( int total , int fraction ) { } }
if ( fraction != 0 ) { return new ZonalOffset ( total , fraction ) ; } else if ( total == 0 ) { return UTC ; } else if ( ( total % ( 15 * 60 ) ) == 0 ) { // Viertelstundenintervall Integer value = Integer . valueOf ( total ) ; ZonalOffset result = OFFSET_CACHE . get ( value ) ; if ( result == null ) { result = new ZonalOffset ( total , 0 ) ; OFFSET_CACHE . putIfAbsent ( value , result ) ; result = OFFSET_CACHE . get ( value ) ; } return result ; } else { return new ZonalOffset ( total , 0 ) ; }
public class DropTargetHelper { /** * Remove a DropPasteWorker from the helper . * @ param worker the worker that should be removed */ public void removeDropPasteWorker ( DropPasteWorkerInterface worker ) { } }
this . dropPasteWorkerSet . remove ( worker ) ; java . util . Iterator it = this . dropPasteWorkerSet . iterator ( ) ; int newDefaultActions = 0 ; while ( it . hasNext ( ) ) newDefaultActions |= ( ( DropPasteWorkerInterface ) it . next ( ) ) . getAcceptableActions ( defaultDropTarget . getComponent ( ) ) ; defaultDropTarget . setDefaultActions ( newDefaultActions ) ;
public class Statement { /** * Creates a code chunk that assigns value to a preexisting variable with the given name . */ public static Statement assign ( Expression lhs , Expression rhs ) { } }
return Assignment . create ( lhs , rhs , null ) ;
public class MessageStoreImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . msgstore . MessageStore # xmlWriteRawOn ( com . ibm . ws . sib . msgstore . FormattedWriter , boolean ) */ private final void _xmlWriteRawOn ( FormattedWriter writer , boolean callBackToItem ) throws IOException { } }
new RawDataDumper ( _persistentMessageStore , writer , callBackToItem ) . dump ( ) ;
public class Utils { /** * Replies the default value of the given parameter . * @ param member the member * @ param param the parameter . * @ param configuration the configuration . * @ return the default value or { @ code null } . */ @ SuppressWarnings ( "checkstyle:nestedifdepth" ) public static String getParameterDefaultValue ( ExecutableMemberDoc member , Parameter param , SarlConfiguration configuration ) { } }
final AnnotationDesc annotation = Utils . findFirst ( param . annotations ( ) , it -> qualifiedNameEquals ( it . annotationType ( ) . qualifiedTypeName ( ) , getKeywords ( ) . getDefaultValueAnnnotationName ( ) ) ) ; if ( annotation != null ) { final ElementValuePair [ ] pairs = annotation . elementValues ( ) ; if ( pairs != null && pairs . length > 0 ) { final String fieldId = pairs [ 0 ] . value ( ) . value ( ) . toString ( ) ; final int index = fieldId . indexOf ( '#' ) ; ClassDoc fieldContainer ; final String fieldName ; if ( index > 0 ) { final String referenceName = fieldId . substring ( 0 , index ) ; if ( qualifiedNameEquals ( referenceName , member . containingClass ( ) . qualifiedName ( ) ) ) { fieldContainer = member . containingClass ( ) ; } else { fieldContainer = findFirst ( configuration . classDocCatalog . allClasses ( getPackageName ( referenceName ) ) , false , it -> false ) ; if ( fieldContainer == null ) { fieldContainer = member . containingClass ( ) ; } } fieldName = createNameForHiddenDefaultValueAttribute ( fieldId . substring ( index + 1 ) ) ; } else { fieldContainer = member . containingClass ( ) ; fieldName = createNameForHiddenDefaultValueAttribute ( fieldId ) ; } final FieldDoc field = Utils . findFirst ( fieldContainer . fields ( ) , false , it -> simpleNameEquals ( it . name ( ) , fieldName ) ) ; if ( field != null ) { final AnnotationDesc valueAnnotation = Utils . findFirst ( field . annotations ( ) , it -> qualifiedNameEquals ( it . annotationType ( ) . qualifiedTypeName ( ) , getKeywords ( ) . getSarlSourceCodeAnnotationName ( ) ) ) ; if ( valueAnnotation != null ) { return valueAnnotation . elementValues ( ) [ 0 ] . value ( ) . value ( ) . toString ( ) ; } } } } return null ;
public class FilterTypeImpl { /** * Returns all < code > init - param < / code > elements * @ return list of < code > init - param < / code > */ public List < ParamValueType < FilterType < T > > > getAllInitParam ( ) { } }
List < ParamValueType < FilterType < T > > > list = new ArrayList < ParamValueType < FilterType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "init-param" ) ; for ( Node node : nodeList ) { ParamValueType < FilterType < T > > type = new ParamValueTypeImpl < FilterType < T > > ( this , "init-param" , childNode , node ) ; list . add ( type ) ; } return list ;
public class AssociationBuilder { /** * Populates entities related via join table for < code > entity < / code > * @ param entity * @ param entityMetadata * @ param delegator * @ param relation */ void populateRelationForM2M ( Object entity , EntityMetadata entityMetadata , PersistenceDelegator delegator , Relation relation , Object relObject , Map < String , Object > relationsMap ) { } }
// For M - M relationship of Collection type , relationship entities are // always fetched from Join Table . if ( relation . getPropertyType ( ) . isAssignableFrom ( List . class ) || relation . getPropertyType ( ) . isAssignableFrom ( Set . class ) ) { if ( relation . isRelatedViaJoinTable ( ) && ( relObject == null || ProxyHelper . isProxyOrCollection ( relObject ) ) ) { populateCollectionFromJoinTable ( entity , entityMetadata , delegator , relation ) ; } } else if ( relation . getPropertyType ( ) . isAssignableFrom ( Map . class ) ) { if ( relation . isRelatedViaJoinTable ( ) ) { // TODO : Implement Map relationships via Join Table ( not // supported as of now ) } else { populateCollectionFromMap ( entity , delegator , relation , relObject , relationsMap ) ; } }
public class JspCompilationContext { /** * = = = = = Compile and reload = = = = = */ public void compile ( ) throws JasperException , FileNotFoundException { } }
createCompiler ( ) ; if ( jspCompiler . isOutDated ( ) ) { if ( isRemoved ( ) ) { throw new FileNotFoundException ( jspUri ) ; } try { jspCompiler . removeGeneratedFiles ( ) ; jspLoader = null ; jspCompiler . compile ( ) ; jsw . setReload ( true ) ; jsw . setCompilationException ( null ) ; } catch ( JasperException ex ) { // Cache compilation exception jsw . setCompilationException ( ex ) ; if ( options . getDevelopment ( ) && options . getRecompileOnFail ( ) ) { // Force a recompilation attempt on next access jsw . setLastModificationTest ( - 1 ) ; } throw ex ; } catch ( FileNotFoundException fnfe ) { // Re - throw to let caller handle this - will result in a 404 throw fnfe ; } catch ( Exception ex ) { JasperException je = new JasperException ( Localizer . getMessage ( "jsp.error.unable.compile" ) , ex ) ; // Cache compilation exception jsw . setCompilationException ( je ) ; throw je ; } }
public class Status { /** * Return a { @ link Status } given a canonical error { @ link Code } value . */ public static Status fromCodeValue ( int codeValue ) { } }
if ( codeValue < 0 || codeValue > STATUS_LIST . size ( ) ) { return UNKNOWN . withDescription ( "Unknown code " + codeValue ) ; } else { return STATUS_LIST . get ( codeValue ) ; }
public class TypeEnter { /** * Mark sym deprecated if annotations contain @ Deprecated annotation . */ public void markDeprecated ( Symbol sym , List < JCAnnotation > annotations , Env < AttrContext > env ) { } }
// In general , we cannot fully process annotations yet , but we // can attribute the annotation types and then check to see if the // @ Deprecated annotation is present . attr . attribAnnotationTypes ( annotations , env ) ; handleDeprecatedAnnotations ( annotations , sym ) ;
public class UIComponentBase { /** * < p class = " changed _ added _ 2_1 " > Install the listener instance * referenced by argument < code > componentListener < / code > as a * listener for events of type < code > eventClass < / code > originating * from this specific instance of < code > UIComponent < / code > . The * default implementation creates an inner { @ link * SystemEventListener } instance that wraps argument * < code > componentListener < / code > as the < code > listener < / code > * argument . This inner class must call through to the argument * < code > componentListener < / code > in its implementation of { @ link * SystemEventListener # processEvent } and its implementation of * { @ link SystemEventListener # isListenerForSource } must return * true if the instance class of this < code > UIComponent < / code > is * assignable from the argument to * < code > isListenerForSource < / code > . < / p > * @ param eventClass the < code > Class < / code > of event for which * < code > listener < / code > must be fired . * @ param componentListener the implementation of { @ link * javax . faces . event . ComponentSystemEventListener } whose { @ link * javax . faces . event . ComponentSystemEventListener # processEvent } method must be called * when events of type < code > facesEventClass < / code > are fired . * @ throws < code > NullPointerException < / code > if any of the * arguments are < code > null < / code > . * @ since 2.1 */ public void subscribeToEvent ( Class < ? extends SystemEvent > eventClass , ComponentSystemEventListener componentListener ) { } }
if ( eventClass == null ) { throw new NullPointerException ( ) ; } if ( componentListener == null ) { throw new NullPointerException ( ) ; } if ( initialStateMarked ( ) ) { initialState = false ; } if ( null == listenersByEventClass ) { listenersByEventClass = new HashMap < Class < ? extends SystemEvent > , List < SystemEventListener > > ( 3 , 1.0f ) ; } SystemEventListener facesLifecycleListener = new ComponentSystemEventListenerAdapter ( componentListener , this ) ; List < SystemEventListener > listenersForEventClass = listenersByEventClass . get ( eventClass ) ; if ( listenersForEventClass == null ) { listenersForEventClass = new ArrayList < SystemEventListener > ( 3 ) ; listenersByEventClass . put ( eventClass , listenersForEventClass ) ; } if ( ! listenersForEventClass . contains ( facesLifecycleListener ) ) { listenersForEventClass . add ( facesLifecycleListener ) ; }
public class Http { /** * Sends a POST request and returns the response . * @ param endpoint The endpoint to send the request to . * @ param requestMessage A message to send in the request body . Can be null . * @ param responseClass The class to deserialise the Json response to . Can be null if no response message is expected . * @ param headers Any additional headers to send with this request . You can use { @ link org . apache . http . HttpHeaders } constants for header names . * @ param < T > The type to deserialise the response to . * @ return A { @ link Response } containing the deserialised body , if any . * @ throws IOException If an error occurs . */ public < T > Response < T > put ( Endpoint endpoint , Object requestMessage , Class < T > responseClass , NameValuePair ... headers ) throws IOException { } }
// Create the request HttpPut put = new HttpPut ( endpoint . url ( ) ) ; put . setHeaders ( combineHeaders ( headers ) ) ; // Add the request message if there is one put . setEntity ( serialiseRequestMessage ( requestMessage ) ) ; // Send the request and process the response try ( CloseableHttpResponse response = httpClient ( ) . execute ( put ) ) { T body = deserialiseResponseMessage ( response , responseClass ) ; return new Response < > ( response . getStatusLine ( ) , body ) ; }
public class StandardBullhornData { /** * { @ inheritDoc } */ @ Override public < C extends CrudResponse , T extends UpdateEntity > C updateEntity ( T entity ) { } }
return this . handleUpdateEntity ( entity ) ;
public class PropFindResponseEntity { /** * { @ inheritDoc } */ public void write ( OutputStream stream ) throws IOException { } }
this . outputStream = stream ; try { this . xmlStreamWriter = XMLOutputFactory . newInstance ( ) . createXMLStreamWriter ( outputStream , Constants . DEFAULT_ENCODING ) ; xmlStreamWriter . setNamespaceContext ( namespaceContext ) ; xmlStreamWriter . writeStartDocument ( ) ; xmlStreamWriter . writeStartElement ( "D" , "multistatus" , "DAV:" ) ; xmlStreamWriter . writeNamespace ( "D" , "DAV:" ) ; xmlStreamWriter . writeAttribute ( "xmlns:b" , "urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" ) ; traverseResources ( rootResource , 0 ) ; // D : multistatus xmlStreamWriter . writeEndElement ( ) ; xmlStreamWriter . writeEndDocument ( ) ; // rootNode . accept ( this ) ; } catch ( XMLStreamException exc ) { LOG . error ( exc . getMessage ( ) , exc ) ; throw new IOException ( exc . getMessage ( ) , exc ) ; } catch ( RepositoryException exc ) { LOG . error ( exc . getMessage ( ) , exc ) ; throw new IOException ( exc . getMessage ( ) , exc ) ; } catch ( IllegalResourceTypeException exc ) { LOG . error ( exc . getMessage ( ) , exc ) ; throw new IOException ( exc . getMessage ( ) , exc ) ; } catch ( URISyntaxException exc ) { LOG . error ( exc . getMessage ( ) , exc ) ; throw new IOException ( exc . getMessage ( ) , exc ) ; }
public class StringStream { /** * XXX : encoding issues */ public int read ( byte [ ] buf , int offset , int length ) throws IOException { } }
int strlen = _length ; int start = offset ; int end = offset + length ; int index = _index ; for ( ; index < strlen && offset < end ; index ++ ) { int ch = _string . charAt ( index ) ; if ( ch < 0x80 ) buf [ offset ++ ] = ( byte ) ch ; else if ( ch < 0x800 && offset + 1 < end ) { buf [ offset ++ ] = ( byte ) ( 0xc0 | ( ch >> 6 ) ) ; buf [ offset ++ ] = ( byte ) ( 0x80 | ( ch & 0x3f ) ) ; } else if ( ch < 0x8000 && offset + 2 < end ) { buf [ offset ++ ] = ( byte ) ( 0xe0 | ( ch >> 12 ) ) ; buf [ offset ++ ] = ( byte ) ( 0x80 | ( ( ch >> 6 ) & 0x3f ) ) ; buf [ offset ++ ] = ( byte ) ( 0x80 | ( ( ch >> 6 ) & 0x3f ) ) ; } else if ( offset == start ) { throw new IllegalStateException ( "buffer length is not large enough to decode UTF-8 data" ) ; } else { break ; } } _index = index ; return start < offset ? offset - start : - 1 ;
public class StreamUtil { /** * a convenience method to reduce all the casting of HttpEntity . getContentLength ( ) to int */ public static InputStream cloneContent ( InputStream source , long readbackSize , ByteArrayOutputStream target ) throws IOException { } }
return cloneContent ( source , ( int ) Math . min ( ( long ) Integer . MAX_VALUE , readbackSize ) , target ) ;
public class TimeUtil { /** * Gets the next execution instant for a { @ link Schedule } , relative to a given instant . * < p > e . g . an hourly schedule has a next execution instant at 14:00 relative to 13:22. * @ param instant The instant to calculate the next execution instant relative to * @ param schedule The schedule of executions * @ return an instant at the next execution time */ public static Instant nextInstant ( Instant instant , Schedule schedule ) { } }
final ExecutionTime executionTime = ExecutionTime . forCron ( cron ( schedule ) ) ; final ZonedDateTime utcDateTime = instant . atZone ( UTC ) ; return executionTime . nextExecution ( utcDateTime ) . orElseThrow ( IllegalArgumentException :: new ) // with unix cron , this should not happen . toInstant ( ) ;
public class AccountingDate { /** * Obtains the current { @ code AccountingDate } from the system clock in the specified time - zone , * translated with the given AccountingChronology . * This will query the { @ link Clock # system ( ZoneId ) system clock } to obtain the current date . * Specifying the time - zone avoids dependence on the default time - zone . * Using this method will prevent the ability to use an alternate clock for testing * because the clock is hard - coded . * @ param chronology the Accounting chronology to base the date on , not null * @ param zone the zone ID to use , not null * @ return the current date using the system clock , not null * @ throws DateTimeException if the current date cannot be obtained , * NullPointerException if an AccountingChronology was not provided */ public static AccountingDate now ( AccountingChronology chronology , ZoneId zone ) { } }
return now ( chronology , Clock . system ( zone ) ) ;
public class MatrixResponse { /** * Returns the distance for the specific entry ( from - & gt ; to ) in meter . */ public double getDistance ( int from , int to ) { } }
if ( hasErrors ( ) ) { throw new IllegalStateException ( "Cannot return distance (" + from + "," + to + ") if errors occured " + getErrors ( ) ) ; } if ( from >= distances . length ) { throw new IllegalStateException ( "Cannot get 'from' " + from + " from distances with size " + distances . length ) ; } else if ( to >= distances [ from ] . length ) { throw new IllegalStateException ( "Cannot get 'to' " + to + " from distances with size " + distances [ from ] . length ) ; } return distances [ from ] [ to ] ;
public class EnvelopeSchemaConverter { /** * Deserialize payload using payload schema */ public GenericRecord deserializePayload ( byte [ ] payload , Schema payloadSchema ) throws IOException , ExecutionException { } }
Decoder decoder = this . decoderFactory . binaryDecoder ( payload , null ) ; GenericDatumReader < GenericRecord > reader = this . readers . get ( payloadSchema ) ; return reader . read ( null , decoder ) ;
public class CharEscapeUtil { /** * / * Internal methods , low - level writing ; text , default */ public void _writeString ( String text ) throws IOException { } }
int len = text . length ( ) ; if ( len > _outputEnd ) { // Let ' s reserve space for entity at begin / end _writeLongString ( text ) ; return ; } // Ok : we know String will fit in buffer ok // But do we need to flush first ? if ( ( _outputTail + len ) > _outputEnd ) { _flushBuffer ( ) ; } text . getChars ( 0 , len , _outputBuffer , _outputTail ) ; if ( _characterEscapes != null ) { _writeStringCustom ( len ) ; } else if ( _maximumNonEscapedChar != 0 ) { _writeStringASCII ( len , _maximumNonEscapedChar ) ; } else { _writeString2 ( len ) ; }
public class SelectionPageGenerator { /** * Generates the sign in page to allow a user to select from the configured social login services . If no services are * configured , the user is redirected to an error page . * @ param request * @ param response * @ param socialTaiRequest * @ throws IOException */ public void displaySelectionPage ( HttpServletRequest request , HttpServletResponse response , SocialTaiRequest socialTaiRequest ) throws IOException { } }
setRequestAndConfigInformation ( request , response , socialTaiRequest ) ; if ( selectableConfigs == null || selectableConfigs . isEmpty ( ) ) { sendDisplayError ( response , "SIGN_IN_NO_CONFIGS" , new Object [ 0 ] ) ; return ; } generateOrSendToAppropriateSelectionPage ( response ) ;
public class DnsCache { /** * Adds DnsResponse to this cache * @ param response response to add */ public void add ( DnsQuery query , DnsResponse response ) { } }
assert eventloop . inEventloopThread ( ) : "Concurrent cache adds are not allowed" ; long expirationTime = now . currentTimeMillis ( ) ; if ( response . isSuccessful ( ) ) { assert response . getRecord ( ) != null ; // where are my advanced contracts so that Intellj would know it ' s true here without an assert ? long minTtl = response . getRecord ( ) . getMinTtl ( ) * 1000 ; if ( minTtl == 0 ) { return ; } expirationTime += Math . min ( minTtl , maxTtl ) ; } else { expirationTime += response . getErrorCode ( ) == ResponseErrorCode . TIMED_OUT ? timedOutExceptionTtl : errorCacheExpiration ; } CachedDnsQueryResult cachedResult = new CachedDnsQueryResult ( response , expirationTime ) ; CachedDnsQueryResult old = cache . put ( query , cachedResult ) ; expirations . add ( cachedResult ) ; if ( old != null ) { old . response = null ; // mark old cache response as refreshed ( see performCleanup ) logger . trace ( "Refreshed cache entry for {}" , query ) ; } else { logger . trace ( "Added cache entry for {}" , query ) ; }
public class HostStorageDeviceInfoEx {
    /**
     * Collects runtime names for all ScsiLuns on a host whose lun type is "disk".
     *
     * Walks the SCSI topology (adapter -> target -> lun), pairs each disk ScsiLun with
     * its topology entry via the lun key, and stores a runtime name of the A:C:T:L form
     * (e.g. "vmhba0:C2:T0:L0") into {@code this.scsiRuntimeNames}, keyed by lun key.
     * Exits early (with a trace log) whenever no usable information is present.
     */
    private void collectScsiRuntimeNames() {
        // To hold all the Scsi Luns that are disks
        ArrayList<ScsiLun> sortedScsiLuns = new ArrayList<ScsiLun>();
        // Check to see that this device has luns
        if (storageDeviceInfo.getScsiLun().length == 0) {
            log.trace("There are no Scsi LUNS on this storage device.");
            return;
        }
        // Collect all the scsi luns which are disks
        for (ScsiLun lun : storageDeviceInfo.getScsiLun()) {
            if (lun.lunType.equals("disk")) {
                sortedScsiLuns.add(lun);
            }
        }
        // To collect the Scsi Topology information: each entry maps
        // "key" -> target key and "lunInfo" -> HostScsiTopologyLun.
        ArrayList<Map> scsiTopologyInfo = new ArrayList<Map>();
        // Get the HostScsiTopology information
        HostScsiTopology scsiTopology = storageDeviceInfo.scsiTopology;
        // Check to see that adapters are present
        if (null == scsiTopology.adapter || scsiTopology.adapter.length == 0) {
            log.trace("There are no adapters on this storage device.");
            return;
        }
        // Get the adapters from scsi topology
        for (HostScsiTopologyInterface adapter : scsiTopology.adapter) {
            // Check to see that these adapters have targets
            if (null == adapter.target || adapter.target.length == 0) {
                log.trace("This adapter has no targets. Adapter:" + adapter.adapter);
                continue;
            }
            // For each target on these adapters
            for (HostScsiTopologyTarget target : adapter.target) {
                // Check to see that the target has LUNs
                if (null == target.lun || target.lun.length == 0) {
                    log.trace("This target has no LUNs on it. Target:" + target.key);
                    continue;
                }
                // For each lun on these targets
                for (HostScsiTopologyLun lun : target.lun) {
                    // Create a Map with the target key and lun information
                    Map<String, Object> scsiInfo = new HashMap<String, Object>();
                    scsiInfo.put("key", target.key);
                    scsiInfo.put("lunInfo", lun);
                    // Add this map to the Scsi info list
                    scsiTopologyInfo.add(scsiInfo);
                }
            }
        }
        // Make sure that information is available.
        if (sortedScsiLuns.size() == 0 || scsiTopologyInfo.size() == 0) {
            log.trace("There is no SCSI Lun information on this host");
            return;
        }
        // For each Lun in sortedScsiLuns
        for (ScsiLun lun : sortedScsiLuns) {
            // For each of the ScsiTopology entries
            for (Map info : scsiTopologyInfo) {
                // Get the lun information on the target
                HostScsiTopologyLun scsiTopologyLun = (HostScsiTopologyLun) info.get("lunInfo");
                // If this topology lun refers to the current disk lun
                if (scsiTopologyLun.scsiLun.equals(lun.key)) {
                    // The target key ends in the adapter:channel:target triple after the last '-'
                    String[] names = info.get("key").toString().split("-");
                    String name = names[names.length - 1];
                    // The name will look something like this: "vmhba0:2:0".
                    // Split on ':' to obtain ["vmhba0", "2", "0"].
                    String[] nameSplit = name.split(":");
                    // Build the RuntimeName (A:C:T:L form), e.g. "vmhba0:C2:T0:L0",
                    // where "L" is followed by the lun number from the topology lun.
                    // Store it keyed by the lun key.
                    this.scsiRuntimeNames.put(lun.key, (nameSplit[0] + ":C" + nameSplit[1] + ":T" + nameSplit[2] + ":L" + scsiTopologyLun.getLun()));
                }
            }
        }
    }
}
public class TimePickerDialog {
    /**
     * For keyboard mode, processes key events.
     *
     * TAB finishes keyboard entry when the typed time is legal; ENTER commits the time
     * (invoking the callback) and dismisses the dialog; DEL removes the last typed key
     * and announces the deletion for accessibility; digits (and AM/PM keys in 12-hour
     * mode) either start keyboard mode or append to the typed time.
     *
     * @param keyCode the pressed key.
     * @return true if the key was successfully processed, false otherwise.
     */
    private boolean processKeyUp(int keyCode) {
        if (keyCode == KeyEvent.KEYCODE_TAB) {
            if (mInKbMode) {
                // Only leave keyboard mode if the typed time forms a complete, legal value.
                if (isTypedTimeFullyLegal()) {
                    finishKbMode(true);
                }
                return true;
            }
        } else if (keyCode == KeyEvent.KEYCODE_ENTER) {
            if (mInKbMode) {
                // An incomplete typed time consumes ENTER without committing anything.
                if (!isTypedTimeFullyLegal()) {
                    return true;
                }
                finishKbMode(false);
            }
            if (mCallback != null) {
                mCallback.onTimeSet(this, mTimePicker.getHours(), mTimePicker.getMinutes(), mTimePicker.getSeconds());
            }
            dismiss();
            return true;
        } else if (keyCode == KeyEvent.KEYCODE_DEL) {
            if (mInKbMode) {
                if (!mTypedTimes.isEmpty()) {
                    int deleted = deleteLastTypedKey();
                    String deletedKeyStr;
                    // Translate the deleted key code into a human-readable label
                    // (AM/PM text or the digit) for the accessibility announcement.
                    if (deleted == getAmOrPmKeyCode(AM)) {
                        deletedKeyStr = mAmText;
                    } else if (deleted == getAmOrPmKeyCode(PM)) {
                        deletedKeyStr = mPmText;
                    } else {
                        deletedKeyStr = String.format(mLocale, "%d", getValFromKeyCode(deleted));
                    }
                    Utils.tryAccessibilityAnnounce(mTimePicker, String.format(mDeletedKeyFormat, deletedKeyStr));
                    updateDisplay(true);
                }
            }
        } else if (keyCode == KeyEvent.KEYCODE_0 || keyCode == KeyEvent.KEYCODE_1 || keyCode == KeyEvent.KEYCODE_2 || keyCode == KeyEvent.KEYCODE_3 || keyCode == KeyEvent.KEYCODE_4 || keyCode == KeyEvent.KEYCODE_5 || keyCode == KeyEvent.KEYCODE_6 || keyCode == KeyEvent.KEYCODE_7 || keyCode == KeyEvent.KEYCODE_8 || keyCode == KeyEvent.KEYCODE_9 || (!mIs24HourMode && (keyCode == getAmOrPmKeyCode(AM) || keyCode == getAmOrPmKeyCode(PM)))) {
            if (!mInKbMode) {
                if (mTimePicker == null) {
                    // Something's wrong, because time picker should definitely not be null.
                    Log.e(TAG, "Unable to initiate keyboard mode, TimePicker was null.");
                    return true;
                }
                // First digit starts keyboard mode from a clean slate.
                mTypedTimes.clear();
                tryStartingKbMode(keyCode);
                return true;
            }
            // We're already in keyboard mode.
            if (addKeyIfLegal(keyCode)) {
                updateDisplay(false);
            }
            return true;
        }
        return false;
    }
}
public class VirtualMediaPanel { /** * We overload this to translate mouse events into the proper coordinates before they are * dispatched to any of the mouse listeners . */ @ Override protected void processMouseEvent ( MouseEvent event ) { } }
event . translatePoint ( _vbounds . x , _vbounds . y ) ; super . processMouseEvent ( event ) ;
public class AnnotationManager { /** * Find first { @ link ru . yandex . qatools . allure . annotations . Severity } annotation * @ return { @ link ru . yandex . qatools . allure . model . SeverityLevel } or null if * annotation doesn ' t present */ public SeverityLevel getSeverity ( ) { } }
Severity severity = getAnnotation ( Severity . class ) ; return severity == null ? null : severity . value ( ) ;
public class Parser { /** * ImportSpecifierSet : : = ' { ' ( ImportSpecifier ( ' , ' ImportSpecifier ) * ( , ) ? ) ? ' } ' */ private ImmutableList < ParseTree > parseImportSpecifierSet ( ) { } }
ImmutableList . Builder < ParseTree > elements ; elements = ImmutableList . builder ( ) ; eat ( TokenType . OPEN_CURLY ) ; while ( peekIdOrKeyword ( ) ) { elements . add ( parseImportSpecifier ( ) ) ; if ( ! peek ( TokenType . CLOSE_CURLY ) ) { eat ( TokenType . COMMA ) ; } } eat ( TokenType . CLOSE_CURLY ) ; return elements . build ( ) ;
public class GeomorphUtilities {
    /**
     * Scans the grid along the sun direction and fills {@code shadow} with 0/1 flags.
     *
     * Sweeps diagonals of a square of side {@code quadrata} (which embeds the raster),
     * tracking the last shadow-casting cell (I, J) and comparing the elevation drop
     * against tan(alfa) to decide whether each cell is lit (0) or shadowed (1).
     * NOTE(review): the exact geometric convention (meaning of beta/alfa, 0-vs-1
     * encoding of {@code curvatureImage}) is not visible here — confirm against callers.
     *
     * @param delta              cell size used in the slope/distance computation
     * @param quadrata           side length of the enclosing scan square
     * @param beta               scan direction angle (radians; used via tan)
     * @param alfa               sun elevation angle (radians; used via tan)
     * @param elevImageIterator  elevation raster reader
     * @param curvatureImage     raster whose samples (0/1) gate the shadow logic
     * @param shadow             output matrix receiving 0 (lit) / 1 (shadowed) flags
     */
    public void orizzonte5(double delta, int quadrata, double beta, double alfa, RandomIter elevImageIterator, WritableRaster curvatureImage, int[][] shadow) {
        int rows = curvatureImage.getHeight();
        int cols = curvatureImage.getWidth();
        int y, I, J;
        double zenith;
        // Walk each diagonal band of the scan square from the far edge inward.
        for (int j = quadrata; j >= 0; j--) {
            I = -1; // row of the last shadow-casting cell (-1 = none yet)
            J = -1; // column of the last shadow-casting cell
            y = 0;  // flag: currently inside a shadow run started by a curvature==1 cell
            for (int jj = j; jj < quadrata; jj++) {
                // Row range for this column step, derived from the scan direction beta.
                for (int i = rows - (int) floor(1 / tan(beta) * (jj - j)) - 1; i >= rows - (int) floor(1 / tan(beta) * (jj - j + 1)) - 1 && i >= 0; i--) {
                    // Only consider cells that fall inside the raster and hold valid elevation.
                    if (jj >= quadrata - cols && !isNovalue(elevImageIterator.getSampleDouble(jj - (quadrata - cols), i, 0))) {
                        /* shadow . element [ i ] [ jj - ( quadrata - Z0 . nch ) ] = j ; } } } } */
                        if (curvatureImage.getSampleDouble(jj - (quadrata - cols), i, 0) == 1 && I == -1) {
                            // First potential shadow caster found on this diagonal.
                            I = i;
                            J = jj - (quadrata - cols);
                            y = 1;
                        } else if (curvatureImage.getSampleDouble(jj - (quadrata - cols), i, 0) == 1 && I != -1) {
                            // Compare the elevation drop from the current caster with tan(alfa).
                            zenith = (elevImageIterator.getSampleDouble(J, I, 0) - elevImageIterator.getSampleDouble(jj - (quadrata - cols), i, 0)) / sqrt(pow((double) (I - i) * (double) delta, (double) 2) + pow((double) (J - (jj - (quadrata - cols))) * (double) delta, (double) 2));
                            if (zenith <= tan(alfa)) {
                                // Cell is lit; it becomes the new shadow caster.
                                shadow[i][jj - (quadrata - cols)] = 0;
                                I = i;
                                J = jj - (quadrata - cols);
                            } else {
                                shadow[i][jj - (quadrata - cols)] = 1;
                            }
                        } else if (curvatureImage.getSampleDouble(jj - (quadrata - cols), i, 0) == 0 && y == 1) {
                            // Inside a shadow run: check whether the run ends at this cell.
                            zenith = (elevImageIterator.getSampleDouble(J, I, 0) - elevImageIterator.getSampleDouble(jj - (quadrata - cols), i, 0)) / sqrt(pow((double) (I - i) * (double) delta, (double) 2) + pow((double) (J - (jj - (quadrata - cols))) * (double) delta, (double) 2));
                            if (zenith <= tan(alfa)) {
                                shadow[i][jj - (quadrata - cols)] = 0;
                                y = 0;
                            } else {
                                shadow[i][jj - (quadrata - cols)] = 1;
                            }
                        }
                    }
                }
            }
        }
    }
}
public class NetworkInterfacesInner { /** * Creates or updates a network interface . * @ param resourceGroupName The name of the resource group . * @ param networkInterfaceName The name of the network interface . * @ param parameters Parameters supplied to the create or update network interface operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < NetworkInterfaceInner > createOrUpdateAsync ( String resourceGroupName , String networkInterfaceName , NetworkInterfaceInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , networkInterfaceName , parameters ) . map ( new Func1 < ServiceResponse < NetworkInterfaceInner > , NetworkInterfaceInner > ( ) { @ Override public NetworkInterfaceInner call ( ServiceResponse < NetworkInterfaceInner > response ) { return response . body ( ) ; } } ) ;
public class AdaptTo { /** * Try to adapt the adaptable to the given type and ensures that it succeeds . * @ param adaptable Adaptable * @ param type Type * @ param < T > Type * @ return Adaption result ( not null ) * @ throws UnableToAdaptException if the adaption was not successful */ @ SuppressWarnings ( "null" ) public static < T > @ NotNull T notNull ( @ NotNull Adaptable adaptable , @ NotNull Class < T > type ) { } }
T object = adaptable . adaptTo ( type ) ; if ( object == null ) { throw new UnableToAdaptException ( adaptable , type ) ; } return object ;
public class WriteFileExtensions { /** * Writes the given byte array to a file . * @ param filename * The filename from the file . * @ param byteArray * The byte array . * @ throws IOException * Signals that an I / O exception has occurred . */ public static void writeByteArrayToFile ( final String filename , final byte [ ] byteArray ) throws IOException { } }
final File file = new File ( filename ) ; writeByteArrayToFile ( file , byteArray ) ;
public class I18nUtils {
    /**
     * Converts a string-based locale into a {@link Locale} object.
     *
     * Assumes the string has the form "{language}_{country}_{variant}".
     * Examples: "en", "de_DE", "_GB", "en_US_WIN", "de__POSIX", "fr_MAC".
     * The special value "default" (any case) yields {@link Locale#getDefault()}.
     *
     * @param localeString the string to convert, may be null
     * @return the Locale, or null if {@code localeString} is null
     */
    public static Locale getLocaleFromString(String localeString) {
        if (localeString == null) {
            return null;
        }
        localeString = localeString.trim();
        // equalsIgnoreCase avoids the default-locale-sensitive (and allocating)
        // toLowerCase().equals(...) comparison used previously.
        if (localeString.equalsIgnoreCase("default")) {
            return Locale.getDefault();
        }
        // Extract language
        int languageIndex = localeString.indexOf('_');
        if (languageIndex == -1) {
            // No further "_" so is "{language}" only
            return new Locale(localeString, "");
        }
        String language = localeString.substring(0, languageIndex);
        // Extract country
        int countryIndex = localeString.indexOf('_', languageIndex + 1);
        if (countryIndex == -1) {
            // No further "_" so is "{language}_{country}"
            String country = localeString.substring(languageIndex + 1);
            return new Locale(language, country);
        }
        // Assume all remaining is the variant so is "{language}_{country}_{variant}"
        String country = localeString.substring(languageIndex + 1, countryIndex);
        String variant = localeString.substring(countryIndex + 1);
        return new Locale(language, country, variant);
    }
}
public class JBBPNamedNumericFieldMap { /** * Ask the registered external value provider for a field value . * @ param externalFieldName the name of a field , it must not be null * @ param compiledBlock the compiled block , it must not be null * @ param evaluator an evaluator which is calling the method , it can be null * @ return integer value for the field * @ throws JBBPException if there is not any external value provider */ public int getExternalFieldValue ( final String externalFieldName , final JBBPCompiledBlock compiledBlock , final JBBPIntegerValueEvaluator evaluator ) { } }
final String normalizedName = JBBPUtils . normalizeFieldNameOrPath ( externalFieldName ) ; if ( this . externalValueProvider == null ) { throw new JBBPEvalException ( "Request for '" + externalFieldName + "' but there is not any value provider" , evaluator ) ; } else { return this . externalValueProvider . provideArraySize ( normalizedName , this , compiledBlock ) ; }
public class ByteBufUtil {
    /**
     * Encodes a {@link CharSequence} in <a href="http://en.wikipedia.org/wiki/ASCII">ASCII</a>
     * and writes it to a {@link ByteBuf} allocated with {@code alloc}.
     *
     * @param alloc The allocator used to allocate a new {@link ByteBuf}.
     * @param seq   The characters to write into a buffer.
     * @return The {@link ByteBuf} which contains the ASCII encoded result.
     */
    public static ByteBuf writeAscii(ByteBufAllocator alloc, CharSequence seq) {
        // ASCII uses exactly 1 byte per char, so the sequence length is the buffer size.
        ByteBuf buffer = alloc.buffer(seq.length());
        writeAscii(buffer, seq);
        return buffer;
    }
}
public class FragmentedMp4Builder {
    /**
     * Creates a 'moof' box for a given sequence of samples.
     *
     * The trun's data offset must point past the moof and the mdat header, but the
     * moof's size depends on the trun's contents — so a dummy offset is written first
     * to fix the box size, then the real offset is computed and set.
     *
     * @param startSample    low endpoint (inclusive) of the sample sequence
     * @param endSample      high endpoint (exclusive) of the sample sequence
     * @param track          source of the samples
     * @param sequenceNumber the fragment index of the requested list of samples
     * @return the assembled MovieFragmentBox
     */
    protected ParsableBox createMoof(long startSample, long endSample, Track track, int sequenceNumber) {
        MovieFragmentBox moof = new MovieFragmentBox();
        createMfhd(startSample, endSample, track, sequenceNumber, moof);
        createTraf(startSample, endSample, track, sequenceNumber, moof);
        TrackRunBox firstTrun = moof.getTrackRunBoxes().get(0);
        firstTrun.setDataOffset(1); // dummy to make size correct
        // Now that getSize() reflects the final layout, set the real offset:
        // 8 bytes of mdat header plus the full moof size.
        firstTrun.setDataOffset((int) (8 + moof.getSize())); // mdat header + moof size
        return moof;
    }
}
public class DialogRootView { /** * Adapts the left and right margin of a specific divider . * @ param divider * The divider , whose left and right margin should be adapted , as an instance of the * class { @ link Divider } */ private void adaptDividerMargin ( @ Nullable final Divider divider ) { } }
if ( divider != null ) { ViewGroup . LayoutParams layoutParams = divider . getLayoutParams ( ) ; if ( layoutParams instanceof LayoutParams ) { ( ( LayoutParams ) layoutParams ) . leftMargin = dividerMargin ; ( ( LayoutParams ) layoutParams ) . rightMargin = dividerMargin ; } }
public class Year { /** * Returns a copy of this { @ code Year } with the specified number of years subtracted . * This instance is immutable and unaffected by this method call . * @ param yearsToSubtract the years to subtract , may be negative * @ return a { @ code Year } based on this year with the year subtracted , not null * @ throws DateTimeException if the result exceeds the supported range */ public Year minusYears ( long yearsToSubtract ) { } }
return ( yearsToSubtract == Long . MIN_VALUE ? plusYears ( Long . MAX_VALUE ) . plusYears ( 1 ) : plusYears ( - yearsToSubtract ) ) ;
public class HadoopSparkJob { /** * Add additional namenodes specified in the Spark Configuration * ( { @ link # SPARK _ CONF _ ADDITIONAL _ NAMENODES } ) to the Props provided . * @ param props Props to add additional namenodes to . * @ see HadoopJobUtils # addAdditionalNamenodesToProps ( Props , String ) */ private void addAdditionalNamenodesFromConf ( final Props props ) { } }
final String sparkConfDir = getSparkLibConf ( ) [ 1 ] ; final File sparkConfFile = new File ( sparkConfDir , "spark-defaults.conf" ) ; try { final InputStreamReader inReader = new InputStreamReader ( new FileInputStream ( sparkConfFile ) , StandardCharsets . UTF_8 ) ; // Use Properties to avoid needing Spark on our classpath final Properties sparkProps = new Properties ( ) ; sparkProps . load ( inReader ) ; inReader . close ( ) ; final String additionalNamenodes = sparkProps . getProperty ( SPARK_CONF_ADDITIONAL_NAMENODES ) ; if ( additionalNamenodes != null && additionalNamenodes . length ( ) > 0 ) { getLog ( ) . info ( "Found property " + SPARK_CONF_ADDITIONAL_NAMENODES + " = " + additionalNamenodes + "; setting additional namenodes" ) ; HadoopJobUtils . addAdditionalNamenodesToProps ( props , additionalNamenodes ) ; } } catch ( final IOException e ) { getLog ( ) . warn ( "Unable to load Spark configuration; not adding any additional " + "namenode delegation tokens." , e ) ; }
public class TokenCompleteTextView {
    /**
     * Initialises the view state and the various listeners.
     *
     * Sets up the default tokenizer, span/text watchers, input type flags, the IME
     * action listener, and an InputFilter that enforces the token limit, completes the
     * current token on split characters, and protects the prefix from deletion.
     * Idempotent: returns immediately once initialisation has run.
     */
    private void init() {
        if (initialized) return;
        // Initialise variables
        setTokenizer(new CharacterTokenizer(Arrays.asList(',', ';'), ","));
        Editable text = getText();
        assert null != text;
        spanWatcher = new TokenSpanWatcher();
        textWatcher = new TokenTextWatcher();
        hiddenContent = null;
        countSpan = new CountSpan();
        // Initialise TextChangedListeners
        addListeners();
        setTextIsSelectable(false);
        setLongClickable(false);
        // In theory, get the soft keyboard to not supply suggestions. very unreliable
        setInputType(getInputType() | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS | InputType.TYPE_TEXT_FLAG_AUTO_COMPLETE);
        setHorizontallyScrolling(false);
        // Listen to IME action keys
        setOnEditorActionListener(this);
        // Initialise the text filter (listens for the split chars)
        setFilters(new InputFilter[] { new InputFilter() {
            @Override
            public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int destinationStart, int destinationEnd) {
                // Internal edits (made by this view itself) bypass all filtering.
                if (internalEditInProgress) {
                    return null;
                }
                // Token limit check: reject further input once the limit is reached.
                if (tokenLimit != -1 && getObjects().size() == tokenLimit) {
                    return "";
                }
                // Detect split characters, remove them and complete the current token instead
                if (tokenizer.containsTokenTerminator(source)) {
                    // Only perform completion if we don't allow free form text, or if there's enough
                    // content to believe this should be a token
                    if (preventFreeFormText || currentCompletionText().length() > 0) {
                        performCompletion();
                        return "";
                    }
                }
                // We need to not do anything when we would delete the prefix
                if (destinationStart < prefix.length()) {
                    // when setText is called, which should only be called during restoring,
                    // destinationStart and destinationEnd are 0. If not checked, it will clear out
                    // the prefix.
                    // This is why we need to return null in this if condition to preserve state.
                    if (destinationStart == 0 && destinationEnd == 0) {
                        return null;
                    } else if (destinationEnd <= prefix.length()) {
                        // Don't do anything
                        return prefix.subSequence(destinationStart, destinationEnd);
                    } else {
                        // Delete everything up to the prefix
                        return prefix.subSequence(destinationStart, prefix.length());
                    }
                }
                return null;
            }
        } });
        initialized = true;
    }
}
public class JcrAccessControlList { /** * Lists all privileges defined by this access list for the given user . * @ param context the security context of the user ; never null * @ return list of privilege objects . */ public Privilege [ ] getPrivileges ( SecurityContext context ) { } }
ArrayList < Privilege > privs = new ArrayList < Privilege > ( ) ; for ( AccessControlEntryImpl ace : principals . values ( ) ) { // add privileges granted for everyone if ( ace . getPrincipal ( ) . equals ( SimplePrincipal . EVERYONE ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } // add privileges granted for given user if ( ace . getPrincipal ( ) . getName ( ) . equals ( username ( context . getUserName ( ) ) ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } // add privileges granted for given role if ( context . hasRole ( ace . getPrincipal ( ) . getName ( ) ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } } Privilege [ ] res = new Privilege [ privs . size ( ) ] ; privs . toArray ( res ) ; return res ;
public class Frame { /** * Pop a value off of the Java operand stack . * @ return the value that was popped * @ throws DataflowAnalysisException * if the Java operand stack is empty */ public ValueType popValue ( ) throws DataflowAnalysisException { } }
if ( ! isValid ( ) ) { throw new DataflowAnalysisException ( "accessing top or bottom frame" ) ; } if ( slotList . size ( ) == numLocals ) { throw new DataflowAnalysisException ( "operand stack empty" ) ; } return slotList . remove ( slotList . size ( ) - 1 ) ;
public class DatabaseProviderVertx {
    /**
     * Uses an externally configured DataSource, Flavor, and optionally a shutdown hook.
     * The shutdown hook may be null if you don't want calls to Builder.close() to attempt
     * any shutdown. The DataSource and Flavor are mandatory.
     *
     * Allocates a dedicated shared worker executor (sized to the pool) on which blocking
     * database work runs; Builder.close() closes the executor and then the pool's own
     * shutdown hook, if present.
     */
    @CheckReturnValue
    public static Builder fromPool(Vertx vertx, Pool pool) {
        // One worker executor per builder; the counter keeps executor names unique.
        WorkerExecutor executor = vertx.createSharedWorkerExecutor("DbWorker-" + poolNameCounter.getAndAdd(1), pool.size);
        return new BuilderImpl(executor, () -> {
            // Shutdown hook: release the executor first, then the pool itself.
            try {
                executor.close();
            } catch (Exception e) {
                log.warn("Problem closing database worker executor", e);
            }
            if (pool.poolShutdown != null) {
                pool.poolShutdown.close();
            }
        }, () -> {
            // Connection supplier: wrap any checked failure in a DatabaseException.
            try {
                return pool.dataSource.getConnection();
            } catch (Exception e) {
                throw new DatabaseException("Unable to obtain a connection from DriverManager", e);
            }
        }, new OptionsDefault(pool.flavor));
    }
}
public class HiveJdbcConnector {
    /**
     * Helper method that executes a series of "set ?=?" queries for the Hive connection in
     * {@link HiveJdbcConnector#conn}.
     *
     * Only properties whose name starts with {@code HIVE_CONFIG_KEY_PREFIX} are applied.
     * NOTE(review): this relies on the Hive JDBC driver accepting '?' placeholders inside
     * a SET command; not all drivers substitute parameters there — confirm against the
     * driver version in use.
     *
     * @param props specifies which set methods to run. For example, if the config contains
     *              "hive.mapred.min.split.size=100" then "set mapred.min.split.size=100"
     *              will be executed.
     * @throws SQLException is thrown if there is a problem executing the "set" queries
     */
    private void setHiveProperties(Properties props) throws SQLException {
        Preconditions.checkNotNull(this.conn, "The Hive connection must be set before any queries can be run");
        // The statement is reused for every property; try-with-resources closes it.
        try (PreparedStatement preparedStatement = this.conn.prepareStatement("set ?=?")) {
            Enumeration<?> enumeration = props.propertyNames();
            while (enumeration.hasMoreElements()) {
                String propertyName = (String) enumeration.nextElement();
                if (propertyName.startsWith(HIVE_CONFIG_KEY_PREFIX)) {
                    preparedStatement.setString(1, propertyName);
                    preparedStatement.setString(2, props.getProperty(propertyName));
                    preparedStatement.execute();
                }
            }
        }
    }
}
public class sent_mails { /** * Use this API to fetch filtered set of sent _ mails resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static sent_mails [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
sent_mails obj = new sent_mails ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; sent_mails [ ] response = ( sent_mails [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class HeaderDefinition {
    /**
     * Checks this header definition's consistency, in other words whether all the
     * mandatory properties of the definition have been set.
     *
     * The checks run in declaration order, so the first missing property determines
     * which error is reported.
     *
     * @throws IllegalStateException If a mandatory property has not been set.
     */
    public void validate() {
        check("firstLine", this.firstLine);
        check("beforeEachLine", this.beforeEachLine);
        check("endLine", this.endLine);
        check("firstLineDetectionPattern", this.firstLineDetectionPattern);
        check("lastLineDetectionPattern", this.lastLineDetectionPattern);
        check("isMultiline", this.isMultiline);
        check("allowBlankLines", this.allowBlankLines);
        // skip line can be null
    }
}
public class RSA { /** * 从文件加载公钥 * @ param file 公钥文件 */ public static PublicKey loadPublicKey ( File file ) { } }
FileInputStream inputStream = null ; try { inputStream = new FileInputStream ( file ) ; return loadPublicKey ( inputStream ) ; } catch ( FileNotFoundException e ) { throw new RuntimeException ( "文件不存在" ) ; } finally { try { inputStream . close ( ) ; } catch ( Exception e2 ) { } }
public class QrCodeEncoder { /** * Creates a QR - Code which encodes data in the alphanumeric format * @ param alphaNumeric String containing only alphanumeric values . * @ return The QR - Code */ public QrCodeEncoder addAlphanumeric ( String alphaNumeric ) { } }
byte values [ ] = alphanumericToValues ( alphaNumeric ) ; MessageSegment segment = new MessageSegment ( ) ; segment . message = alphaNumeric ; segment . data = values ; segment . length = values . length ; segment . mode = QrCode . Mode . ALPHANUMERIC ; segment . encodedSizeBits += 4 ; segment . encodedSizeBits += 11 * ( segment . length / 2 ) ; if ( segment . length % 2 == 1 ) { segment . encodedSizeBits += 6 ; } segments . add ( segment ) ; return this ;
public class MultiLayerNetwork {
    /**
     * Performs layerwise unsupervised training on a single pre-trainable layer in the
     * network (VAEs, Autoencoders, etc.) for the specified number of epochs.<br>
     * If the specified layer index (0 to numLayers - 1) is not a pretrainable layer,
     * this is a no-op.
     *
     * @param layerIdx  Index of the layer to train (0 to numLayers - 1)
     * @param iter      Training data
     * @param numEpochs Number of epochs to fit the specified layer for
     */
    public void pretrainLayer(int layerIdx, DataSetIterator iter, int numEpochs) {
        Preconditions.checkState(numEpochs > 0, "Number of epochs (%s) must be a positive number", numEpochs);
        // Gradients view must exist before any fitting can happen.
        if (flattenedGradients == null) {
            initGradientsView();
        }
        if (layerIdx >= layers.length) {
            throw new IllegalArgumentException(
                    "Cannot pretrain layer: layerIdx (" + layerIdx + ") >= numLayers (" + layers.length + ")");
        }
        Layer layer = layers[layerIdx];
        // Non-pretrainable layers are silently skipped, per the contract above.
        if (!layer.isPretrainLayer())
            return;
        // Multiple epochs require the iterator to be resettable between passes.
        if (numEpochs > 1 && !iter.resetSupported())
            throw new IllegalStateException("Cannot fit multiple epochs (" + numEpochs
                    + ") on an iterator that doesn't support resetting");
        if (!iter.hasNext() && iter.resetSupported()) {
            iter.reset();
        }
        log.info("Starting unsupervised training on layer " + layerIdx + " for " + numEpochs + " epochs");
        for (int i = 0; i < numEpochs; i++) {
            if (i > 0)
                iter.reset();
            while (iter.hasNext()) {
                DataSet next = iter.next();
                input = next.getFeatures();
                pretrainLayer(layerIdx, input);
            }
        }
        // Record that this layer has seen one more pretraining pass.
        int ec = getLayer(layerIdx).conf().getEpochCount() + 1;
        getLayer(layerIdx).conf().setEpochCount(ec);
    }
}
public class Doc {
    /**
     * Returns the text of a tag in this doc matching {@code tagName}, or null when no
     * such tag exists.
     *
     * NOTE(review): the original javadoc said "first tag", but the code returns the
     * text of the LAST matching tag ({@code tags[tags.length - 1]}) — confirm which
     * behavior callers rely on before changing either.
     */
    public String getTagValue(String tagName) {
        Tag[] tags = getTagMap().get(tagName);
        if (tags == null || tags.length == 0) {
            return null;
        }
        // Last matching tag wins (see note above).
        return tags[tags.length - 1].getText();
    }
}
public class SingletonStoreConfigurationBuilder { /** * If pushStateWhenCoordinator is true , this property sets the maximum number of milliseconds * that the process of pushing the in - memory state to the underlying cache loader should take . */ public SingletonStoreConfigurationBuilder < S > pushStateTimeout ( long l , TimeUnit unit ) { } }
return pushStateTimeout ( unit . toMillis ( l ) ) ;
public class RdbAdapter {
    /**
     * Initialisation entry point for this outer adapter.
     *
     * Loads the mapping configurations, keeps only those matching this adapter's key,
     * indexes them by destination/database/table (or registers them as mirror-DB
     * configs), initialises the Druid connection pool, and starts the sync services
     * and the config monitor.
     *
     * @param configuration outer adapter configuration
     * @param envProperties environment properties (e.g. canal.conf.mode)
     */
    @Override
    public void init(OuterAdapterConfig configuration, Properties envProperties) {
        this.envProperties = envProperties;
        Map<String, MappingConfig> rdbMappingTmp = ConfigLoader.load(envProperties);
        // Filter out configurations whose outer-adapter key doesn't match this adapter.
        rdbMappingTmp.forEach((key, mappingConfig) -> {
            if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null)
                    || (mappingConfig.getOuterAdapterKey() != null
                        && mappingConfig.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()))) {
                rdbMapping.put(key, mappingConfig);
            }
        });
        if (rdbMapping.isEmpty()) {
            throw new RuntimeException("No rdb adapter found for config key: " + configuration.getKey());
        }
        for (Map.Entry<String, MappingConfig> entry : rdbMapping.entrySet()) {
            String configName = entry.getKey();
            MappingConfig mappingConfig = entry.getValue();
            if (!mappingConfig.getDbMapping().getMirrorDb()) {
                // Cache key shape depends on the canal mode: non-tcp modes include the group id.
                String key;
                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
                    key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-"
                            + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_"
                            + mappingConfig.getDbMapping().getDatabase() + "-"
                            + mappingConfig.getDbMapping().getTable();
                } else {
                    key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_"
                            + mappingConfig.getDbMapping().getDatabase() + "-"
                            + mappingConfig.getDbMapping().getTable();
                }
                Map<String, MappingConfig> configMap = mappingConfigCache.computeIfAbsent(key,
                        k1 -> new ConcurrentHashMap<>());
                configMap.put(configName, mappingConfig);
            } else {
                // mirrorDB: keyed by destination.database only.
                String key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
                        + mappingConfig.getDbMapping().getDatabase();
                mirrorDbConfigCache.put(key, MirrorDbConfig.create(configName, mappingConfig));
            }
        }
        // Initialise the connection pool.
        Map<String, String> properties = configuration.getProperties();
        dataSource = new DruidDataSource();
        dataSource.setDriverClassName(properties.get("jdbc.driverClassName"));
        dataSource.setUrl(properties.get("jdbc.url"));
        dataSource.setUsername(properties.get("jdbc.username"));
        dataSource.setPassword(properties.get("jdbc.password"));
        dataSource.setInitialSize(1);
        dataSource.setMinIdle(1);
        dataSource.setMaxActive(30);
        dataSource.setMaxWait(60000);
        dataSource.setTimeBetweenEvictionRunsMillis(60000);
        dataSource.setMinEvictableIdleTimeMillis(300000);
        dataSource.setUseUnfairLock(true);
        // List<String> array = new ArrayList<>();
        // array.add("set names utf8mb4;");
        // dataSource.setConnectionInitSqls(array);
        try {
            dataSource.init();
        } catch (SQLException e) {
            // NOTE(review): init failure is only logged; subsequent use of the pool
            // will fail at runtime — confirm this is intended.
            logger.error("ERROR ## failed to initial datasource: " + properties.get("jdbc.url"), e);
        }
        String threads = properties.get("threads");
        // String commitSize = properties.get("commitSize");
        boolean skipDupException = BooleanUtils.toBoolean(
                configuration.getProperties().getOrDefault("skipDupException", "true"));
        rdbSyncService = new RdbSyncService(dataSource,
                threads != null ? Integer.valueOf(threads) : null, skipDupException);
        rdbMirrorDbSyncService = new RdbMirrorDbSyncService(mirrorDbConfigCache, dataSource,
                threads != null ? Integer.valueOf(threads) : null,
                rdbSyncService.getColumnsTypeCache(), skipDupException);
        rdbConfigMonitor = new RdbConfigMonitor();
        rdbConfigMonitor.init(configuration.getKey(), this, envProperties);
    }
}
public class DeviceProxyDAODefaultImpl {
    /**
     * Starts (or updates) polling of the named object on the given device.
     * <p>
     * The request is sent to the device's administration device as an
     * {@code AddObjPolling} command; if the object is already polled, the
     * polling period is updated via {@code UpdObjPollingPeriod} instead.
     *
     * @param deviceProxy the device whose object should be polled
     * @param objectName  name of the attribute/command to poll (lower-cased before sending)
     * @param objectType  kind of object being polled, e.g. attribute or command
     *                    (passed through verbatim to the admin device)
     * @param period      polling period in milliseconds — TODO confirm unit against admin device docs
     * @throws DevFailed if the admin device rejects the request, the IDL version is too old,
     *                   or communication fails
     */
    private void poll_object ( final DeviceProxy deviceProxy , final String objectName , final String objectType , final int period ) throws DevFailed {
        // Build the mixed long/string argument expected by the admin device:
        // svalue = { device name, object type, object name }, lvalue = { period }.
        final DevVarLongStringArray lsa = new DevVarLongStringArray ( ) ;
        lsa . lvalue = new int [ 1 ] ;
        lsa . svalue = new String [ 3 ] ;
        lsa . svalue [ 0 ] = deviceProxy . devname ;
        lsa . svalue [ 1 ] = objectType ;
        lsa . svalue [ 2 ] = objectName . toLowerCase ( ) ;
        lsa . lvalue [ 0 ] = period ;
        // Send command on administration device; import it first if not yet connected.
        if ( deviceProxy . getAdm_dev ( ) == null ) {
            import_admin_device ( deviceProxy , "poll_object" ) ;
        }
        // Polling commands require IDL version >= 2 on the admin device.
        if ( DeviceProxy . isCheck_idl ( ) && deviceProxy . getAdm_dev ( ) . get_idl_version ( ) < 2 ) {
            Except . throw_non_supported_exception ( "TangoApi_IDL_NOT_SUPPORTED" , "Not supported by the IDL version used by device" , deviceProxy . getFull_class_name ( ) + ".poll_object()" ) ;
        }
        final DeviceData argin = new DeviceData ( ) ;
        argin . insert ( lsa ) ;
        // Try to add polling period.
        try {
            deviceProxy . getAdm_dev ( ) . command_inout ( "AddObjPolling" , argin ) ;
        } catch ( final DevFailed e ) {
            // check: if already polled, just update the polling period instead of failing
            for ( final DevError error : e . errors ) {
                if ( error . reason . equals ( "API_AlreadyPolled" ) ) {
                    deviceProxy . getAdm_dev ( ) . command_inout ( "UpdObjPollingPeriod" , argin ) ;
                    return ;
                }
            }
            // Not this exception then re-throw it wrapped as a communication failure
            Except . throw_communication_failed ( e , "TangoApi_CANNOT_POLL_OBJECT" , "Cannot poll object " + objectName , deviceProxy . getFull_class_name ( ) + ".poll_object()" ) ;
        }
    }
}
public class TransfVec {
    /**
     * Compose the given transformed vector with an additional transformation.
     * Always returns a new vector.
     *
     * @param origVec   the source transformed vector
     * @param transfMap transformation to compose on top of {@code origVec}'s own
     *                  values/indexes mapping
     * @param domain    domain (ENUM labels) of the resulting vector
     * @param keepOrig  if {@code true} the original vector is kept; if {@code false}
     *                  it is removed from the DKV store
     * @return a new {@link TransfVec}-backed {@link Vec} composing the transformation
     *         of {@code origVec} and {@code transfMap}
     */
    public static Vec compose ( TransfVec origVec , int [ ] [ ] transfMap , String [ ] domain , boolean keepOrig ) {
        // Do a mapping from INT -> ENUM -> this vector ENUM
        int [ ] [ ] domMap = Utils . compose ( new int [ ] [ ] { origVec . _values , origVec . _indexes } , transfMap ) ;
        // (Fixed: removed a stray empty statement after this call.)
        Vec result = origVec . masterVec ( ) . makeTransf ( domMap [ 0 ] , domMap [ 1 ] , domain ) ;
        if ( ! keepOrig ) DKV . remove ( origVec . _key ) ;
        return result ;
    }
}
public class JOptionPanes {
    /**
     * Create a new modal input dialog that performs live validation of the
     * text component's content: the OK button is enabled only while
     * {@code validInputPredicate} accepts the current text.
     *
     * @param parent The optional parent window
     * @param title The dialog title
     * @param mainComponent The main component shown in the dialog; must contain the text component
     * @param textComponent The text component whose content is validated
     * @param validInputPredicate The predicate that says whether the input is valid
     * @return <code>JOptionPane.OK_OPTION</code> if OK was pressed. Any other value otherwise.
     */
    public static int showValidatedTextInputDialog ( Window parent , String title , JComponent mainComponent , JTextComponent textComponent , Predicate < String > validInputPredicate ) {
        JButton okButton = new JButton ( "Ok" ) ;
        // Initialize the OK button state from the text component's current content.
        String text = textComponent . getText ( ) ;
        boolean valid = validInputPredicate . test ( text ) ;
        okButton . setEnabled ( valid ) ;
        JButton cancelButton = new JButton ( "Cancel" ) ;
        Object [ ] options = new Object [ ] { okButton , cancelButton } ;
        JOptionPane optionPane = new JOptionPane ( mainComponent , JOptionPane . PLAIN_MESSAGE , JOptionPane . OK_CANCEL_OPTION , null , options , okButton ) ;
        // Custom buttons must set the option pane's value themselves to close the dialog.
        okButton . addActionListener ( e -> optionPane . setValue ( okButton ) ) ;
        cancelButton . addActionListener ( e -> optionPane . setValue ( cancelButton ) ) ;
        // Give the text component focus as soon as it becomes part of a visible hierarchy.
        AncestorListener focussingAncestorListener = new AncestorListener ( ) {
            @ Override public void ancestorAdded ( AncestorEvent event ) { textComponent . requestFocus ( ) ; }
            @ Override public void ancestorRemoved ( AncestorEvent event ) { // Nothing to do here
            }
            @ Override public void ancestorMoved ( AncestorEvent event ) { // Nothing to do here
            }
        } ;
        textComponent . addAncestorListener ( focussingAncestorListener ) ;
        // Re-validate on every document change so the OK button tracks the input.
        DocumentListener documentListener = new DocumentListener ( ) {
            @ Override public void insertUpdate ( DocumentEvent e ) { updateButtonState ( ) ; }
            @ Override public void removeUpdate ( DocumentEvent e ) { updateButtonState ( ) ; }
            @ Override public void changedUpdate ( DocumentEvent e ) { updateButtonState ( ) ; }
            protected void updateButtonState ( ) {
                String text = textComponent . getText ( ) ;
                boolean valid = validInputPredicate . test ( text ) ;
                okButton . setEnabled ( valid ) ;
            }
        } ;
        Document document = textComponent . getDocument ( ) ;
        document . addDocumentListener ( documentListener ) ;
        JDialog dialog = optionPane . createDialog ( parent , title ) ;
        dialog . pack ( ) ;
        dialog . setResizable ( true ) ;
        // Modal: blocks here until the dialog is closed.
        dialog . setVisible ( true ) ;
        // Detach listeners so the components can be reused without leaking handlers.
        document . removeDocumentListener ( documentListener ) ;
        textComponent . removeAncestorListener ( focussingAncestorListener ) ;
        Object selectedValue = optionPane . getValue ( ) ;
        if ( selectedValue == null ) {
            return JOptionPane . CLOSED_OPTION ;
        }
        // OK is options[0], so its index 0 coincides with JOptionPane.OK_OPTION;
        // Cancel yields 1 (any non-OK value per the contract above).
        return Arrays . asList ( options ) . indexOf ( selectedValue ) ;
    }
}
public class IOUtils {
    /**
     * A JavaNLP-specific convenience routine for obtaining the current
     * scratch directory for the machine you're currently running on.
     *
     * @return a {@code File} for {@code /<short-hostname>/scr1/<username>},
     *         or the relative fallback {@code ./scr/} if the hostname cannot
     *         be resolved
     */
    public static File getJNLPLocalScratch ( ) {
        try {
            // Short host name: everything before the first dot of the FQDN.
            final String shortHost = InetAddress . getLocalHost ( ) . getHostName ( ) . split ( "\\." ) [ 0 ] ;
            final String user = System . getProperty ( "user.name" ) ;
            return new File ( "/" + shortHost + "/scr1/" + user ) ;
        } catch ( Exception e ) {
            // Hostname lookup failed: fall back to a relative scratch directory.
            return new File ( "./scr/" ) ;
        }
    }
}
public class BaseConnectionSource {
    /**
     * Save this connection as our special connection to be returned by the
     * {@link #getSavedConnection()} method.
     *
     * @param connection the connection to save
     * @return True if the connection was saved or false if it was already saved.
     * @throws SQLException if a different connection has already been saved
     */
    protected boolean saveSpecial ( DatabaseConnection connection ) throws SQLException {
        // check for a connection already saved
        NestedConnection saved = specialConnection . get ( ) ;
        if ( saved != null ) {
            if ( saved . connection != connection ) {
                throw new SQLException ( "trying to save connection " + connection + " but already have saved connection " + saved . connection ) ;
            }
            // we must have a save call within another save — bump the nesting count
            saved . increment ( ) ;
            return false ;
        }
        specialConnection . set ( new NestedConnection ( connection ) ) ;
        return true ;
    }
}
public class TableColumnCache { /** * < p > Return an Iterator over the < code > UIColumn < / code > children of the * specified < code > UIData < / code > that have a < code > rendered < / code > property * of < code > true < / code > . < / p > * @ param table the table from which to extract children * @ return the List of all UIColumn children */ private static List < HtmlColumn > buildCache ( UIComponent table ) { } }
if ( table instanceof UIData ) { final int childCount = table . getChildCount ( ) ; if ( childCount > 0 ) { final List < HtmlColumn > results = new ArrayList < > ( childCount ) ; for ( UIComponent kid : table . getChildren ( ) ) { if ( ( kid instanceof UIColumn ) && kid . isRendered ( ) ) { results . add ( ( HtmlColumn ) kid ) ; } } return results ; } else { return Collections . emptyList ( ) ; } } else { int count ; final Object value = table . getAttributes ( ) . get ( "cachedColumns" ) ; if ( ( value != null ) && ( value instanceof Integer ) ) { count = ( ( Integer ) value ) ; } else { count = 2 ; } if ( count < 1 ) { count = 1 ; } final List < HtmlColumn > result = new ArrayList < > ( count ) ; for ( int i = 0 ; i < count ; i ++ ) { result . add ( new HtmlColumn ( ) ) ; } return result ; }
public class JavaParser {
    /**
     * ANTLR-generated rule for Java modifiers.
     * Grammar: src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:503:1 :
     * modifier : ( annotation | 'public' | 'protected' | 'private' | 'static' | 'abstract'
     *            | 'final' | 'native' | 'synchronized' | 'transient' | 'volatile' | 'strictfp' ) ;
     * <p>
     * NOTE: generated code — the numeric token types and FOLLOW sets must stay
     * in sync with the grammar; do not edit by hand.
     */
    public final void modifier ( ) throws RecognitionException {
        int modifier_StartIndex = input . index ( ) ;
        try {
            // While backtracking, skip re-parsing if this rule was already memoized at this position.
            if ( state . backtracking > 0 && alreadyParsedRule ( input , 46 ) ) { return ; }
            // Predict which of the 12 alternatives applies from one token of lookahead.
            int alt64 = 12 ;
            switch ( input . LA ( 1 ) ) {
                case 58 : { alt64 = 1 ; } break ;   // annotation ('@')
                case 102 : { alt64 = 2 ; } break ;  // 'public'
                case 101 : { alt64 = 3 ; } break ;  // 'protected'
                case 100 : { alt64 = 4 ; } break ;  // 'private'
                case 106 : { alt64 = 5 ; } break ;  // 'static'
                case 63 : { alt64 = 6 ; } break ;   // 'abstract'
                case 83 : { alt64 = 7 ; } break ;   // 'final'
                case 96 : { alt64 = 8 ; } break ;   // 'native'
                case 110 : { alt64 = 9 ; } break ;  // 'synchronized'
                case 114 : { alt64 = 10 ; } break ; // 'transient'
                case 119 : { alt64 = 11 ; } break ; // 'volatile'
                case 107 : { alt64 = 12 ; } break ; // 'strictfp'
                default :
                    // No viable alternative: fail silently when backtracking, throw otherwise.
                    if ( state . backtracking > 0 ) { state . failed = true ; return ; }
                    NoViableAltException nvae = new NoViableAltException ( "" , 64 , 0 , input ) ;
                    throw nvae ;
            }
            switch ( alt64 ) {
                case 1 : { // annotation
                    pushFollow ( FOLLOW_annotation_in_modifier1573 ) ;
                    annotation ( ) ;
                    state . _fsp -- ;
                    if ( state . failed ) return ;
                } break ;
                case 2 : { // 'public'
                    match ( input , 102 , FOLLOW_102_in_modifier1583 ) ; if ( state . failed ) return ;
                } break ;
                case 3 : { // 'protected'
                    match ( input , 101 , FOLLOW_101_in_modifier1593 ) ; if ( state . failed ) return ;
                } break ;
                case 4 : { // 'private'
                    match ( input , 100 , FOLLOW_100_in_modifier1603 ) ; if ( state . failed ) return ;
                } break ;
                case 5 : { // 'static'
                    match ( input , 106 , FOLLOW_106_in_modifier1613 ) ; if ( state . failed ) return ;
                } break ;
                case 6 : { // 'abstract'
                    match ( input , 63 , FOLLOW_63_in_modifier1623 ) ; if ( state . failed ) return ;
                } break ;
                case 7 : { // 'final'
                    match ( input , 83 , FOLLOW_83_in_modifier1633 ) ; if ( state . failed ) return ;
                } break ;
                case 8 : { // 'native'
                    match ( input , 96 , FOLLOW_96_in_modifier1643 ) ; if ( state . failed ) return ;
                } break ;
                case 9 : { // 'synchronized'
                    match ( input , 110 , FOLLOW_110_in_modifier1653 ) ; if ( state . failed ) return ;
                } break ;
                case 10 : { // 'transient'
                    match ( input , 114 , FOLLOW_114_in_modifier1663 ) ; if ( state . failed ) return ;
                } break ;
                case 11 : { // 'volatile'
                    match ( input , 119 , FOLLOW_119_in_modifier1673 ) ; if ( state . failed ) return ;
                } break ;
                case 12 : { // 'strictfp'
                    match ( input , 107 , FOLLOW_107_in_modifier1683 ) ; if ( state . failed ) return ;
                } break ;
            }
        } catch ( RecognitionException re ) {
            reportError ( re ) ;
            recover ( input , re ) ;
        } finally {
            // do for sure before leaving: memoize the result for backtracking mode
            if ( state . backtracking > 0 ) { memoize ( input , 46 , modifier_StartIndex ) ; }
        }
    }
}
public class CronTabList { /** * Checks if this crontab entry looks reasonable , * and if not , return an warning message . * The point of this method is to catch syntactically correct * but semantically suspicious combinations , like */ public String checkSanity ( ) { } }
for ( CronTab tab : tabs ) { String s = tab . checkSanity ( ) ; if ( s != null ) return s ; } return null ;
public class BufferedWriter {
    /**
     * Writes a portion of an array of characters.
     *
     * <p> Ordinarily this method stores characters from the given array into
     * this stream's buffer, flushing the buffer to the underlying stream as
     * needed.  If the requested length is at least as large as the buffer,
     * however, then this method will flush the buffer and write the characters
     * directly to the underlying stream.  Thus redundant
     * <code>BufferedWriter</code>s will not copy data unnecessarily.
     *
     * @param  cbuf  A character array
     * @param  off   Offset from which to start reading characters
     * @param  len   Number of characters to write
     * @exception  IOException  If an I/O error occurs
     */
    public void write ( char cbuf [ ] , int off , int len ) throws IOException {
        synchronized ( lock ) {
            ensureOpen ( ) ;
            // Bounds check: the final "(off + len) < 0" clause catches int
            // overflow of off + len, which the "> cbuf.length" test would miss.
            if ( ( off < 0 ) || ( off > cbuf . length ) || ( len < 0 ) || ( ( off + len ) > cbuf . length ) || ( ( off + len ) < 0 ) ) {
                throw new IndexOutOfBoundsException ( ) ;
            } else if ( len == 0 ) {
                return ;
            }
            if ( len >= nChars ) {
                /* If the request length exceeds the size of the output buffer,
                   flush the buffer and then write the data directly.  In this way
                   buffered streams will cascade harmlessly. */
                flushBuffer ( ) ;
                out . write ( cbuf , off , len ) ;
                return ;
            }
            // Copy into the internal buffer in chunks: b = next source index,
            // t = one past the last source index; flush whenever the buffer fills.
            int b = off , t = off + len ;
            while ( b < t ) {
                int d = min ( nChars - nextChar , t - b ) ;
                System . arraycopy ( cbuf , b , cb , nextChar , d ) ;
                b += d ;
                nextChar += d ;
                if ( nextChar >= nChars ) flushBuffer ( ) ;
            }
        }
    }
}
public class DeleteClusterRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteClusterRequest deleteClusterRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteClusterRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteClusterRequest . getClusterId ( ) , CLUSTERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JTMConfigurationProvider { /** * Called by DS to inject location service ref */ protected synchronized void setLocationService ( WsLocationAdmin locSvc ) { } }
this . locationService = locSvc ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "setLocationService, locSvc " + locSvc ) ;
public class DomConfigurationFactory { /** * Fill codes of scopes . * @ param scopeNodeList node list with scopes definitions * @ param scopes scopes * @ param codes codes * @ throws TextProcessorFactoryException any problem */ private void fillScopeCodes ( NodeList scopeNodeList , Map < String , Scope > scopes , Map < String , Code > codes ) { } }
for ( int i = 0 ; i < scopeNodeList . getLength ( ) ; i ++ ) { Element scopeElement = ( Element ) scopeNodeList . item ( i ) ; Scope scope = scopes . get ( scopeElement . getAttribute ( TAG_SCOPE_ATTR_NAME ) ) ; // Add codes to scope Set < Code > scopeCodes = new HashSet < Code > ( ) ; // bind exists codes NodeList coderefs = scopeElement . getElementsByTagNameNS ( SCHEMA_LOCATION , TAG_CODEREF ) ; for ( int j = 0 ; j < coderefs . getLength ( ) ; j ++ ) { Element ref = ( Element ) coderefs . item ( j ) ; String codeName = ref . getAttribute ( TAG_CODEREF_ATTR_NAME ) ; Code code = codes . get ( codeName ) ; if ( code == null ) { throw new TextProcessorFactoryException ( "Can't find code \"" + codeName + "\"." ) ; } scopeCodes . add ( code ) ; } // Add inline codes NodeList inlineCodes = scopeElement . getElementsByTagNameNS ( SCHEMA_LOCATION , TAG_CODE ) ; for ( int j = 0 ; j < inlineCodes . getLength ( ) ; j ++ ) { // Inline element code Element ice = ( Element ) inlineCodes . item ( j ) ; scopeCodes . add ( parseCode ( ice , scopes ) ) ; } // Set codes to scope scope . setCodes ( scopeCodes ) ; }
public class InfoPropertiesInfoContributor {
    /**
     * Replace the {@code value} for the specified key if the value is not
     * {@code null} and the key is already present in the content map.
     *
     * @param content the content to expose
     * @param key     the property to replace
     * @param value   the new value
     */
    protected void replaceValue ( Map < String , Object > content , String key , Object value ) {
        // Guard clause: only replace existing keys, and never with null.
        if ( value == null || ! content . containsKey ( key ) ) {
            return ;
        }
        content . put ( key , value ) ;
    }
}
public class ChartLongValue {
    /**
     * {@inheritDoc}
     *
     * <p>NOTE(review): both branches compare the OTHER object to this value
     * (i.e. {@code other.compareTo(this.value)}), which inverts the natural
     * ordering and yields a descending sort — presumably intentional; confirm
     * against the callers that sort these values.</p>
     */
    public int compareTo ( Object o ) {
        if ( o instanceof ChartLongValue ) {
            // Inverted: other.value compared to this.value.
            return ( ( ChartLongValue ) o ) . value . compareTo ( value ) ;
        } else if ( o instanceof String ) {
            // Also inverted; assumes value is String-comparable — TODO confirm field type.
            return ( ( String ) o ) . compareTo ( value ) ;
        }
        // Any other type sorts before this value.
        return 1 ;
    }
}
public class AvailablePhoneNumberCountryReader { /** * Retrieve the target page from the Twilio API . * @ param targetUrl API - generated URL for the requested results page * @ param client TwilioRestClient with which to make the request * @ return AvailablePhoneNumberCountry ResourceSet */ @ Override @ SuppressWarnings ( "checkstyle:linelength" ) public Page < AvailablePhoneNumberCountry > getPage ( final String targetUrl , final TwilioRestClient client ) { } }
this . pathAccountSid = this . pathAccountSid == null ? client . getAccountSid ( ) : this . pathAccountSid ; Request request = new Request ( HttpMethod . GET , targetUrl ) ; return pageForRequest ( client , request ) ;
public class DOInfoReader { /** * 获得所有有 @ RelatedColumn注解的列 , 包括继承的父类中的 , 顺序父类先 * @ param clazz * @ return 不会返回null */ public static List < Field > getRelatedColumns ( Class < ? > clazz ) { } }
if ( clazz == null ) { return new ArrayList < Field > ( ) ; } List < Class < ? > > classLink = new ArrayList < Class < ? > > ( ) ; Class < ? > curClass = clazz ; while ( curClass != null ) { classLink . add ( curClass ) ; curClass = curClass . getSuperclass ( ) ; } // 父类优先 List < Field > result = new ArrayList < Field > ( ) ; for ( int i = classLink . size ( ) - 1 ; i >= 0 ; i -- ) { Field [ ] fields = classLink . get ( i ) . getDeclaredFields ( ) ; for ( Field field : fields ) { if ( field . getAnnotation ( RelatedColumn . class ) != null ) { result . add ( field ) ; } } } return result ;
public class MapModel { /** * Return the selected feature if there is 1 selected feature . * @ return the selected feature or null if none or multiple features are selected */ public String getSelectedFeature ( ) { } }
if ( getNrSelectedFeatures ( ) == 1 ) { for ( VectorLayer layer : getVectorLayers ( ) ) { if ( layer . getSelectedFeatures ( ) . size ( ) > 0 ) { return layer . getSelectedFeatures ( ) . iterator ( ) . next ( ) ; } } } return null ;
public class CursorList { /** * Helper for keeping { @ link # mPosition } in bounds . * @ return false if mPosition was modified , true otherwise */ private boolean clampPosition ( ) { } }
if ( mPosition < 0 ) { mPosition = - 1 ; return false ; } else if ( mPosition > mList . size ( ) ) { // TODO should this be > = instead of > mPosition = mList . size ( ) ; return false ; } return true ;
public class BeanUtils {
    /**
     * Build a descriptor for the given field of the given class.
     * Results (including {@code null} on failure) are cached per (field, class) pair.
     *
     * @param field the field to describe
     * @param clazz the class declaring (or inheriting) the field
     * @return the field's descriptor, or {@code null} if it could not be built
     */
    public static CustomPropertyDescriptor buildDescriptor ( Field field , Class < ? > clazz ) {
        FieldCache fieldCache = new FieldCache ( field , clazz ) ;
        // Check the cache first; containsKey (not get) so that cached nulls
        // (previous failures) are also honored and not retried.
        if ( FIELD_DESC_CACHE . containsKey ( fieldCache ) ) {
            return FIELD_DESC_CACHE . get ( fieldCache ) ;
        }
        String name = field . getName ( ) ;
        CustomPropertyDescriptor customPropertyDescriptor = null ;
        try {
            if ( isFinal ( field ) ) {
                // Final fields need the dedicated construction path.
                log . debug ( "字段{}是final类型,尝试为该字段创建说明" , name ) ;
                customPropertyDescriptor = tryBuildFinal ( field , clazz ) ;
            } else {
                // Non-final: use the standard JavaBeans introspection.
                log . debug ( "字段不是final类型,开始构建字段{}的说明" , name ) ;
                customPropertyDescriptor = convert ( field , new PropertyDescriptor ( name , clazz ) , clazz ) ;
            }
        } catch ( IntrospectionException e ) {
            // Last resort: build the PropertyDescriptor ourselves by locating
            // getter/setter methods directly (handles inherited accessors).
            log . info ( "尝试自定义构建PropertyDescriptor" ) ;
            Method readMethod ;
            Method writeMethod = null ;
            String methodName = StringUtils . toFirstUpperCase ( name ) ;
            // Try the "get" accessor first, then the boolean-style "is" accessor.
            readMethod = getMethod ( "get" + methodName , clazz ) ;
            if ( readMethod == null ) {
                readMethod = getMethod ( "is" + methodName , clazz ) ;
            }
            if ( readMethod != null ) {
                writeMethod = getMethod ( "set" + methodName , clazz , readMethod . getReturnType ( ) ) ;
            }
            if ( writeMethod == null ) {
                // No usable setter — give up on this field.
                log . warn ( "说明构建失败,忽略{}字段" , field . getName ( ) , e ) ;
            } else {
                log . info ( "自定义构建PropertyDescriptor成功" ) ;
                try {
                    customPropertyDescriptor = convert ( field , new PropertyDescriptor ( name , readMethod , writeMethod ) , clazz ) ;
                } catch ( IntrospectionException e1 ) {
                    log . info ( "构建失败,忽略字段[{}]" , field . getName ( ) , e1 ) ;
                }
            }
        }
        // Cache the outcome, including null, so failed lookups are not repeated.
        FIELD_DESC_CACHE . put ( fieldCache , customPropertyDescriptor ) ;
        return customPropertyDescriptor ;
    }
}
public class BoxFactory {
    /**
     * Checks the child boxes of the specified root box whether they require creating an anonymous
     * parent box, and if so, wraps consecutive runs of children of display type {@code type}
     * in a newly created anonymous element box.
     *
     * @param root the box whose child boxes are checked
     * @param type the required display type of the child boxes. The remaining child boxes are skipped.
     * @param reqtype1 the first required display type of the root. If the root type doesn't correspond
     *   to any of the required types, an anonymous parent is created for the selected children.
     * @param reqtype2 the second required display type of the root.
     * @param reqtype3 the third required display type of the root.
     * @param name the element name of the created anonymous box
     * @param display the display type of the created anonymous box
     */
    private void createAnonymousBoxes ( ElementBox root , CSSProperty . Display type , CSSProperty . Display reqtype1 , CSSProperty . Display reqtype2 , CSSProperty . Display reqtype3 , String name , String display ) {
        // Only act when the root's display type is none of the acceptable parent types.
        if ( root . getDisplay ( ) != reqtype1 && root . getDisplay ( ) != reqtype2 && root . getDisplay ( ) != reqtype3 ) {
            // New child list for the root, with runs of matching children replaced by one anonymous box.
            Vector < Box > nest = new Vector < Box > ( ) ;
            ElementBox adiv = null ; // current anonymous wrapper; null = no open run
            for ( int i = 0 ; i < root . getSubBoxNumber ( ) ; i ++ ) {
                Box sub = root . getSubBox ( i ) ;
                if ( sub instanceof BlockBox && ( ( BlockBox ) sub ) . isPositioned ( ) ) {
                    // positioned boxes are left untouched
                    nest . add ( sub ) ;
                } else if ( sub instanceof ElementBox ) {
                    ElementBox subel = ( ElementBox ) sub ;
                    if ( subel . getDisplay ( ) != type ) {
                        // Non-matching child ends the current run.
                        adiv = null ;
                        nest . add ( sub ) ;
                    } else {
                        if ( adiv == null ) {
                            // Start a new anonymous wrapper for this run of matching children.
                            Element elem = createAnonymousElement ( root . getElement ( ) . getOwnerDocument ( ) , name , display ) ;
                            adiv = createBox ( root , elem , display ) ;
                            adiv . isblock = true ;
                            adiv . isempty = true ;
                            adiv . setContainingBlockBox ( sub . getContainingBlockBox ( ) ) ;
                            adiv . setClipBlock ( sub . getClipBlock ( ) ) ;
                            nest . add ( adiv ) ;
                        }
                        // A displayed, non-empty child makes the wrapper displayed and non-empty.
                        if ( sub . isDisplayed ( ) && ! sub . isEmpty ( ) ) {
                            adiv . isempty = false ;
                            adiv . displayed = true ;
                        }
                        // Re-parent the child under the anonymous wrapper.
                        sub . setParent ( adiv ) ;
                        sub . setContainingBlockBox ( adiv ) ;
                        adiv . addSubBox ( sub ) ;
                    }
                } else
                    return ; // first box is TextBox => all the boxes are TextBox, nothing to do.
            }
            // Install the rewritten child list on the root.
            root . nested = nest ;
            root . endChild = nest . size ( ) ;
        }
    }
}
public class CPOptionWrapper { /** * Returns the localized description of this cp option in the language , optionally using the default language if no localization exists for the requested language . * @ param languageId the ID of the language * @ param useDefault whether to use the default language if no localization exists for the requested language * @ return the localized description of this cp option */ @ Override public String getDescription ( String languageId , boolean useDefault ) { } }
return _cpOption . getDescription ( languageId , useDefault ) ;
public class RecoverableUnitImpl { /** * Writes to the underlying recovery log information from the recoverable unit * sections . The amount of information written depends on the input argument * ' rewriteRequired ' . If this flag is false then only information that has not * not previously been written will be passed to the underlying recover log . * If this flag is true then all information will be passed to the underlying * recovery log . Either way , the the underlying recovery log will contain an up * to date copy of the information retained in the target * This extension of the standard writeSections method is required for * keypoint support * The information is written to the underlying recovery log , but not forced * through to persisent storage . After this call , the information is not * guaranteed to be retrieved during any post - failure recovery processing . * To ensure that this information will be recovered , a force operation * should be used instead ( eg RecoverableUnitImpl . forceSections ) * This call my be used as part of an optomization when several recoverable units * need to be pushed to disk . For example , the following sequence will ensure that * recoverable units 1 through 4 are all persisted to physical storage : - * < ul > * < li > RecoverableUnit1 . writeSections ( . . ) < / li > * < li > RecoverableUnit2 . writeSections ( . . ) < / li > * < li > RecoverableUnit3 . writeSections ( . . ) < / li > * < li > RecoverableUnit4 . forceSections ( . . ) < / li > * < / ul > * This internal version of the method is not exposed on the interfaces and can only * be called from within the RLS . Client services invoke the simpler version of the * method ( with no arguments ) which deligates down to this method . * @ param rewriteRequired Boolean flag indicating if a rewrite is required . * @ exception InternalLogException An unexpected error has occured . */ void writeSections ( boolean rewriteRequired ) throws InternalLogException { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "writeSections" , new java . lang . Object [ ] { this , new Boolean ( rewriteRequired ) } ) ; // If the parent recovery log instance has experienced a serious internal error then prevent // this operation from executing . if ( _recLog . failed ( ) ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( null ) ; } // If the log was not open then throw an exception if ( _logHandle == null ) { if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( null ) ; } _controlLock . getSharedLock ( LOCK_REQUEST_ID_RUI_WRITESECTIONS ) ; // If there is data stored within this recoverable unit that has not yet been // persisted to disk or there is existing data and a rewrite is being performed // then ( re ) persist the required data . if ( ( _unwrittenDataSize > 0 ) || ( rewriteRequired && ( _totalDataSize > 0 ) ) ) { try { if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Writing recoverable unit '" + _identity + "'" ) ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Unwritten data size = " + _unwrittenDataSize + " total data size = " + _totalDataSize ) ; int requiredRecordSize = _recordHeaderSize ; if ( rewriteRequired ) { requiredRecordSize += _totalDataSize ; } else { requiredRecordSize += _unwrittenDataSize ; } // Obtain a WritableLogRecord that provides direct access to the underlying recovery log . // The WritableLogRecord will write the required log record header to the underlying // recovery log . final WriteableLogRecord logRecord = _logHandle . getWriteableLogRecord ( requiredRecordSize ) ; // In some situations , there will not be enough space in the underlying recovery to obtain a // WritableLogRecord of the required size . The recovery log will need to perform " housekeeping " // to clean up the recovery log before this latest record can be written . 
In such // situations , the getWritableLogRecord ( ) will trigger a keypoint operation before returning . // Given that the keypoint operation will actually cause all the information within this // recoverable unit to be ( re ) written to disk , this method need take no further action . This // condition is indicated by the return of a null log record . if ( logRecord != null ) { writeRecordHeader ( logRecord , RECORDTYPENORMAL ) ; // Obtain an iterator that can be used to access each of the recoverable unit sections in turn . Iterator recoverableUnitSectionsIterator = _recoverableUnitSections . values ( ) . iterator ( ) ; while ( recoverableUnitSectionsIterator . hasNext ( ) ) { RecoverableUnitSectionImpl section = ( RecoverableUnitSectionImpl ) ( recoverableUnitSectionsIterator . next ( ) ) ; // Now direct the recoverable unit section to write its content . If the recoverable unit // section has no data to write then this will be a no - op . section . format ( rewriteRequired , logRecord ) ; } // Finally write a negative recoverable unit section id to indicate the there are no // more sections . logRecord . putInt ( END_OF_SECTIONS ) ; // Tell the WritableLogRecord that we have finished adding recoverable unit sections . This // will cause it to add the appropriate record tail to the underlying recovery log . logRecord . close ( ) ; // Flag the fact that this recoverable unit has now been written to the underlying recovery log . _storedOnDisk = true ; _logHandle . writeLogRecord ( logRecord ) ; } } catch ( IOException exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "383" , this ) ; if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "An unexpected error IO occurred whilst formatting the recovery log buffer" , exc ) ; _recLog . markFailed ( exc ) ; /* @ MD19484C */ try { _controlLock . releaseSharedLock ( LOCK_REQUEST_ID_RUI_WRITESECTIONS ) ; } catch ( Throwable exc2 ) { FFDCFilter . 
processException ( exc2 , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "392" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( exc2 ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( exc ) ; } catch ( InternalLogException exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "587" , this ) ; if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "An InternalLogException exception occured whilst formatting the recovery log buffer" , exc ) ; _recLog . markFailed ( exc ) ; /* @ MD19484C */ try { _controlLock . releaseSharedLock ( LOCK_REQUEST_ID_RUI_WRITESECTIONS ) ; } catch ( Throwable exc2 ) { FFDCFilter . processException ( exc2 , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "392" , this ) ; // The shared lock release has failed whilst procesing the initial InternalLogExcption failure . Because // this may be a LogFullException ( which extends InternalLogException ) , rather than re - generating the // exception just allow the original to return . } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , exc ) ; throw exc ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "402" , this ) ; if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "An unexpected error occurred whilst formatting the recovery log buffer" , exc ) ; _recLog . markFailed ( exc ) ; /* @ MD19484C */ try { _controlLock . releaseSharedLock ( LOCK_REQUEST_ID_RUI_WRITESECTIONS ) ; } catch ( Throwable exc2 ) { FFDCFilter . processException ( exc2 , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "411" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . 
exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( exc2 ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( exc ) ; } } else { if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "RecoverableUnitImpl has no RecoverableUnitSections that need to be added to the disk record" ) ; } try { _controlLock . releaseSharedLock ( LOCK_REQUEST_ID_RUI_WRITESECTIONS ) ; } catch ( NoSharedLockException exc ) { FFDCFilter . processException ( exc , "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.writeSections" , "474" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" , "InternalLogException" ) ; throw new InternalLogException ( exc ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "writeSections" ) ;
public class ExceptionUtils {
    /**
     * Rethrows the given throwable when it is an {@link IOException};
     * otherwise hands it back unchanged so the caller can keep processing it.
     *
     * @param t the throwable to possibly propagate
     * @return {@code t}, unmodified, when it is not an {@code IOException}
     * @throws IOException when {@code t} is an {@code IOException}; the
     *         original instance is rethrown, not wrapped
     */
    public static Throwable throwIfIOE(Throwable t) throws IOException {
        // Guard clause: anything that is not an IOException passes through.
        if (!(t instanceof IOException)) {
            return t;
        }
        throw (IOException) t;
    }
}
public class InternalXbaseParser {
    /**
     * Parses the XLiteral grammar rule by delegating to its alternatives.
     * ANTLR-generated code for: InternalXbase.g:692:1 : ruleXLiteral :
     * ( ( rule__XLiteral__Alternatives ) ) ;
     *
     * NOTE: generated parser code — the backtracking guards and the
     * stack-size bookkeeping must stay in exactly this order.
     */
    public final void ruleXLiteral() throws RecognitionException {
        // Remember the follow-stack depth so it can be restored even if
        // recognition fails partway through.
        int stackSize = keepStackSize();
        try {
            // InternalXbase.g:696:2 : ( ( ( rule__XLiteral__Alternatives ) ) )
            // InternalXbase.g:697:2 : ( ( rule__XLiteral__Alternatives ) )
            {
                // InternalXbase.g:697:2 : ( ( rule__XLiteral__Alternatives ) )
                // InternalXbase.g:698:3 : ( rule__XLiteral__Alternatives )
                {
                    // 'before' events are only fired when not speculatively
                    // backtracking (backtracking == 0 means committed parse).
                    if (state.backtracking == 0) {
                        before(grammarAccess.getXLiteralAccess().getAlternatives());
                    }
                    // InternalXbase.g:699:3 : ( rule__XLiteral__Alternatives )
                    // InternalXbase.g:699:4 : rule__XLiteral__Alternatives
                    {
                        pushFollow(FOLLOW_2);
                        rule__XLiteral__Alternatives();
                        // Pop the follow-set pushed above.
                        state._fsp--;
                        // Abort early if the sub-rule failed during backtracking;
                        // the finally block still restores the stack size.
                        if (state.failed) return;
                    }
                    if (state.backtracking == 0) {
                        after(grammarAccess.getXLiteralAccess().getAlternatives());
                    }
                }
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error handling: report, then resynchronize.
            reportError(re);
            recover(input, re);
        } finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class Assert { /** * Asserts that an object is < code > null < / code > */ public static void assertNull ( Object object , String format , Object ... args ) { } }
if ( imp != null && object != null ) { imp . assertFailed ( String . format ( format , args ) ) ; }
public class UIComponentBase {
    /**
     * <p class="changed_added_2_0">For each of the attached objects on
     * this instance that implement {@link PartialStateHolder}, call
     * {@link PartialStateHolder#markInitialState} on the attached object.</p>
     *
     * @since 2.0
     */
    @Override
    public void markInitialState() {
        super.markInitialState();
        // Faces listeners are held in a single attached-object list that
        // knows how to mark its own elements.
        if (listeners != null) {
            listeners.markInitialState();
        }
        if (listenersByEventClass != null) {
            // NOTE(review): the instanceof check is applied to the List
            // value itself, not to its elements. This appears intentional —
            // presumably the map values are attached-object list holders
            // that implement both List and PartialStateHolder — but confirm
            // against the declaration of listenersByEventClass before
            // changing it.
            for (List<SystemEventListener> listener : listenersByEventClass.values()) {
                if (listener instanceof PartialStateHolder) {
                    ((PartialStateHolder) listener).markInitialState();
                }
            }
        }
        if (behaviors != null) {
            // Unlike the listener map above, behaviors ARE marked
            // per-element: every ClientBehavior of every event name.
            for (Entry<String, List<ClientBehavior>> entry : behaviors.entrySet()) {
                for (ClientBehavior behavior : entry.getValue()) {
                    if (behavior instanceof PartialStateHolder) {
                        ((PartialStateHolder) behavior).markInitialState();
                    }
                }
            }
        }
    }
}
public class AbstractFeatureAggregator { /** * Returns the index of the centroid which is closer to the given descriptor . * @ param descriptor * @ return */ protected int computeNearestCentroid ( double [ ] descriptor ) { } }
int centroidIndex = - 1 ; double minDistance = Double . MAX_VALUE ; for ( int i = 0 ; i < numCentroids ; i ++ ) { double distance = 0 ; for ( int j = 0 ; j < descriptorLength ; j ++ ) { distance += ( codebook [ i ] [ j ] - descriptor [ j ] ) * ( codebook [ i ] [ j ] - descriptor [ j ] ) ; // when distance becomes greater than minDistance // break the inner loop and check the next centroid ! ! ! if ( distance >= minDistance ) { break ; } } if ( distance < minDistance ) { minDistance = distance ; centroidIndex = i ; } } return centroidIndex ;
public class MethodParameter { /** * Return the generic type of the method / constructor parameter . * @ return the parameter type ( never { @ code null } ) */ public Type getGenericParameterType ( ) { } }
if ( this . genericParameterType == null ) { if ( this . parameterIndex < 0 ) { this . genericParameterType = ( this . method != null ? this . method . getGenericReturnType ( ) : null ) ; } else { this . genericParameterType = ( this . method != null ? this . method . getGenericParameterTypes ( ) [ this . parameterIndex ] : this . constructor . getGenericParameterTypes ( ) [ this . parameterIndex ] ) ; } } return this . genericParameterType ;
public class MessageProcessor { /** * setConfig is implemented from the interface JsEngineComponent and is used * to set properties on the MP * @ param config * the JsEObject config of the messaging engine * @ see com . ibm . ws . sib . admin . JsEngineComponent . setConfig */ @ Override public void setConfig ( LWMConfig meConfig ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setConfig" , new Object [ ] { meConfig } ) ; _highMessageThreshold = ( ( JsMessagingEngine ) meConfig ) . getMEThreshold ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setConfig" ) ;
public class MappingUtils { /** * カラム名 ( 小文字 ) をキーとしたMapにカラムマッピング情報を取得 * @ param entityType エンティティ型 * @ param kind SQL種別 * @ return カラムマッピング情報 */ public static Map < String , MappingColumn > getMappingColumnMap ( final Class < ? > entityType , final SqlKind kind ) { } }
return Arrays . stream ( MappingUtils . getMappingColumns ( entityType , kind ) ) . collect ( Collectors . toMap ( MappingColumn :: getCamelName , c -> c ) ) ;
public class Matrix3x2f { /** * Set the values within this matrix to the supplied float values . The result looks like this : * m00 , m10 , m20 < br > * m01 , m11 , m21 < br > * @ param m00 * the new value of m00 * @ param m01 * the new value of m01 * @ param m10 * the new value of m10 * @ param m11 * the new value of m11 * @ param m20 * the new value of m20 * @ param m21 * the new value of m21 * @ return this */ public Matrix3x2f set ( float m00 , float m01 , float m10 , float m11 , float m20 , float m21 ) { } }
this . m00 = m00 ; this . m01 = m01 ; this . m10 = m10 ; this . m11 = m11 ; this . m20 = m20 ; this . m21 = m21 ; return this ;
public class UpdateIntegrationResult { /** * A key - value map specifying request parameters that are passed from the method request to the backend . The key is * an integration request parameter name and the associated value is a method request parameter value or static * value that must be enclosed within single quotes and pre - encoded as required by the backend . The method request * parameter value must match the pattern of method . request . { location } . { name } , where { location } is querystring , * path , or header ; and { name } must be a valid and unique method request parameter name . * @ param requestParameters * A key - value map specifying request parameters that are passed from the method request to the backend . The * key is an integration request parameter name and the associated value is a method request parameter value * or static value that must be enclosed within single quotes and pre - encoded as required by the backend . The * method request parameter value must match the pattern of method . request . { location } . { name } , where * { location } is querystring , path , or header ; and { name } must be a valid and unique method request parameter * name . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateIntegrationResult withRequestParameters ( java . util . Map < String , String > requestParameters ) { } }
setRequestParameters ( requestParameters ) ; return this ;
public class AbstractAggregatorImpl {
    /**
     * Sets response status and headers for an error response based on the
     * information in the specified exception. If development mode is
     * enabled, then returns a 200 status with a console.error() message
     * specifying the exception message.
     *
     * @param req    the request object
     * @param resp   the response object
     * @param t      the exception object
     * @param status the response status (used only outside development mode)
     */
    protected void exceptionResponse(HttpServletRequest req, HttpServletResponse resp, Throwable t, int status) {
        final String sourceMethod = "exceptionResponse"; //$NON-NLS-1$
        // Error responses must never be cached.
        resp.addHeader("Cache-control", "no-store"); //$NON-NLS-1$ //$NON-NLS-2$
        // Client errors are only warnings; everything else is severe.
        Level logLevel = (t instanceof BadRequestException || t instanceof NotFoundException) ? Level.WARNING : Level.SEVERE;
        logException(req, logLevel, sourceMethod, t);
        if (getOptions().isDevelopmentMode() || getOptions().isDebugMode()) {
            // In development mode, display server exceptions on the browser
            // console. The 'status' argument is deliberately NOT applied in
            // this branch, so the response goes out with the container's
            // default status (per the Javadoc, a 200).
            // NOTE(review): the exception message is escaped twice — once
            // here and again by the outer escapeForJavaScript around the
            // formatted string — which may doubly escape quotes in the
            // message; confirm whether that is intended.
            String msg = StringUtil.escapeForJavaScript(
                MessageFormat.format(
                    Messages.ExceptionResponse,
                    new Object[]{
                        t.getClass().getName(),
                        t.getMessage() != null ? StringUtil.escapeForJavaScript(t.getMessage()) : "" //$NON-NLS-1$
                    }
                )
            );
            String content = "console.error('" + msg + "');"; //$NON-NLS-1$ //$NON-NLS-2$
            try {
                CopyUtil.copy(new StringReader(content), resp.getOutputStream());
            } catch (IOException e1) {
                // Could not write the console message; log it and fall back
                // to a plain 500.
                if (log.isLoggable(Level.SEVERE)) {
                    log.log(Level.SEVERE, e1.getMessage(), e1);
                }
                resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
        } else {
            // Production mode: report the caller-supplied error status.
            resp.setStatus(status);
        }
    }
}
public class TarArchive { /** * Extract an entry from the archive . This method assumes that the tarIn stream has been properly set with a call to * getNextEntry ( ) . * @ param destDir * The destination directory into which to extract . * @ param entry * The TarEntry returned by tarIn . getNextEntry ( ) . */ private void extractEntry ( File destDir , TarEntry entry ) throws IOException { } }
if ( this . verbose ) { if ( this . progressDisplay != null ) { this . progressDisplay . showTarProgressMessage ( entry . getName ( ) ) ; } } String name = entry . getName ( ) ; name = name . replace ( '/' , File . separatorChar ) ; File destFile = new File ( destDir , name ) ; if ( entry . isDirectory ( ) ) { if ( ! destFile . exists ( ) ) { if ( ! destFile . mkdirs ( ) ) { throw new IOException ( "error making directory path '" + destFile . getPath ( ) + "'" ) ; } } } else { File subDir = new File ( destFile . getParent ( ) ) ; if ( ! subDir . exists ( ) ) { if ( ! subDir . mkdirs ( ) ) { throw new IOException ( "error making directory path '" + subDir . getPath ( ) + "'" ) ; } } if ( this . keepOldFiles && destFile . exists ( ) ) { if ( this . verbose ) { if ( this . progressDisplay != null ) { this . progressDisplay . showTarProgressMessage ( "not overwriting " + entry . getName ( ) ) ; } } } else { boolean asciiTrans = false ; FileOutputStream out = new FileOutputStream ( destFile ) ; if ( this . asciiTranslate ) { MimeType mime = null ; String contentType = null ; try { contentType = FileTypeMap . getDefaultFileTypeMap ( ) . getContentType ( destFile ) ; mime = new MimeType ( contentType ) ; if ( mime . getPrimaryType ( ) . equalsIgnoreCase ( "text" ) ) { asciiTrans = true ; } else if ( this . transTyper != null ) { if ( this . transTyper . isAsciiFile ( entry . getName ( ) ) ) { asciiTrans = true ; } } } catch ( MimeTypeParseException ex ) { } if ( this . debug ) { System . err . println ( "EXTRACT TRANS? '" + asciiTrans + "' ContentType='" + contentType + "' PrimaryType='" + mime . getPrimaryType ( ) + "'" ) ; } } PrintWriter outw = null ; if ( asciiTrans ) { outw = new PrintWriter ( out ) ; } byte [ ] rdbuf = new byte [ 32 * 1024 ] ; for ( ; ; ) { int numRead = this . tarIn . 
read ( rdbuf ) ; if ( numRead == - 1 ) { break ; } if ( asciiTrans ) { for ( int off = 0 , b = 0 ; b < numRead ; ++ b ) { if ( rdbuf [ b ] == 10 ) { String s = new String ( rdbuf , off , ( b - off ) ) ; outw . println ( s ) ; off = b + 1 ; } } } else { out . write ( rdbuf , 0 , numRead ) ; } } if ( asciiTrans ) { outw . close ( ) ; } else { out . close ( ) ; } } }