signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FlightRecorderInputStream { /** * To record the bytes we ' ve skipped , convert the call to read . */ @ Override public long skip ( long n ) throws IOException { } }
byte [ ] buf = new byte [ ( int ) Math . min ( n , 64 * 1024 ) ] ; return read ( buf , 0 , buf . length ) ;
public class FilterInstanceWrapper {

    /**
     * Invokes the wrapped filter's doFilter method, tracking the in-service count,
     * notifying any registered filter invocation/error listeners, and normalizing
     * exception handling (FFDC reporting plus servlet-spec-level-dependent wrapping
     * of IOExceptions).
     *
     * @param request the servlet request object
     * @param response the servlet response object
     * @param chain the filter chain object
     * @throws ServletException if the filter fails or is unavailable
     * @throws IOException if the filter throws an I/O error (rethrown as-is for Servlet 3.1+)
     */
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws ServletException, IOException {
        try {
            // invoke the wrapped filter
            if (_filterState == FILTER_STATE_AVAILABLE) {
                nServicing.incrementAndGet();
                try {
                    if (request.isAsyncSupported()) {
                        // 141092: propagate the filter's own async-support flag to the request,
                        // recording a diagnostic attribute when the filter does NOT support async.
                        boolean isAsyncSupported = this._filterConfig.isAsyncSupported();
                        if (!isAsyncSupported) {
                            WebContainerRequestState reqState = WebContainerRequestState.getInstance(true);
                            if (reqState != null) {
                                reqState.setAttribute("resourceNotSupportAsync", "filter[ " + this._filterName + " ]");
                            }
                        } // 141092
                        ServletUtil.unwrapRequest(request).setAsyncSupported(isAsyncSupported);
                    }
                    // LIDB-3598: begin
                    // _filterInstance.doFilter(request, response, chain);
                    if (_eventSource != null && _eventSource.hasFilterInvocationListeners()) {
                        // Fire start/finish events around the actual invocation.
                        FilterInvocationEvent event = getFilterInvocationEvent(request);
                        _eventSource.onFilterStartDoFilter(event);
                        _filterInstance.doFilter(request, response, chain);
                        _eventSource.onFilterFinishDoFilter(event);
                    } else {
                        _filterInstance.doFilter(request, response, chain);
                    }
                    // LIDB-3598: end
                } catch (ServletException se) {
                    throw se;
                } catch (IOException ioe) // 174668
                {
                    throw ioe;
                } // 174668
                catch (Throwable th) {
                    throw th;
                } finally {
                    // Always balance the in-service counter, even on failure.
                    nServicing.decrementAndGet();
                }
            } else {
                throw new ServletException(MessageFormat.format("Filter [{0}]: filter is unavailable.", new Object[]{_filterName}));
            }
        } catch (ServletException se) {
            if (_eventSource != null && _eventSource.hasFilterErrorListeners()) {
                FilterErrorEvent errorEvent = getFilterErrorEvent(se);
                _eventSource.onFilterDoFilterError(errorEvent);
            }
            // start 140014: suppress FFDC noise while the application is being destroyed
            if (_filterState != FILTER_STATE_DESTROYING && _filterState != FILTER_STATE_DESTROYED)
                com.ibm.wsspi.webcontainer.util.FFDCWrapper.processException(se, "com.ibm.ws.webcontainer.filter.FilterInstanceWrapper.doFilter", "144", this);
            else if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                logger.logp(Level.FINE, CLASS_NAME, "doFilter", "Can not invoke filter because application is destroyed", se);
            // end 140014
            throw se;
        } catch (RuntimeException re) {
            throw re;
        } catch (FileNotFoundException fnfe) {
            // same as throwable without traces
            if (_eventSource != null && _eventSource.hasFilterErrorListeners()) {
                FilterErrorEvent errorEvent = getFilterErrorEvent(fnfe);
                _eventSource.onFilterDoFilterError(errorEvent);
            }
            throw new ServletException(fnfe);
        } catch (IOException ioe) { // 174668
            // SRVE0918E messages are always rethrown untouched on Servlet 3.1+.
            if ((com.ibm.ws.webcontainer.osgi.WebContainer.getServletContainerSpecLevel() >= 31) && ioe.getMessage() != null && ioe.getMessage().contains("SRVE0918E")) {
                throw ioe;
            }
            if (_eventSource != null && _eventSource.hasFilterErrorListeners()) {
                FilterErrorEvent errorEvent = getFilterErrorEvent(ioe);
                _eventSource.onFilterDoFilterError(errorEvent);
            }
            com.ibm.wsspi.webcontainer.util.FFDCWrapper.processException(ioe, "com.ibm.ws.webcontainer.filter.FilterInstanceWrapper.doFilter", "260", this);
            if (com.ibm.ws.webcontainer.osgi.WebContainer.getServletContainerSpecLevel() >= 31) {
                // Servlet 3.1+: rethrow the IOException as-is.
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                    logger.logp(Level.FINE, CLASS_NAME, "doFilter", "rethrow IOE", ioe);
                throw ioe;
            } else // to prevent regression for servlet 3.0 .. same as Throwable() below
            {
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                    logger.logp(Level.FINE, CLASS_NAME, "doFilter", "wrap IOE with ServletException", ioe);
                throw new ServletException(ioe);
            }
        } // 174668
        catch (Throwable th) {
            logger.logp(Level.SEVERE, CLASS_NAME, "service", "uncaught.filter.exception", new Object[]{_filterName, th});
            if (_eventSource != null && _eventSource.hasFilterErrorListeners()) {
                FilterErrorEvent errorEvent = getFilterErrorEvent(th);
                _eventSource.onFilterDoFilterError(errorEvent);
            }
            com.ibm.wsspi.webcontainer.util.FFDCWrapper.processException(th, "com.ibm.ws.webcontainer.filter.FilterInstanceWrapper.doFilter", "149", this);
            throw new ServletException(th);
        }
    }
}
public class ScheduleService { /** * IMPORTANT : this method is only meant for TOP level usage ( never use this within a transaction ) . It gobbles exception . */ public Stage rerunJobs ( final Stage stage , final List < String > jobNames , final HttpOperationResult result ) { } }
final StageIdentifier identifier = stage . getIdentifier ( ) ; HealthStateType healthStateForStage = HealthStateType . general ( HealthStateScope . forStage ( identifier . getPipelineName ( ) , identifier . getStageName ( ) ) ) ; if ( jobNames == null || jobNames . isEmpty ( ) ) { String message = "No job was selected to re-run." ; result . badRequest ( message , message , healthStateForStage ) ; return null ; } try { Stage resultStage = lockAndRerunStage ( identifier . getPipelineName ( ) , identifier . getPipelineCounter ( ) , identifier . getStageName ( ) , ( pipelineName , stageName , context ) -> { StageConfig stageConfig = goConfigService . stageConfigNamed ( identifier . getPipelineName ( ) , identifier . getStageName ( ) ) ; String latestMd5 = goConfigService . getCurrentConfig ( ) . getMd5 ( ) ; try { return instanceFactory . createStageForRerunOfJobs ( stage , jobNames , context , stageConfig , timeProvider , latestMd5 ) ; } catch ( CannotRerunJobException e ) { result . notFound ( e . getMessage ( ) , e . getMessage ( ) , healthStateForStage ) ; throw e ; } } , new ResultUpdatingErrorHandler ( result ) ) ; result . accepted ( String . format ( "Request to rerun jobs accepted" , identifier ) , "" , healthStateForStage ) ; return resultStage ; } catch ( RuntimeException e ) { if ( result . canContinue ( ) ) { String message = String . format ( "Job rerun request for job(s) [%s] could not be completed because of unexpected failure. Cause: %s" , StringUtils . join ( jobNames . toArray ( ) , ", " ) , e . getMessage ( ) ) ; result . internalServerError ( message , healthStateForStage ) ; LOGGER . error ( message , e ) ; } return null ; }
public class EventManager { /** * Initialize ZMQ event system if not already done , * subscribe to the interface change event end * returns the connection parameters . * @ param deviceName The specified event device name * @ return the connection parameters . */ public DevVarLongStringArray subscribe ( final String deviceName ) throws DevFailed { } }
xlogger . entry ( ) ; // If first time start the ZMQ management if ( ! isInitialized ) { initialize ( ) ; } // check if event is already subscribed final String fullName = EventUtilities . buildDeviceEventName ( deviceName , EventType . INTERFACE_CHANGE_EVENT ) ; EventImpl eventImpl = eventImplMap . get ( fullName ) ; if ( eventImpl == null ) { // If not already manage , create EventImpl object and add it to the map eventImpl = new EventImpl ( DeviceImpl . SERVER_VERSION , fullName ) ; eventImplMap . put ( fullName , eventImpl ) ; } else { eventImpl . updateSubscribeTime ( ) ; } return buildConnectionParameters ( fullName ) ;
public class FileExtensions {

    /**
     * Gets the absolute path of the given file without its filename,
     * including the trailing path separator. Returns an empty string when
     * the absolute path contains no separator at all.
     *
     * @param file the file
     * @return the absolute path without the filename
     */
    public static String getAbsolutPathWithoutFilename(final File file) {
        final String absolutePath = file.getAbsolutePath();
        // Prefer the unix separator; fall back to the windows one.
        int separatorIndex = absolutePath.lastIndexOf('/');
        if (separatorIndex < 0) {
            separatorIndex = absolutePath.lastIndexOf('\\');
        }
        return absolutePath.substring(0, separatorIndex + 1);
    }
}
public class ObjectType { /** * Gets the node corresponding to the definition of the specified property . * This could be the node corresponding to declaration of the property or the * node corresponding to the first reference to this property , e . g . , * " this . propertyName " in a constructor . Note this is mainly intended to be * an estimate of where in the source code a property is defined . Sometime * the returned node is not even part of the global AST but in the AST of the * JsDoc that defines a type . * @ param propertyName the name of the property * @ return the { @ code Node } corresponding to the property or null . */ public final Node getPropertyNode ( String propertyName ) { } }
Property p = getSlot ( propertyName ) ; return p == null ? null : p . getNode ( ) ;
public class Router {

    /**
     * Specifies middleware to be invoked for HTTP CONNECT requests whose path
     * matches the given regular expression.
     *
     * @param regex a regular expression matched against the request path
     * @param handlers the middleware to call, in order
     * @return this router, for call chaining
     */
    public Router connect(@NotNull final Pattern regex, @NotNull final IMiddleware... handlers) {
        // Delegate to the shared registration helper, bound to the CONNECT verb.
        addRegEx("CONNECT", regex, handlers, connectBindings);
        return this;
    }
}
public class FileInfo {

    /**
     * <code>optional string ufsPath = 4;</code>
     *
     * <p>Protobuf-generated accessor: the field is stored either as a
     * {@code String} or as a {@code ByteString}. When still a ByteString, it is
     * decoded as UTF-8 and the decoded String is cached back into the field if
     * the bytes are valid UTF-8.
     */
    public java.lang.String getUfsPath() {
        java.lang.Object ref = ufsPath_;
        if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            if (bs.isValidUtf8()) {
                // Cache the decoded value only when it round-trips as valid UTF-8.
                ufsPath_ = s;
            }
            return s;
        }
    }
}
public class BccClient { /** * Deleting the specified image . * Only the customized image can be deleted , * otherwise , it ' s will get < code > 403 < / code > errorCode . * @ param request The request containing all options for deleting the specified image . */ public void deleteImage ( DeleteImageRequest request ) { } }
checkNotNull ( request , "request should not be null." ) ; checkStringNotEmpty ( request . getImageId ( ) , "request imageId should not be empty." ) ; InternalRequest internalRequest = this . createRequest ( request , HttpMethodName . DELETE , IMAGE_PREFIX , request . getImageId ( ) ) ; invokeHttpClient ( internalRequest , AbstractBceResponse . class ) ;
public class HybridBinarizer {

    /**
     * Calculates the final BitMatrix once for all requests. This could be called once from the
     * constructor instead, but there are some advantages to doing it lazily, such as making
     * profiling easier, and not doing heavy lifting when callers don't expect it.
     *
     * <p>The result is cached in {@code matrix}; subsequent calls return the cached value.
     */
    @Override
    public BitMatrix getBlackMatrix() throws NotFoundException {
        if (matrix != null) {
            return matrix;
        }
        LuminanceSource source = getLuminanceSource();
        int width = source.getWidth();
        int height = source.getHeight();
        if (width >= MINIMUM_DIMENSION && height >= MINIMUM_DIMENSION) {
            byte[] luminances = source.getMatrix();
            // Number of blocks in each dimension, rounded up to cover partial edge blocks.
            int subWidth = width >> BLOCK_SIZE_POWER;
            if ((width & BLOCK_SIZE_MASK) != 0) {
                subWidth++;
            }
            int subHeight = height >> BLOCK_SIZE_POWER;
            if ((height & BLOCK_SIZE_MASK) != 0) {
                subHeight++;
            }
            // Per-block black points, then per-block thresholding into the matrix.
            int[][] blackPoints = calculateBlackPoints(luminances, subWidth, subHeight, width, height);
            BitMatrix newMatrix = new BitMatrix(width, height);
            calculateThresholdForBlock(luminances, subWidth, subHeight, width, height, blackPoints, newMatrix);
            matrix = newMatrix;
        } else {
            // If the image is too small, fall back to the global histogram approach.
            matrix = super.getBlackMatrix();
        }
        return matrix;
    }
}
public class ResourceResolver { /** * Searches resource loaders for one that supports the given prefix . * @ param prefix The prefix the loader should support . ( classpath : , file : , etc ) * @ return An optional resource loader */ public @ Nonnull Optional < ResourceLoader > getSupportingLoader ( @ Nonnull String prefix ) { } }
ArgumentUtils . requireNonNull ( "prefix" , prefix ) ; return resourceLoaders . stream ( ) . filter ( rl -> rl . supportsPrefix ( prefix ) ) . findFirst ( ) ;
public class CommonsPool2ConfigConverter {

    /**
     * Converts {@link GenericObjectPoolConfig} properties to an immutable {@link BoundedPoolConfig}.
     * Applies max total, min/max idle and test on borrow/create/release configuration.
     * Note the naming translation: commons-pool "borrow"/"return" maps to Lettuce
     * "acquire"/"release".
     *
     * @param config must not be {@literal null}.
     * @return the converted {@link BoundedPoolConfig}.
     */
    public static BoundedPoolConfig bounded(GenericObjectPoolConfig<?> config) {
        LettuceAssert.notNull(config, "GenericObjectPoolConfig must not be null");
        return BoundedPoolConfig.builder()
                .maxTotal(config.getMaxTotal())
                .maxIdle(config.getMaxIdle())
                .minIdle(config.getMinIdle())
                // borrow -> acquire, return -> release
                .testOnAcquire(config.getTestOnBorrow())
                .testOnCreate(config.getTestOnCreate())
                .testOnRelease(config.getTestOnReturn())
                .build();
    }
}
public class CmsManyToOneMap {

    /**
     * Removes the entry with the given key, keeping the reverse map consistent.<p>
     *
     * @param key the key to remove
     */
    public void remove(K key) {
        V removedValue = m_forwardMap.remove(key);
        if (removedValue != null) {
            // Also drop this key from the reverse (value -> keys) mapping.
            m_reverseMap.remove(removedValue, key);
        }
    }
}
public class DefaultShardManagerBuilder { /** * Sets the { @ link org . slf4j . MDC MDC } mappings provider to use in JDA . * < br > If sharding is enabled JDA will automatically add a { @ code jda . shard } context with the format { @ code [ SHARD _ ID / TOTAL ] } * where { @ code SHARD _ ID } and { @ code TOTAL } are the shard configuration . * Additionally it will provide context for the id via { @ code jda . shard . id } and the total via { @ code jda . shard . total } . * < p > < b > The manager will call this with a shardId and it is recommended to provide a different context map for each shard ! < / b > * < br > This automatically switches { @ link # setContextEnabled ( boolean ) } to true if the provided function is not null ! * @ param provider * The provider for < b > modifiable < / b > context maps to use in JDA , or { @ code null } to reset * @ return The DefaultShardManagerBuilder instance . Useful for chaining . * @ see < a href = " https : / / www . slf4j . org / api / org / slf4j / MDC . html " target = " _ blank " > MDC Javadoc < / a > */ public DefaultShardManagerBuilder setContextMap ( IntFunction < ? extends ConcurrentMap < String , String > > provider ) { } }
this . contextProvider = provider ; if ( provider != null ) this . enableContext = true ; return this ;
public class DualYearOfEraElement {

    /**
     * Prints this element's value using the number system and text width taken
     * from the given attributes, delegating to the internal print overload.
     *
     * @param context the chronological entity to format
     * @param buffer output target
     * @param attributes format attributes controlling number system, width and zero digit
     * @throws IOException if writing to the buffer fails
     * @throws ChronoException if the element value cannot be obtained
     */
    @Override
    public void print(ChronoDisplay context, Appendable buffer, AttributeQuery attributes) throws IOException, ChronoException {
        NumberSystem numsys = getNumberSystem(attributes);
        TextWidth width = attributes.get(Attributes.TEXT_WIDTH, TextWidth.NARROW);
        // Map the requested text width to a minimum digit count (NARROW=1 ... FULL/WIDE=4).
        int minDigits;
        switch (width) {
            case NARROW:
                minDigits = 1;
                break;
            case SHORT:
                minDigits = 2;
                break;
            case ABBREVIATED:
                minDigits = 3;
                break;
            default:
                minDigits = 4;
        }
        // Zero digit: explicit attribute wins; otherwise the number system's own zero
        // (for decimal systems), falling back to ASCII '0'.
        char zeroChar = (attributes.contains(Attributes.ZERO_DIGIT)
                ? attributes.get(Attributes.ZERO_DIGIT).charValue()
                : (numsys.isDecimal() ? numsys.getDigits().charAt(0) : '0'));
        this.print(context, buffer, attributes, numsys, zeroChar, minDigits, 10);
    }
}
public class IPv4 {

    /**
     * Parses the provided CIDR string and produces a closed {@link Range} encapsulating all IPv4 addresses between
     * the network and broadcast addresses in the subnet represented by the CIDR.
     *
     * @param cidr a CIDR string, e.g. {@code "192.168.0.0/24"}
     * @return the closed range [network, broadcast]
     * @throws IllegalArgumentException if the string is not a valid IPv4 CIDR
     */
    public static Range<IPv4> cidrRange(String cidr) {
        try {
            CidrInfo cidrInfo = parseCIDR(cidr);
            // Only IPv4 subnets are accepted; anything else falls through to the error below.
            if (cidrInfo.getNetwork() instanceof Inet4Address && cidrInfo.getBroadcast() instanceof Inet4Address) {
                return closed(new IPv4((Inet4Address) cidrInfo.getNetwork()), new IPv4((Inet4Address) cidrInfo.getBroadcast()));
            }
        } catch (Exception ignored) {
            // Deliberate best-effort: every parse failure is reported uniformly below.
        }
        throw new IllegalArgumentException(format("Invalid IPv4 cidr representation %s", cidr));
    }
}
public class FixedShardsDistribution {

    /**
     * Associates segments to each shard.
     *
     * <p>Servers with a zero shard quota contribute their primary segments to a
     * "remaining" pool which is distributed round-robin over all shard ids at the end.
     * NOTE: the server index {@code n} is advanced in exactly one of two places per
     * iteration (either in the zero-quota branch or in the split call) — keep that
     * invariant when modifying this method.
     *
     * @param shardsNumPerServer numbers of shards allocated for each server
     * @param segmentsPerServer the primary owned segments of each server
     * @param nodes the members of the cluster
     */
    private void populateSegments(int[] shardsNumPerServer, List<Set<Integer>> segmentsPerServer, List<Address> nodes) {
        int shardId = 0;
        int n = 0;
        Set<Integer> remainingSegments = new HashSet<>();
        for (Address node : nodes) {
            Collection<Integer> primarySegments = segmentsPerServer.get(n);
            int shardQuantity = shardsNumPerServer[n];
            if (shardQuantity == 0) {
                // No shards for this server: defer its segments to the round-robin pass.
                remainingSegments.addAll(segmentsPerServer.get(n++));
                continue;
            }
            shardsPerAddressMap.computeIfAbsent(node, a -> new HashSet<>(shardQuantity));
            // Split this server's primary segments across its shard quota (advances n).
            List<Set<Integer>> segments = this.split(primarySegments, shardsNumPerServer[n++]);
            for (Collection<Integer> shardSegments : segments) {
                String id = String.valueOf(shardId++);
                shardSegments.forEach(seg -> shardPerSegmentMap.put(seg, id));
                shardsPerAddressMap.get(node).add(id);
                addressPerShardMap.put(id, node);
            }
        }
        if (!remainingSegments.isEmpty()) {
            // Round-robin the leftover segments over all shard ids 0..numShards-1.
            Iterator<String> shardIterator = Stream.iterate(0, i -> (i + 1) % numShards).map(String::valueOf).iterator();
            for (Integer segment : remainingSegments) {
                shardPerSegmentMap.put(segment, shardIterator.next());
            }
        }
    }
}
public class GeoPackageGeometryData { /** * Get the Well - Known Binary Geometry bytes * @ return bytes */ public byte [ ] getWkbBytes ( ) { } }
int wkbByteCount = bytes . length - wkbGeometryIndex ; byte [ ] wkbBytes = new byte [ wkbByteCount ] ; System . arraycopy ( bytes , wkbGeometryIndex , wkbBytes , 0 , wkbByteCount ) ; return wkbBytes ;
public class Example03_PatientResourceProvider { /** * Simple " search " implementation * */ @ Search public List < Patient > search ( ) { } }
List < Patient > retVal = new ArrayList < Patient > ( ) ; retVal . addAll ( myPatients . values ( ) ) ; return retVal ;
public class MPDUtility {

    /**
     * Writes a byte array to a file. Any {@link IOException} is reported to
     * stderr rather than propagated (best-effort, matching the original contract).
     *
     * @param fileName output file name
     * @param data bytes to write
     */
    public static final void fileDump(String fileName, byte[] data) {
        // try-with-resources guarantees the stream is closed even when write()
        // fails — the original open/write/close sequence leaked on failure.
        try (FileOutputStream os = new FileOutputStream(fileName)) {
            os.write(data);
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
}
public class HashIndex { /** * This is the analogue of < code > loadFromFilename < / code > , and is intended to be included in a routine * that unpacks a text - serialized form of an object that incorporates an Index . * NOTE : presumes that the next readLine ( ) will read in the first line of the * portion of the text file representing the saved Index . Currently reads until it * encounters a blank line , consuming that line and returning the Index . * TODO : figure out how best to terminate : currently a blank line is considered to be a terminator . * @ param br The Reader to read the index from * @ return An Index read from a file */ public static Index < String > loadFromReader ( BufferedReader br ) throws IOException { } }
HashIndex < String > index = new HashIndex < String > ( ) ; String line = br . readLine ( ) ; // terminate if EOF reached , or if a blank line is encountered . while ( ( line != null ) && ( line . length ( ) > 0 ) ) { int start = line . indexOf ( '=' ) ; if ( start == - 1 || start == line . length ( ) - 1 ) { continue ; } index . add ( line . substring ( start + 1 ) ) ; line = br . readLine ( ) ; } return index ;
public class AuthRundeckStorageTree { /** * Map containing path and name given a path * @ param path path * @ return map */ private Map < String , String > authResForPath ( Path path ) { } }
HashMap < String , String > authResource = new HashMap < String , String > ( ) ; authResource . put ( PATH_RES_KEY , path . getPath ( ) ) ; authResource . put ( NAME_RES_KEY , path . getName ( ) ) ; return authResource ;
public class CmsXmlContainerPageFactory {

    /**
     * Returns the cached container page.<p>
     *
     * @param cms the cms context
     * @param resource the container page resource
     * @param keepEncoding if to keep the encoding while unmarshalling
     * @return the cached container page, or <code>null</code> if not found
     */
    private static CmsXmlContainerPage getCache(CmsObject cms, CmsResource resource, boolean keepEncoding) {
        // History resources are never served from the cache.
        if (resource instanceof I_CmsHistoryResource) {
            return null;
        }
        // Cache key is derived from the structure id and encoding flag; the lookup
        // is scoped to online vs. offline project.
        return getCache().getCacheContainerPage(
            getCache().getCacheKey(resource.getStructureId(), keepEncoding),
            cms.getRequestContext().getCurrentProject().isOnlineProject());
    }
}
public class DefaultGroovyMethods {

    /**
     * Converts the given collection to another type. A default concrete
     * type is used for List, Set, or SortedSet. If the given type has
     * a constructor taking a collection, that is used. Otherwise, the
     * call is deferred to {@link #asType(Object, Class)}. If this
     * collection is already of the given type, the same instance is
     * returned.
     *
     * @param col a collection
     * @param clazz the desired class
     * @return the object resulting from this type conversion
     * @see #asType(java.lang.Object, java.lang.Class)
     * @since 1.0
     */
    @SuppressWarnings("unchecked")
    public static <T> T asType(Collection col, Class<T> clazz) {
        // Identity conversion: already the exact requested class.
        if (col.getClass() == clazz) {
            return (T) col;
        }
        // Well-known collection interfaces get a default concrete implementation.
        if (clazz == List.class) {
            return (T) asList((Iterable) col);
        }
        if (clazz == Set.class) {
            if (col instanceof Set) return (T) col;
            return (T) new LinkedHashSet(col);
        }
        if (clazz == SortedSet.class) {
            if (col instanceof SortedSet) return (T) col;
            return (T) new TreeSet(col);
        }
        if (clazz == Queue.class) {
            if (col instanceof Queue) return (T) col;
            return (T) new LinkedList(col);
        }
        if (clazz == Stack.class) {
            if (col instanceof Stack) return (T) col;
            final Stack stack = new Stack();
            stack.addAll(col);
            return (T) stack;
        }
        // Array targets (except String[], which has special coercion elsewhere).
        if (clazz != String[].class && ReflectionCache.isArray(clazz)) {
            try {
                return (T) asArrayType(col, clazz);
            } catch (GroovyCastException e) {
                /* ignore */
            }
        }
        // Try a constructor taking the collection as its single argument.
        Object[] args = {col};
        try {
            return (T) InvokerHelper.invokeConstructorOf(clazz, args);
        } catch (Exception e) {
            // ignore, the constructor that takes a Collection as an argument may not exist
        }
        // Try a no-arg constructor followed by addAll, for Collection targets.
        if (Collection.class.isAssignableFrom(clazz)) {
            try {
                Collection result = (Collection) InvokerHelper.invokeConstructorOf(clazz, null);
                result.addAll(col);
                return (T) result;
            } catch (Exception e) {
                // ignore, the no arg constructor might not exist.
            }
        }
        // Last resort: generic object coercion.
        return asType((Object) col, clazz);
    }
}
public class AstNodeFactory {

    /**
     * Utility method to determine if an {@link AstNode} contains a specific mixin type.
     *
     * @param node the AstNode; may not be null
     * @param mixinType the target mixin type {@link String}; may not be null
     * @return true if the mixinType exists for this node
     */
    public boolean hasMixinType(AstNode node, String mixinType) {
        CheckArg.isNotNull(node, "node");
        CheckArg.isNotNull(mixinType, "mixinType");
        return node.getMixins().contains(mixinType);
    }
}
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the {@code IfcSwitchingDeviceTypeEnum} EEnum, lazily resolving it
     * from the registered Ifc4 EPackage on first access (classifier index 1080
     * is fixed by the code generator).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcSwitchingDeviceTypeEnum() {
        if (ifcSwitchingDeviceTypeEnumEEnum == null) {
            ifcSwitchingDeviceTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1080);
        }
        return ifcSwitchingDeviceTypeEnumEEnum;
    }
}
public class LoadedClassCache { /** * Returns a class object . If the class is new , a new Class object is * created , otherwise the cached object is returned . * @ param cl * the classloader * @ param className * the class name * @ return the class object associated to the given class name * @ throws ClassNotFoundException * if the class can ' t be loaded */ public static Class getClass ( final ClassLoader cl , final String className ) throws ClassNotFoundException { } }
if ( LOADED_PLUGINS . get ( cl ) == null ) { LOADED_PLUGINS . put ( cl , new ClassesData ( ) ) ; } ClassesData cd = LOADED_PLUGINS . get ( cl ) ; Class clazz = cd . getClass ( className ) ; if ( clazz == null ) { clazz = cl . loadClass ( className ) ; saveClass ( cl , clazz ) ; } return clazz ;
public class PathElement { /** * A valid key contains alphanumerics and underscores , cannot start with a * number , and cannot start or end with { @ code - } . */ private static boolean isValidKey ( final String s ) { } }
// Equivalent to this regex \ * | [ _ a - zA - Z ] ( ? : [ - _ a - zA - Z0-9 ] * [ _ a - zA - Z0-9 ] ) but faster if ( s == null ) { return false ; } if ( s . equals ( WILDCARD_VALUE ) ) { return true ; } int lastIndex = s . length ( ) - 1 ; if ( lastIndex == - 1 ) { return false ; } if ( ! isValidKeyStartCharacter ( s . charAt ( 0 ) ) ) { return false ; } for ( int i = 1 ; i < lastIndex ; i ++ ) { if ( ! isValidKeyCharacter ( s . charAt ( i ) ) ) { return false ; } } if ( lastIndex > 0 && ! isValidKeyEndCharacter ( s . charAt ( lastIndex ) ) ) { return false ; } return true ;
public class OnLineStatistics {

    /**
     * Effectively removes a sample with the given value and weight from the total.
     * Removing values that have not been added may yield results that have no meaning.
     * <br><br>
     * NOTE: {@link #getSkewness()} and {@link #getKurtosis()} are not currently updated correctly.
     *
     * @param x the value of the sample
     * @param weight the weight of the sample
     * @throws ArithmeticException if a negative weight is given
     */
    public void remove(double x, double weight) {
        if (weight < 0)
            throw new ArithmeticException("Can not remove a negative weight");
        else if (weight == 0)
            return;

        // Inverse of the weighted online-update recurrences: back out this sample's
        // contribution to the total weight, mean and central moments, in this order.
        double n1 = n;
        n -= weight;
        double delta = x - mean;
        double delta_n = delta * weight / n;
        double delta_n2 = delta_n * delta_n;
        double term1 = delta * delta_n * n1;
        mean -= delta_n;
        m2 -= weight * delta * (x - mean);
        // TODO m3 and m4 arent getting updated correctly
        m3 -= term1 * delta_n * (n - 2 + weight) - 3 * delta_n * m2;
        m4 -= term1 * delta_n2 * (n * n - 3 * n + 3) + 6 * delta_n2 * m2 - 4 * delta_n * m3;
    }
}
public class TableRef { /** * Creates a table with a custom throughput . The provision type is Custom and the provision load is ignored . * < pre > * StorageRef storage = new StorageRef ( " your _ app _ key " , " your _ token " ) ; * TableRef tableRef = storage . table ( " your _ table " ) ; * / / Create table ' myTable ' with the following schema ( with custom provisioning ) * tableRef . create ( new Key ( new KeySchema ( " id " , StorageRef . StorageDataType . STRING ) , new KeySchema ( " timestamp " , StorageRef . StorageDataType . NUMBER ) ) , * new Throughput ( 1,1 ) , new OnTableCreation ( ) { * & # 064 ; Override * public void run ( String table , Double creationDate , String status ) { * Log . d ( " TableRef " , " Table with name : " + table + " , created at : " + new Date ( ( long ) ( creationDate * 1000.0 ) ) . toString ( ) + " , with status : " + status ) ; * } , new OnError ( ) { * & # 064 ; Override * public void run ( Integer code , String errorMessage ) { * Log . e ( " TableRef " , " Error creating table : " + errorMessage ) ; * < / pre > * @ param key The schema of the primary and secondary ( optional ) keys . * @ param throughput The number of read and write operations per second . * @ param onTableCreation * @ param onError * @ return Table reference */ public TableRef create ( Key key , Throughput throughput , OnTableCreation onTableCreation , OnError onError ) { } }
PostBodyBuilder pbb = new PostBodyBuilder ( context ) ; pbb . addObject ( "table" , this . name ) ; pbb . addObject ( "provisionType" , StorageProvisionType . CUSTOM . getValue ( ) ) ; pbb . addObject ( "key" , key . map ( ) ) ; pbb . addObject ( "throughput" , throughput . map ( ) ) ; Rest r = new Rest ( context , RestType . CREATETABLE , pbb , null ) ; r . onError = onError ; r . onTableCreation = onTableCreation ; context . processRest ( r ) ; return this ;
public class AptControlImplementation { /** * Does this control impl on one of it superclasses implement java . io . Serializable ? * @ return true if this control impl or one of its superclasses implements java . io . Serializable . */ protected boolean isSerializable ( ) { } }
for ( InterfaceType superIntf : _implDecl . getSuperinterfaces ( ) ) { if ( superIntf . toString ( ) . equals ( "java.io.Serializable" ) ) { return true ; } } // check to see if the superclass is serializable return _superClass != null && _superClass . isSerializable ( ) ;
public class AttributeListHelper {

    /**
     * Adds the given attributes to the parent for saving, by visiting each one
     * with a freshly created model-to-GedObject visitor.
     *
     * @param attributes the attributes to add to the list
     */
    private void addToAttributes(final List<ApiAttribute> attributes) {
        // NOTE(review): loop variable is typed ApiObject while the list holds
        // ApiAttribute — presumably ApiAttribute extends ApiObject; confirm.
        for (final ApiObject object : attributes) {
            // A new visitor per object — presumably the visitor carries per-object
            // state; verify before hoisting out of the loop.
            final ApiModelToGedObjectVisitor visitor = createVisitor();
            object.accept(visitor);
        }
    }
}
public class GoogleRecognitionServiceImpl { /** * Manage recognizer cancellation runnable . * @ param action ( int ) ( 0 - stop , 1 - restart ) */ private void updateStopRunnable ( final int action ) { } }
if ( stopRunnable != null ) { if ( action == 0 ) { handler . removeCallbacks ( stopRunnable ) ; } else if ( action == 1 ) { handler . removeCallbacks ( stopRunnable ) ; handler . postDelayed ( stopRunnable , STOP_DELAY ) ; } }
public class BuiltinSlotTypeMetadataMarshaller {

    /**
     * Marshalls the given {@code BuiltinSlotTypeMetadata} (signature and supported
     * locales) into the protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(BuiltinSlotTypeMetadata builtinSlotTypeMetadata, ProtocolMarshaller protocolMarshaller) {
        if (builtinSlotTypeMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(builtinSlotTypeMetadata.getSignature(), SIGNATURE_BINDING);
            protocolMarshaller.marshall(builtinSlotTypeMetadata.getSupportedLocales(), SUPPORTEDLOCALES_BINDING);
        } catch (Exception e) {
            // Normalize any marshalling failure into an SdkClientException, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractProfileProfileAligner {

    /**
     * Sets the query {@link Profile} and invalidates any previously computed alignment.
     *
     * @param query the first {@link Profile} of the pair to align
     */
    public void setQuery(Profile<S, C> query) {
        this.query = query;
        // Drop the stale async result and reset cached alignment state.
        queryFuture = null;
        reset();
    }
}
public class AvroRowDeserializationSchema { private Row convertAvroRecordToRow ( Schema schema , RowTypeInfo typeInfo , IndexedRecord record ) { } }
final List < Schema . Field > fields = schema . getFields ( ) ; final TypeInformation < ? > [ ] fieldInfo = typeInfo . getFieldTypes ( ) ; final int length = fields . size ( ) ; final Row row = new Row ( length ) ; for ( int i = 0 ; i < length ; i ++ ) { final Schema . Field field = fields . get ( i ) ; row . setField ( i , convertAvroType ( field . schema ( ) , fieldInfo [ i ] , record . get ( i ) ) ) ; } return row ;
public class QueuePlugin {

    /**
     * Adds a delay in the named queue of every matched element, followed by the
     * given functions.
     *
     * @param milliseconds how long to delay before running the queued functions
     * @param name the queue name
     * @param funcs functions to run after the delay
     * @return this plugin, for chaining
     */
    @SuppressWarnings("unchecked")
    public T delay(int milliseconds, String name, Function... funcs) {
        // Enqueue a per-element delay function on each matched element's named queue.
        for (Element e : elements()) {
            queue(e, name, new DelayFunction(e, name, milliseconds, funcs));
        }
        return (T) this;
    }
}
public class NioGroovyMethods { /** * Write the text to the Path , using the specified encoding . If the given * charset is " UTF - 16BE " or " UTF - 16LE " ( or an equivalent alias ) and * < code > writeBom < / code > is < code > true < / code > , the requisite byte order * mark is written to the file before the text . * @ param self a Path * @ param text the text to write to the Path * @ param charset the charset used * @ param writeBom whether to write a BOM * @ throws java . io . IOException if an IOException occurs . * @ since 2.5.0 */ public static void write ( Path self , String text , String charset , boolean writeBom ) throws IOException { } }
Writer writer = null ; try { OutputStream out = Files . newOutputStream ( self ) ; if ( writeBom ) { IOGroovyMethods . writeUTF16BomIfRequired ( out , charset ) ; } writer = new OutputStreamWriter ( out , Charset . forName ( charset ) ) ; writer . write ( text ) ; writer . flush ( ) ; Writer temp = writer ; writer = null ; temp . close ( ) ; } finally { closeWithWarning ( writer ) ; }
public class NotificationBoard {

    /**
     * Sets the margin of the header.
     *
     * @param l left margin
     * @param t top margin
     * @param r right margin
     * @param b bottom margin
     */
    public void setHeaderMargin(int l, int t, int r, int b) {
        // Pure delegation; mHeader owns the actual layout update.
        mHeader.setMargin(l, t, r, b);
    }
}
public class PropertyEditorBase {

    /**
     * Initializes the property editor.
     *
     * @param target the target object being edited
     * @param propInfo the PropertyInfo instance reflecting the property being edited on the target
     * @param propGrid the property grid owning this property editor
     */
    protected void init(Object target, PropertyInfo propInfo, PropertyGrid propGrid) {
        this.target = target;
        this.propInfo = propInfo;
        this.propGrid = propGrid;
        // The editor's index is its position among editors already registered on the grid.
        this.index = propGrid.getEditorCount();
        wireController();
    }
}
public class AmazonEC2Client {

    /**
     * Resets a network interface attribute. You can specify only one attribute at a time.
     *
     * @param request contains the parameters for ResetNetworkInterfaceAttribute
     * @return result of the ResetNetworkInterfaceAttribute operation returned by the service
     * @sample AmazonEC2.ResetNetworkInterfaceAttribute
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ResetNetworkInterfaceAttribute"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ResetNetworkInterfaceAttributeResult resetNetworkInterfaceAttribute(ResetNetworkInterfaceAttributeRequest request) {
        // Apply client-side hooks (request handlers, metrics) before dispatching the call.
        request = beforeClientExecution(request);
        return executeResetNetworkInterfaceAttribute(request);
    }
}
public class DataSourceService { /** * Indicates whether or not thread identity , sync - to - thread , and RRS transactions are supported . * The result is a 3 element array , of which , * < ul > * < li > The first element indicates support for thread identity . 2 = REQUIRED , 1 = ALLOWED , 0 = NOT ALLOWED . < / li > * < li > The second element indicates support for " synch to thread " for the * allocateConnection , i . e . , push an ACEE corresponding to the current java * Subject on the native OS thread . 1 = supported , 0 = not supported . < / li > * < li > The third element indicates support for RRS transactions . 1 = supported , 0 = not supported . < / li > * < / ul > * Prerequisite : the invoker must hold a read or write lock on this instance . * @ param identifier identifier for the class loader from which to load vendor classes ( for XA recovery path ) . Otherwise , null . * @ return boolean array indicating whether or not each of the aforementioned capabilities are supported . */ @ Override public int [ ] getThreadIdentitySecurityAndRRSSupport ( String identifier ) { } }
WSManagedConnectionFactoryImpl mcf1 ; if ( jdbcDriverSvc . loadFromApp ( ) ) { final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; // data source class is loaded from thread context class loader if ( identifier == null ) { ClassLoader tccl = priv . getContextClassLoader ( ) ; identifier = connectorSvc . getClassLoaderIdentifierService ( ) . getClassLoaderIdentifier ( tccl ) ; // TODO better error handling when thread context class loader does not have an identifier } mcf1 = mcfPerClassLoader . get ( identifier ) ; } else mcf1 = mcf ; DatabaseHelper dbHelper = mcf1 . getHelper ( ) ; return new int [ ] { dbHelper . getThreadIdentitySupport ( ) , dbHelper . getThreadSecurity ( ) ? 1 : 0 , dbHelper . getRRSTransactional ( ) ? 1 : 0 } ;
public class KeenQueryClient {

    /**
     * Sends a request to the server in this client's project, using the given URL and request
     * data via the given HTTP method and authenticated with the given key. The request data
     * is serialized into JSON using the client's {@code KeenJsonHandler}.
     *
     * @param url the URL to which the given data should be sent
     * @param method the HTTP method to use
     * @param authKey the key to use for authentication of this request
     * @param requestData the request data, serialized into JSON and sent in the request body;
     *            must be null/empty for GET and DELETE
     * @return the response from the server as a map
     * @throws IOException if there was an error communicating with the server
     */
    private Map<String, Object> sendRequest(final URL url, final String method, final String authKey, final Map<String, ?> requestData) throws IOException {
        boolean useOutputSource = true;
        // GET/DELETE must not carry a body: the underlying connection would silently
        // convert a request with a body into a POST.
        if (HttpMethods.GET.equals(method) || HttpMethods.DELETE.equals(method)) {
            if (null != requestData && !requestData.isEmpty()) {
                throw new IllegalStateException("Trying to send a GET request with a request " + "body, which would result in sending a POST.");
            }
            useOutputSource = false;
        }
        // Build an output source which simply writes the serialized JSON to the output.
        OutputSource source = (!useOutputSource ? null : new OutputSource() {
            @Override
            public void writeTo(OutputStream out) throws IOException {
                OutputStreamWriter writer = new OutputStreamWriter(out, ENCODING);
                // In queries, requestData may be null — then an empty body is written.
                if (requestData != null && !requestData.isEmpty()) {
                    jsonHandler.writeJson(writer, requestData);
                }
            }
        });
        // Correlate request and response log lines via a random ID.
        String requestId = UUID.randomUUID().toString();
        if (KeenLogging.isLoggingEnabled()) {
            try {
                String request = "";
                if (requestData != null && !requestData.isEmpty()) {
                    StringWriter writer = new StringWriter();
                    jsonHandler.writeJson(writer, requestData);
                    request = writer.toString();
                }
                KeenLogging.log(String.format(Locale.US, "Request ID: %s. Sent '%s' request '%s' to URL '%s'", requestId, method, request, url.toString()));
            } catch (IOException e) {
                // Logging must never break the request itself.
                KeenLogging.log("Couldn't log request written to file: ", e);
            }
        }
        // Send the request.
        Request request = new Request(url, method, authKey, source, null, connectTimeout, readTimeout);
        Response response = httpHandler.execute(request);
        if (KeenLogging.isLoggingEnabled()) {
            KeenLogging.log(String.format("Request ID: %s. Received response: '%s'", requestId, response.body));
        }
        if (!response.isSuccess()) {
            throw new ServerException(response.body);
        }
        // A blank body is only legitimate for 204 No Content.
        if ((null == response.body || response.body.trim().isEmpty()) && HttpURLConnection.HTTP_NO_CONTENT != response.statusCode) {
            throw new ServerException("Empty response when response was expected.");
        }
        Map<String, Object> responseMap;
        if (HttpURLConnection.HTTP_NO_CONTENT == response.statusCode) {
            responseMap = Collections.emptyMap();
        } else {
            // Parse the response into a map.
            StringReader reader = new StringReader(response.body);
            responseMap = jsonHandler.readJson(reader);
        }
        // Check for an error code if no result was provided.
        if (null == responseMap.get(KeenQueryConstants.RESULT)) {
            // Double check if the result is null because of an error payload (shouldn't happen
            // on a successful status, but guard anyway).
            if (responseMap.containsKey(KeenQueryConstants.ERROR_CODE)) {
                Object errorCode = responseMap.get(KeenQueryConstants.ERROR_CODE);
                Object message = responseMap.get(KeenQueryConstants.MESSAGE);
                String errorMessage = "Error response received from server";
                if (errorCode != null) {
                    errorMessage += " " + errorCode.toString();
                }
                if (message != null) {
                    errorMessage += ": " + message.toString();
                }
                throw new KeenQueryClientException(errorMessage);
            }
        }
        // Return the entire response map.
        return responseMap;
    }
}
public class PELoader {

    /**
     * For testing purposes only: loads a sample PE file from a hard-coded local path
     * and prints its analysis report to stdout.
     *
     * @param args ignored
     * @throws IOException
     */
    public static void main(String[] args) throws IOException, AWTException {
        logger.entry();
        // NOTE(review): developer-local sample path; this main is a manual test harness only.
        File file = new File("/home/karsten/samples/65535sects.exe");
        ReportCreator reporter = ReportCreator.apply(file);
        reporter.printReport();
        // File file2 = new File("/home/katja/samples/tesla2");
        // List<File> list = new ArrayList<>();
        // list.add(file);
        // list.add(file2);
        // TODO create Unit test for resource type with name!
        // for (File file : folder.listFiles()) {
        // if (!file.isDirectory() && new PESignature(file).exists()) {
        // System.out.println("Report for " + file.getName());
        // System.out.println();
        // DiffReportCreator.newInstance(list).printReport();
        // VisualizerBuilder builder = new VisualizerBuilder();
        // Visualizer vi = builder.build();
        // final BufferedImage entropyImage = vi.createEntropyImage(file);
        // final BufferedImage structureImage = vi.createImage(file);
        // final BufferedImage appendedImage = ImageUtil.appendImages(
        // entropyImage, structureImage);
        // show(appendedImage);
    }
}
public class CircularSeekBar {

    /**
     * Set the max of the CircularSeekBar.
     * If the new max is less than the current progress, then the progress will be set to zero.
     * If the progress is changed as a result, then any listener will receive an onProgressChanged event.
     *
     * @param max the new max for the CircularSeekBar; non-positive values are ignored
     */
    public void setMax(int max) {
        if (!(max <= 0)) { // ignore non-positive values entirely
            if (max <= mProgress) {
                // NOTE(review): the Javadoc says "less than", but max == mProgress also
                // resets progress to zero here — confirm this is intended.
                mProgress = 0; // If the new max is less than current progress, set progress to zero
                if (mOnCircularSeekBarChangeListener != null) {
                    // fromUser == false: this change was triggered programmatically.
                    mOnCircularSeekBarChangeListener.onProgressChanged(this, mProgress, false);
                }
            }
            mMax = max;
            recalculateAll();
            invalidate();
        }
    }
}
public class Routable {

    /**
     * Maps a filter to be executed after any matching routes, even if the route throws any exception.
     *
     * @param filter the filter to run for all paths
     */
    public void afterAfter(Filter filter) {
        // Registered under the special 'afterafter' pseudo-method so it matches every path.
        addFilter(HttpMethod.afterafter, FilterImpl.create(SparkUtils.ALL_PATHS, filter));
    }
}
public class CharSet { /** * < p > Does the { @ code CharSet } contain the specified * character { @ code ch } . < / p > * @ param ch the character to check for * @ return { @ code true } if the set contains the characters */ public boolean contains ( final char ch ) { } }
for ( final CharRange range : set ) { if ( range . contains ( ch ) ) { return true ; } } return false ;
public class CmsModelPageHelper { /** * Creates a new model group page . < p > * @ param name the page name * @ param description the page description * @ param copyId structure id of the resource to use as a model for the model page , if any ( may be null ) * @ return the new resource * @ throws CmsException in case something goes wrong */ public CmsResource createModelGroupPage ( String name , String description , CmsUUID copyId ) throws CmsException { } }
CmsResource newPage = null ; CmsResourceTypeConfig config = m_adeConfig . getResourceType ( CmsResourceTypeXmlContainerPage . MODEL_GROUP_TYPE_NAME ) ; if ( ( config != null ) && ! config . isDisabled ( ) ) { if ( copyId == null ) { newPage = config . createNewElement ( m_cms , m_rootResource . getRootPath ( ) ) ; } else { CmsResource copyResource = m_cms . readResource ( copyId ) ; newPage = config . createNewElement ( m_cms , copyResource , m_rootResource . getRootPath ( ) ) ; } m_cms . lockResource ( newPage ) ; CmsProperty titleProp = new CmsProperty ( CmsPropertyDefinition . PROPERTY_TITLE , name , null ) ; CmsProperty descriptionProp = new CmsProperty ( CmsPropertyDefinition . PROPERTY_DESCRIPTION , description , null ) ; m_cms . writePropertyObject ( m_cms . getSitePath ( newPage ) , titleProp ) ; m_cms . writePropertyObject ( m_cms . getSitePath ( newPage ) , descriptionProp ) ; tryUnlock ( newPage ) ; } return newPage ;
public class CmsJspLoader { /** * Generates the taglib directives for a collection of taglib identifiers . < p > * @ param taglibs the taglib identifiers * @ return a string containing taglib directives */ protected String generateTaglibInclusions ( Collection < String > taglibs ) { } }
StringBuffer buffer = new StringBuffer ( ) ; for ( String taglib : taglibs ) { String uri = m_taglibs . get ( taglib ) ; if ( uri != null ) { buffer . append ( "<%@ taglib prefix=\"" + taglib + "\" uri=\"" + uri + "\" %>" ) ; } } return buffer . toString ( ) ;
public class QueryServiceImpl { /** * Splits a list of qualified ( meta - ) annotation names into a proper java * list . * @ param rawCorpusNames The qualified names separated by " , " . * @ return */ private List < MatrixQueryData . QName > splitMatrixKeysFromRaw ( String raw ) { } }
LinkedList < MatrixQueryData . QName > result = new LinkedList < > ( ) ; String [ ] split = raw . split ( "," ) ; for ( String s : split ) { String [ ] nameSplit = s . trim ( ) . split ( ":" , 2 ) ; MatrixQueryData . QName qname = new MatrixQueryData . QName ( ) ; if ( nameSplit . length == 2 ) { qname . namespace = nameSplit [ 0 ] . trim ( ) ; qname . name = nameSplit [ 1 ] . trim ( ) ; } else { qname . name = nameSplit [ 0 ] . trim ( ) ; } result . add ( qname ) ; } return result ;
public class Gtin8Validator { /** * { @ inheritDoc } check if given string is a valid gtin . * @ see javax . validation . ConstraintValidator # isValid ( java . lang . Object , * javax . validation . ConstraintValidatorContext ) */ @ Override public final boolean isValid ( final Object pvalue , final ConstraintValidatorContext pcontext ) { } }
final String valueAsString = Objects . toString ( pvalue , null ) ; if ( StringUtils . isEmpty ( valueAsString ) ) { return true ; } if ( ! StringUtils . isNumeric ( valueAsString ) ) { // EAN8 must be numeric , but that ' s handled by digits annotation return true ; } if ( valueAsString . length ( ) != GTIN8_LENGTH ) { // EAN8 size is wrong , but that ' s handled by size annotation return true ; } // calculate and check checksum ( GTIN8 / EAN8) return CHECK_GTIN8 . isValid ( valueAsString ) ;
public class BootstrapContextImpl { /** * Returns the component name of the JCAContextProvider for the specified work context class . * @ param workContextClass a WorkContext implementation class or ExecutionContext . * @ return the component name of the JCAContextProvider . */ String getJCAContextProviderName ( Class < ? > workContextClass ) { } }
ServiceReference < JCAContextProvider > ref = null ; for ( Class < ? > cl = workContextClass ; ref == null && cl != null ; cl = cl . getSuperclass ( ) ) ref = contextProviders . getReference ( cl . getName ( ) ) ; String name = ref == null ? null : ( String ) ref . getProperty ( JCAContextProvider . CONTEXT_NAME ) ; if ( name == null && ref != null ) name = ( String ) ref . getProperty ( "component.name" ) ; return name ;
public class AbstractQueryDecorator {

    /**
     * (non-Javadoc)
     * @see org.springframework.data.solr.core.query.Query#setStatsOptions(org.springframework.data.solr.core.query.StatsOptions)
     */
    @Override
    public <T extends Query> T setStatsOptions(StatsOptions statsOptions) {
        // Decorator: forward directly to the wrapped query.
        return query.setStatsOptions(statsOptions);
    }
}
public class ISO8859_1Reader { /** * Reads the next character . */ public int read ( char [ ] buf , int offset , int length ) throws IOException { } }
for ( int i = 0 ; i < length ; i ++ ) { int ch = is . read ( ) ; if ( ch < 0 ) return i > 0 ? i : - 1 ; else { buf [ offset + i ] = ( char ) ch ; } } return length ;
public class SparseArrayContracts { /** * The { @ code Object . hashCode ( ) } contract for a { @ link SparseArray } . * < pre > * int hash = 0; * for ( SparseArrayEntry < ? > entry : sparseArrayEntries ) { * hash + = entry . hashCode ( ) ; * return hash ; * < / pre > * @ param sparseArrayEntries * @ return */ public static int hashCodeOf ( Iterable < SparseArrayEntry < ? > > sparseArrayEntries ) { } }
int hash = 0 ; for ( SparseArrayEntry < ? > entry : sparseArrayEntries ) { hash += entry . hashCode ( ) ; } return hash ;
public class SchemaVersionOne {

    /**
     * (non-Javadoc)
     * @see net.agkn.hll.serialization.ISchemaVersion#getSerializer(HLLType, int, int)
     */
    @Override
    public IWordSerializer getSerializer(HLLType type, int wordLength, int wordCount) {
        // Words are written big-endian in ascending order; the header padding depends on the HLL type.
        return new BigEndianAscendingWordSerializer(wordLength, wordCount, paddingBytes(type));
    }
}
public class Campaign {

    /**
     * Gets the advertisingChannelSubType value for this Campaign.
     *
     * @return advertisingChannelSubType — optional refinement of advertisingChannelType.
     *         Must be a valid sub-type of the parent channel type. May only be set for new
     *         campaigns and cannot be changed once set.
     *         <span class="constraint ReadOnly">This field is read only and will be ignored
     *         when sent to the API for the following {@code Operator}s: SET.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.AdvertisingChannelSubType getAdvertisingChannelSubType() {
        return advertisingChannelSubType;
    }
}
public class DNSLookup {

    /**
     * Checks if a host name has at least one valid record of the given type.
     *
     * @param hostName the hostname
     * @param dnsType the kind of record (A, AAAA, MX, ...)
     * @return whether at least one matching record is available
     * @throws DNSLookupException on a fatal error such as an invalid dnsType or an
     *             initial context error
     */
    public static boolean hasRecords(String hostName, String dnsType) throws DNSLookupException {
        // doLookup returns the number of matching records.
        return DNSLookup.doLookup(hostName, dnsType) > 0;
    }
}
public class ParsedQuery {

    /**
     * Gets a value by the key specified. Unlike 'get', requires that the parameter is present.
     *
     * @param key the parameter name
     * @return the non-null value for the key
     */
    public String getString(String key) {
        String value = get(key);
        // Utils.require throws when the condition is false, so a missing key fails fast here.
        Utils.require(value != null, key + " parameter is not set");
        return value;
    }
}
public class SesameGraphBuilder {

    /**
     * (non-Javadoc)
     * @see org.openprovenance.prov.rdf.GraphBuilder#qualifiedNameToURI(org.openprovenance.prov.model.QualifiedName)
     */
    @Override
    public URIImpl qualifiedNameToURI(QualifiedName name) {
        // PROV local names may contain escape sequences; unescape before composing the URI.
        String unescapedLocalName = qnU.unescapeProvLocalName(name.getLocalPart());
        return new URIImpl(name.getNamespaceURI() + unescapedLocalName);
    }
}
public class CmsADEConfigData { /** * Internal method for getting the function references . < p > * @ return the function references */ protected List < CmsFunctionReference > internalGetFunctionReferences ( ) { } }
CmsADEConfigData parentData = parent ( ) ; if ( ( parentData == null ) ) { if ( m_data . isModuleConfig ( ) ) { return Collections . unmodifiableList ( m_data . getFunctionReferences ( ) ) ; } else { return Lists . newArrayList ( ) ; } } else { return parentData . internalGetFunctionReferences ( ) ; }
public class VMMetricsView {

    /**
     * Converts the uptime in milliseconds to a human readable format, examples:
     * "2 days, 0 hour, 34 min, 2 s" or "12 hours, 12 min, 22 s".
     */
    private String humanReadable(long uptime) {
        return formatUptime(uptime);
    }

    /**
     * Formats an uptime given in milliseconds. Package-private and static for testability.
     * Fixes over the previous version: no dangling ", " when the smallest unit is zero
     * (e.g. "2 min" instead of "2 min, "), and zero uptime yields "0 s" instead of "".
     */
    static String formatUptime(long uptimeMillis) {
        long totalSeconds = uptimeMillis / 1000;
        int sec = (int) (totalSeconds % 60);
        long totalMinutes = totalSeconds / 60;
        int min = (int) (totalMinutes % 60);
        long totalHours = totalMinutes / 60;
        int hour = (int) (totalHours % 24);
        int day = (int) (totalHours / 24);
        StringBuilder str = new StringBuilder();
        if (day > 0) {
            str.append(day).append(day > 1 ? " days, " : " day, ");
        }
        // Print "0 hour" when days exist; otherwise "2 days, 34 min" reads oddly.
        if (hour > 0 || day > 0) {
            str.append(hour).append(hour > 1 ? " hours, " : " hour, ");
        }
        if (min > 0) {
            str.append(min).append(" min, ");
        }
        if (sec > 0) {
            str.append(sec).append(" s");
        }
        String result = str.toString();
        // Strip the trailing separator left behind when seconds are zero.
        if (result.endsWith(", ")) {
            result = result.substring(0, result.length() - 2);
        }
        return result.isEmpty() ? "0 s" : result;
    }
}
public class FactoryDerivativeSparse {

    /**
     * Creates a sparse Laplacian filter.
     *
     * @see DerivativeLaplacian
     * @param imageType The type of image which is to be processed.
     * @param border How the border should be handled. If null {@link BorderType#EXTENDED} will be used.
     * @return Filter for performing a sparse laplacian.
     */
    public static <T extends ImageGray<T>> ImageFunctionSparse<T> createLaplacian(Class<T> imageType, ImageBorder<T> border) {
        if (border == null) {
            border = FactoryImageBorder.single(imageType, BorderType.EXTENDED);
        }
        if (GeneralizedImageOps.isFloatingPoint(imageType)) {
            // Floating-point path: fully typed F32 convolution with the F32 Laplacian kernel.
            ImageConvolveSparse<GrayF32, Kernel2D_F32> r = FactoryConvolveSparse.convolve2D(GrayF32.class, DerivativeLaplacian.kernel_F32);
            r.setImageBorder((ImageBorder_F32) border);
            return (ImageFunctionSparse<T>) r;
        } else {
            // Integer path: a raw type is used here — presumably because GrayI covers several
            // concrete integer image types; TODO confirm no typed alternative exists.
            ImageConvolveSparse r = FactoryConvolveSparse.convolve2D(GrayI.class, DerivativeLaplacian.kernel_I32);
            r.setImageBorder(border);
            return (ImageFunctionSparse<T>) r;
        }
    }
}
public class CastorMarshaller { /** * Convert the given { @ code XMLException } to an appropriate exception from the * { @ code org . springframework . oxm } hierarchy . * < p > A boolean flag is used to indicate whether this exception occurs during marshalling or * unmarshalling , since Castor itself does not make this distinction in its exception hierarchy . * @ param ex Castor { @ code XMLException } that occurred * @ param marshalling indicates whether the exception occurs during marshalling ( { @ code true } ) , * or unmarshalling ( { @ code false } ) * @ return the corresponding { @ code XmlMappingException } */ protected XmlMappingException convertCastorException ( XMLException ex , boolean marshalling ) { } }
if ( ex instanceof ValidationException ) { return new ValidationFailureException ( "Castor validation exception" , ex ) ; } else if ( ex instanceof MarshalException ) { if ( marshalling ) { return new MarshallingFailureException ( "Castor marshalling exception" , ex ) ; } else { return new UnmarshallingFailureException ( "Castor unmarshalling exception" , ex ) ; } } else { // fallback return new UncategorizedMappingException ( "Unknown Castor exception" , ex ) ; }
public class EventDistributor { /** * with this method you can register EventPublisher add a Source of Events to the System . * This method represents a higher level of abstraction ! Use the EventManager to fire Events ! * This method is intended for use cases where you have an entire new source of events ( e . g . network ) * @ param identification the Identification of the Source * @ return An Optional Object which may or may not contains an EventPublisher * @ throws IllegalIDException not yet implemented */ public Optional < EventCallable > registerEventPublisher ( Identification identification ) throws IllegalIDException { } }
if ( registered . containsKey ( identification ) ) return Optional . empty ( ) ; EventPublisher eventPublisher = new EventPublisher ( events ) ; registered . put ( identification , eventPublisher ) ; return Optional . of ( eventPublisher ) ;
public class DiffBuilder {

    /**
     * Test if two {@code Object}s are equal, dispatching to the matching typed
     * {@code append} overload when the operands are arrays.
     *
     * @param fieldName the field name
     * @param lhs the left hand {@code Object}
     * @param rhs the right hand {@code Object}
     * @return this
     * @throws IllegalArgumentException if field name is {@code null}
     */
    public DiffBuilder append(final String fieldName, final Object lhs, final Object rhs) {
        validateFieldNameNotNull(fieldName);
        // Short-circuit: the objects compared by this builder were already proven equal.
        if (objectsTriviallyEqual) {
            return this;
        }
        // Same reference (or both null) — no diff.
        if (lhs == rhs) {
            return this;
        }
        Object objectToTest;
        if (lhs != null) {
            objectToTest = lhs;
        } else {
            // rhs cannot be null, as lhs != rhs
            objectToTest = rhs;
        }
        if (objectToTest.getClass().isArray()) {
            // Dispatch on the concrete component type so each primitive array
            // is compared element-wise by the corresponding overload.
            if (objectToTest instanceof boolean[]) {
                return append(fieldName, (boolean[]) lhs, (boolean[]) rhs);
            }
            if (objectToTest instanceof byte[]) {
                return append(fieldName, (byte[]) lhs, (byte[]) rhs);
            }
            if (objectToTest instanceof char[]) {
                return append(fieldName, (char[]) lhs, (char[]) rhs);
            }
            if (objectToTest instanceof double[]) {
                return append(fieldName, (double[]) lhs, (double[]) rhs);
            }
            if (objectToTest instanceof float[]) {
                return append(fieldName, (float[]) lhs, (float[]) rhs);
            }
            if (objectToTest instanceof int[]) {
                return append(fieldName, (int[]) lhs, (int[]) rhs);
            }
            if (objectToTest instanceof long[]) {
                return append(fieldName, (long[]) lhs, (long[]) rhs);
            }
            if (objectToTest instanceof short[]) {
                return append(fieldName, (short[]) lhs, (short[]) rhs);
            }
            return append(fieldName, (Object[]) lhs, (Object[]) rhs);
        }
        // Not array type
        if (lhs != null && lhs.equals(rhs)) {
            return this;
        }
        // Record a lazy Diff that captures both operands.
        diffs.add(new Diff<Object>(fieldName) {
            private static final long serialVersionUID = 1L;

            @Override
            public Object getLeft() {
                return lhs;
            }

            @Override
            public Object getRight() {
                return rhs;
            }
        });
        return this;
    }
}
public class BugResolution { /** * If getApplicabilityVisitor ( ) is overwritten , this checks * to see if this resolution applies to the code at the given marker . * @ param marker * @ return true if this resolution should be visible to the user at the given marker */ public boolean isApplicable ( IMarker marker ) { } }
ASTVisitor prescanVisitor = getApplicabilityVisitor ( ) ; if ( prescanVisitor instanceof ApplicabilityVisitor ) { // this has an implicit null check return findApplicability ( prescanVisitor , marker ) ; } return true ;
public class UserAttrs { /** * Returns user - defined - attribute * @ param path * @ param attribute user : attribute name . user : can be omitted . * @ param options * @ return * @ throws IOException */ public static final String getStringAttribute ( Path path , String attribute , LinkOption ... options ) throws IOException { } }
attribute = attribute . startsWith ( "user:" ) ? attribute : "user:" + attribute ; byte [ ] attr = ( byte [ ] ) Files . getAttribute ( path , attribute , options ) ; if ( attr == null ) { return null ; } return new String ( attr , UTF_8 ) ;
public class BackupLongTermRetentionPoliciesInner {

    /**
     * Creates or updates a database backup long term retention policy.
     *
     * @param resourceGroupName the name of the resource group that contains the resource;
     *            obtainable from the Azure Resource Manager API or the portal
     * @param serverName the name of the server
     * @param databaseName the name of the database
     * @param parameters the required parameters to update a backup long term retention policy
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<BackupLongTermRetentionPolicyInner> createOrUpdateAsync(String resourceGroupName, String serverName, String databaseName, BackupLongTermRetentionPolicyInner parameters) {
        // Unwrap the ServiceResponse so subscribers receive only the policy body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, parameters).map(new Func1<ServiceResponse<BackupLongTermRetentionPolicyInner>, BackupLongTermRetentionPolicyInner>() {
            @Override
            public BackupLongTermRetentionPolicyInner call(ServiceResponse<BackupLongTermRetentionPolicyInner> response) {
                return response.body();
            }
        });
    }
}
public class ItemImpl { /** * Checking if this item has valid item state , i . e . wasn ' t removed ( and * saved ) . * @ return true or throws an InvalidItemStateException exception otherwise * @ throws InvalidItemStateException */ protected boolean checkValid ( ) throws InvalidItemStateException { } }
try { session . checkLive ( ) ; } catch ( RepositoryException e ) { throw new InvalidItemStateException ( "This kind of operation is forbidden after a session.logout()." , e ) ; } if ( data == null ) { throw new InvalidItemStateException ( "Invalid item state. Item was removed or discarded." ) ; } session . updateLastAccessTime ( ) ; return true ;
public class InboundTransferTask {

    /**
     * Cancels a set of segments and marks them as finished. If all remaining segments become
     * cancelled then the whole task is cancelled, as if {@linkplain #cancel()} was called.
     *
     * @param cancelledSegments the segments to be cancelled
     * @throws IllegalArgumentException if the task is already cancelled, or if any requested
     *             segment was never part of this transfer
     */
    public void cancelSegments(IntSet cancelledSegments) {
        if (isCancelled) {
            // NOTE(review): this is a state violation rather than a bad argument —
            // IllegalStateException would be more apt, but callers may already catch IAE;
            // confirm before changing the thrown type.
            throw new IllegalArgumentException("The task is already cancelled.");
        }
        if (trace) {
            log.tracef("Partially cancelling inbound state transfer from node %s, segments %s", source, cancelledSegments);
        }
        synchronized (segments) {
            // healthy paranoia: only previously requested segments may be cancelled
            if (!segments.containsAll(cancelledSegments)) {
                throw new IllegalArgumentException("Some of the specified segments cannot be cancelled because they were not previously requested");
            }
            unfinishedSegments.removeAll(cancelledSegments);
            // Nothing left to receive: the whole task is now effectively cancelled.
            if (unfinishedSegments.isEmpty()) {
                isCancelled = true;
            }
        }
        // Inform the source outside the lock; then complete the task if this emptied it.
        sendCancelCommand(cancelledSegments);
        if (isCancelled) {
            notifyCompletion(false);
        }
    }
}
public class ContextManager { /** * Set the context pool configuration . < p / > * The context pool parameters are not required when < code > enableContextPool = = false < / code > . If < code > enableContextPool = = true < / code > and any of the context pool parameters * are null , that parameter will be set to the default value . * @ param enableContextPool Whether the context pool is enabled . * @ param initPoolSize The initial context pool size . * @ param prefPoolSize The preferred context pool size . Not required when < code > enableContextPool = = false < / code > . * @ param maxPoolSize The maximum context pool size . A size of ' 0 ' means the maximum size is unlimited . Not required when < code > enableContextPool = = false < / code > . * @ param poolTimeOut The context pool timeout in milliseconds . This is the amount of time a context is valid for in * the context pool is valid for until it is discarded . Not required when < code > enableContextPool = = false < / code > . * @ param poolWaitTime The context pool wait time in milliseconds . This is the amount of time to wait when getDirContext ( ) is called * and no context is available from the pool before checking again . Not required when < code > enableContextPool = = false < / code > . * @ throws InvalidInitPropertyException If < code > initPoolSize > maxPoolSize < / code > or < code > prefPoolSize > maxPoolSize < / code > when < code > maxPoolSize ! = 0 < / code > . */ public void setContextPool ( boolean enableContextPool , Integer initPoolSize , Integer prefPoolSize , Integer maxPoolSize , Long poolTimeOut , Long poolWaitTime ) throws InvalidInitPropertyException { } }
final String METHODNAME = "setContextPool" ; this . iContextPoolEnabled = enableContextPool ; if ( iContextPoolEnabled ) { this . iInitPoolSize = initPoolSize == null ? DEFAULT_INIT_POOL_SIZE : initPoolSize ; this . iMaxPoolSize = maxPoolSize == null ? DEFAULT_MAX_POOL_SIZE : maxPoolSize ; this . iPrefPoolSize = prefPoolSize == null ? DEFAULT_PREF_POOL_SIZE : prefPoolSize ; this . iPoolTimeOut = poolTimeOut == null ? DEFAULT_POOL_TIME_OUT : poolTimeOut ; this . iPoolWaitTime = poolWaitTime == null ? DEFAULT_POOL_WAIT_TIME : poolWaitTime ; if ( iMaxPoolSize != 0 && iMaxPoolSize < iInitPoolSize ) { String msg = Tr . formatMessage ( tc , WIMMessageKey . INIT_POOL_SIZE_TOO_BIG , WIMMessageHelper . generateMsgParms ( Integer . valueOf ( iInitPoolSize ) , Integer . valueOf ( iMaxPoolSize ) ) ) ; throw new InvalidInitPropertyException ( WIMMessageKey . INIT_POOL_SIZE_TOO_BIG , msg ) ; } if ( iMaxPoolSize != 0 && iPrefPoolSize != 0 && iMaxPoolSize < iPrefPoolSize ) { String msg = Tr . formatMessage ( tc , WIMMessageKey . PREF_POOL_SIZE_TOO_BIG , WIMMessageHelper . generateMsgParms ( Integer . valueOf ( iInitPoolSize ) , Integer . valueOf ( iMaxPoolSize ) ) ) ; throw new InvalidInitPropertyException ( WIMMessageKey . PREF_POOL_SIZE_TOO_BIG , msg ) ; } } else { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , METHODNAME + " Context Pool is disabled." ) ; } }
public class ConcurrentLinkedList { /** * Removes and returns the item at the head of the queue . Concurrent calls to * this method and offerAndGetNode ( ) do not mutually block , however , concurrent * calls to this method are serialized . */ public E poll ( ) { } }
if ( mSize . get ( ) == 0 ) return null ; mPollLock . lock ( ) ; try { return removeHead ( ) ; } finally { mPollLock . unlock ( ) ; }
public class VertxCompletableFuture { /** * Returns a new CompletableFuture that is completed when all of the given CompletableFutures complete . If any of * the given CompletableFutures complete exceptionally , then the returned CompletableFuture also does so , with a * CompletionException holding this exception as its cause . Otherwise , the results , if any , of the given * CompletableFutures are not reflected in the returned CompletableFuture , but may be obtained by inspecting them * individually . If no CompletableFutures are provided , returns a CompletableFuture completed with the value * { @ code null } . * < p > Among the applications of this method is to await completion * of a set of independent CompletableFutures before continuing a * program , as in : { @ code CompletableFuture . allOf ( c1 , c2 , c3 ) . join ( ) ; } . * Unlike the original { @ link CompletableFuture # allOf ( CompletableFuture [ ] ) } this method invokes the dependent * stages into the Vert . x context . * @ param vertx the Vert . x instance to retrieve the context * @ param futures the CompletableFutures * @ return a new CompletableFuture that is completed when all of the given CompletableFutures complete * @ throws NullPointerException if the array or any of its elements are { @ code null } */ public static VertxCompletableFuture < Void > allOf ( Vertx vertx , CompletableFuture < ? > ... futures ) { } }
CompletableFuture < Void > all = CompletableFuture . allOf ( futures ) ; return VertxCompletableFuture . from ( vertx , all ) ;
public class SystemPropertiesUtil { /** * 合并系统变量 ( - D ) , 环境变量 和默认值 , 以系统变量优先 */ public static Boolean getBoolean ( String propertyName , String envName , Boolean defaultValue ) { } }
checkEnvName ( envName ) ; Boolean propertyValue = BooleanUtil . toBooleanObject ( System . getProperty ( propertyName ) , null ) ; if ( propertyValue != null ) { return propertyValue ; } else { propertyValue = BooleanUtil . toBooleanObject ( System . getenv ( envName ) , null ) ; return propertyValue != null ? propertyValue : defaultValue ; }
public class InMemoryCookieStore { /** * Get all URIs , which are associated with at least one cookie * of this cookie store . */ public List < URI > getURIs ( ) { } }
List < URI > uris = new ArrayList < URI > ( ) ; lock . lock ( ) ; try { List < URI > result = new ArrayList < URI > ( uriIndex . keySet ( ) ) ; result . remove ( null ) ; return Collections . unmodifiableList ( result ) ; } finally { uris . addAll ( uriIndex . keySet ( ) ) ; lock . unlock ( ) ; }
public class Saml10ObjectBuilder { /** * New attribute statement . * @ param subject the subject * @ param attributes the attributes * @ param attributeNamespace the attribute namespace * @ return the attribute statement */ public AttributeStatement newAttributeStatement ( final Subject subject , final Map < String , Object > attributes , final String attributeNamespace ) { } }
val attrStatement = newSamlObject ( AttributeStatement . class ) ; attrStatement . setSubject ( subject ) ; for ( val e : attributes . entrySet ( ) ) { if ( e . getValue ( ) instanceof Collection < ? > && ( ( Collection < ? > ) e . getValue ( ) ) . isEmpty ( ) ) { LOGGER . info ( "Skipping attribute [{}] because it does not have any values." , e . getKey ( ) ) ; continue ; } val attribute = newSamlObject ( Attribute . class ) ; attribute . setAttributeName ( e . getKey ( ) ) ; if ( StringUtils . isNotBlank ( attributeNamespace ) ) { attribute . setAttributeNamespace ( attributeNamespace ) ; } addAttributeValuesToSaml1Attribute ( e . getKey ( ) , e . getValue ( ) , attribute . getAttributeValues ( ) ) ; attrStatement . getAttributes ( ) . add ( attribute ) ; } return attrStatement ;
public class LocalSession {
    /**
     * Session close hook: best-effort rollback of any pending updates before
     * delegating the remaining close handling to the superclass.
     *
     * @see net.timewalker.ffmq4.common.session.AbstractSession#onSessionClose()
     */
    @Override
    protected void onSessionClose() {
        // Rollback updates
        try {
            if (hasPendingUpdates())
                // NOTE(review): the two boolean flags presumably select which kinds
                // of pending updates to roll back — confirm against rollbackUpdates().
                rollbackUpdates(true, true, null);
        } catch (JMSException e) {
            // Intentionally swallowed after logging: closing the session must
            // proceed even if the rollback fails.
            ErrorTools.log(e, log);
        }
        super.onSessionClose();
    }
}
public class Monetary {
    /**
     * Returns all factory instances that match the query.
     *
     * @param query the factory query, not null.
     * @return the instances found, never null.
     * @throws MonetaryException if no MonetaryAmountsSingletonQuerySpi
     *             implementation could be loaded, so query functionality is
     *             unavailable.
     */
    public static Collection<MonetaryAmountFactory<?>> getAmountFactories(MonetaryAmountFactoryQuery query) {
        // Fail fast with a descriptive error when no query SPI is registered,
        // otherwise delegate the lookup to the loaded SPI.
        return Optional.ofNullable(monetaryAmountsSingletonQuerySpi())
                .orElseThrow(() -> new MonetaryException("No MonetaryAmountsSingletonQuerySpi loaded, query functionality is not available."))
                .getAmountFactories(query);
    }
}
public class A_CmsListDialog { /** * Returns the current selected item . < p > * @ return the current selected item */ public CmsListItem getSelectedItem ( ) { } }
try { return getList ( ) . getItem ( CmsStringUtil . splitAsArray ( getParamSelItems ( ) , CmsHtmlList . ITEM_SEPARATOR ) [ 0 ] . trim ( ) ) ; } catch ( Exception e ) { try { return getList ( ) . getItem ( "" ) ; } catch ( Exception e1 ) { return null ; } }
public class SSLChannelProvider { /** * Required service : this is not dynamic , and so is called after deactivate * @ param ref reference to the service */ protected void unsetSslSupport ( SSLSupport service ) { } }
sslSupport = null ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "unsetSslSupport" , service ) ; }
public class ObjectFactory { /** * Create an instance of { @ link Project . Calendars . Calendar . WeekDays . WeekDay . WorkingTimes . WorkingTime } */ public Project . Calendars . Calendar . WeekDays . WeekDay . WorkingTimes . WorkingTime createProjectCalendarsCalendarWeekDaysWeekDayWorkingTimesWorkingTime ( ) { } }
return new Project . Calendars . Calendar . WeekDays . WeekDay . WorkingTimes . WorkingTime ( ) ;
public class CollidableUpdater { /** * Check if the collidable entered in collision with another one . * @ param origin The origin used . * @ param provider The provider owner . * @ param transformable The transformable owner . * @ param other The collidable reference . * @ param accepted The accepted groups . * @ return The collisions found if collide . */ public List < Collision > collide ( Origin origin , FeatureProvider provider , Transformable transformable , Collidable other , Collection < Integer > accepted ) { } }
final List < Collision > collisions = new ArrayList < > ( ) ; if ( enabled && other . isEnabled ( ) && accepted . contains ( other . getGroup ( ) ) ) { final int size = cacheColls . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { final Collision collision = collide ( origin , provider , transformable , other , cacheColls . get ( i ) , cacheRect . get ( i ) ) ; if ( collision != null ) { collisions . add ( collision ) ; } } } return collisions ;
public class DeleteCommand { /** * Webdav Delete method implementation . * @ param session current session * @ param path file path * @ param lockTokenHeader lock tokens * @ return the instance of javax . ws . rs . core . Response */ public Response delete ( Session session , String path , String lockTokenHeader ) { } }
try { if ( lockTokenHeader == null ) { lockTokenHeader = "" ; } Item item = session . getItem ( path ) ; if ( item . isNode ( ) ) { Node node = ( Node ) item ; if ( node . isLocked ( ) ) { String nodeLockToken = node . getLock ( ) . getLockToken ( ) ; if ( ( nodeLockToken == null ) || ( ! nodeLockToken . equals ( lockTokenHeader ) ) ) { return Response . status ( HTTPStatus . LOCKED ) . entity ( "The " + path + " item is locked. " ) . type ( MediaType . TEXT_PLAIN ) . build ( ) ; } } } item . remove ( ) ; session . save ( ) ; return Response . status ( HTTPStatus . NO_CONTENT ) . build ( ) ; } catch ( PathNotFoundException exc ) { return Response . status ( HTTPStatus . NOT_FOUND ) . entity ( exc . getMessage ( ) ) . build ( ) ; } catch ( RepositoryException exc ) { return Response . status ( HTTPStatus . FORBIDDEN ) . entity ( exc . getMessage ( ) ) . build ( ) ; }
public class ImageSizeImpl {
    /**
     * Resets the addressed structural feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // NOTE(review): EMF-generated dispatch (@generated) — prefer regenerating
        // from the model over hand edits.
        switch (featureID) {
            case AfplibPackage.IMAGE_SIZE__UNITBASE:
                setUNITBASE(UNITBASE_EDEFAULT);
                return;
            case AfplibPackage.IMAGE_SIZE__HRESOL:
                setHRESOL(HRESOL_EDEFAULT);
                return;
            case AfplibPackage.IMAGE_SIZE__VRESOL:
                setVRESOL(VRESOL_EDEFAULT);
                return;
            case AfplibPackage.IMAGE_SIZE__HSIZE:
                setHSIZE(HSIZE_EDEFAULT);
                return;
            case AfplibPackage.IMAGE_SIZE__VSIZE:
                setVSIZE(VSIZE_EDEFAULT);
                return;
        }
        // Feature not handled here: delegate to the superclass.
        super.eUnset(featureID);
    }
}
public class BytesMessageImpl { /** * ( non - Javadoc ) * @ see javax . jms . BytesMessage # readBoolean ( ) */ @ Override public boolean readBoolean ( ) throws JMSException { } }
backupState ( ) ; try { return getInput ( ) . readBoolean ( ) ; } catch ( EOFException e ) { restoreState ( ) ; throw new MessageEOFException ( "End of body reached" ) ; } catch ( IOException e ) { restoreState ( ) ; throw new FFMQException ( "Cannot read message body" , "IO_ERROR" , e ) ; } catch ( RuntimeException e ) { restoreState ( ) ; throw e ; }
public class HijriCalendar { /** * / * [ deutsch ] * < p > Erzeugt ein neues Hijri - Kalenderdatum in der angegebenen Variante . < / p > * @ param variantSource source of calendar variant * @ param hyear islamic year * @ param hmonth islamic month * @ param hdom islamic day of month * @ return new instance of { @ code HijriCalendar } * @ throws ChronoException if given variant is not supported * @ throws IllegalArgumentException in case of any inconsistencies * @ since 3.6/4.4 */ public static HijriCalendar of ( VariantSource variantSource , int hyear , HijriMonth hmonth , int hdom ) { } }
return HijriCalendar . of ( variantSource . getVariant ( ) , hyear , hmonth . getValue ( ) , hdom ) ;
public class Joiner { /** * Adds the contents of the given { @ code StringJoiner } without prefix and * suffix as the next element if it is non - empty . If the given { @ code * StringJoiner } is empty , the call has no effect . * < p > A { @ code StringJoiner } is empty if { @ link # addAll ( CharSequence ) add ( ) } * has never been called , and if { @ code merge ( ) } has never been called * with a non - empty { @ code StringJoiner } argument . * < p > If the other { @ code StringJoiner } is using a different delimiter , * then elements from the other { @ code StringJoiner } are concatenated with * that delimiter and the result is appended to this { @ code StringJoiner } * as a single element . * @ param other The { @ code StringJoiner } whose contents should be merged * into this one * @ throws NullPointerException if the other { @ code StringJoiner } is null * @ return This { @ code StringJoiner } */ public Joiner merge ( Joiner other ) { } }
N . checkArgNotNull ( other ) ; if ( other . buffer != null ) { final int length = other . buffer . length ( ) ; // lock the length so that we can seize the data to be appended // before initiate copying to avoid interference , especially when // merge ' this ' StringBuilder builder = prepareBuilder ( ) ; builder . append ( other . buffer , other . prefix . length ( ) , length ) ; } return this ;
public class AbstractCompressionCodec { /** * TODO : make protected on a minor release */ byte [ ] writeAndClose ( byte [ ] payload , StreamWrapper wrapper ) throws IOException { } }
ByteArrayOutputStream outputStream = new ByteArrayOutputStream ( 512 ) ; OutputStream compressionStream = wrapper . wrap ( outputStream ) ; try { compressionStream . write ( payload ) ; compressionStream . flush ( ) ; } finally { Objects . nullSafeClose ( compressionStream ) ; } return outputStream . toByteArray ( ) ;
public class LocPathIterator {
    /**
     * Return the first node out of the nodeset, if this expression is a nodeset
     * expression. This is the default implementation for nodesets. Derived
     * classes should try and override this and return a value without having to
     * do a clone operation.
     *
     * @param xctxt The XPath runtime context.
     * @return the first node out of the nodeset, or DTM.NULL.
     */
    public int asNode(XPathContext xctxt) throws javax.xml.transform.TransformerException {
        // Work on a pooled clone so this iterator's own traversal state is
        // left untouched.
        DTMIterator iter = (DTMIterator) m_clones.getInstance();
        int current = xctxt.getCurrentNode();
        iter.setRoot(current, xctxt);
        int next = iter.nextNode();
        // m_clones.freeInstance(iter);
        // NOTE(review): the clone is detached rather than returned to the pool
        // (the freeInstance call above is deliberately commented out) —
        // presumably intentional; confirm before "fixing".
        iter.detach();
        return next;
    }
}
public class ParticleEditor {
    /**
     * Imports an emitter from an XML file chosen by the user. If the emitter
     * references an image, the image is first looked up next to the XML file;
     * otherwise the user is asked to locate it. The loaded emitter is added to
     * the editor and selected.
     */
    public void importEmitter() {
        chooser.setDialogTitle("Open");
        int resp = chooser.showOpenDialog(this);
        if (resp == JFileChooser.APPROVE_OPTION) {
            File file = chooser.getSelectedFile();
            File path = file.getParentFile();
            try {
                final ConfigurableEmitter emitter = ParticleIO.loadEmitter(file);
                if (emitter.getImageName() != null) {
                    // Prefer an image sitting next to the XML file.
                    File possible = new File(path, emitter.getImageName());
                    if (possible.exists()) {
                        emitter.setImageName(possible.getAbsolutePath());
                    } else {
                        // Ask the user to locate the referenced image.
                        chooser.setDialogTitle("Locate the image: " + emitter.getImageName());
                        resp = chooser.showOpenDialog(this);
                        // NOTE(review): this filter is installed AFTER the dialog
                        // above has already been shown, so it never constrains
                        // that dialog — confirm whether this ordering is intended.
                        FileFilter filter = new FileFilter() {
                            public boolean accept(File f) {
                                if (f.isDirectory()) {
                                    return true;
                                }
                                return (f.getName().equals(emitter.getImageName()));
                            }

                            public String getDescription() {
                                return emitter.getImageName();
                            }
                        };
                        chooser.addChoosableFileFilter(filter);
                        if (resp == JFileChooser.APPROVE_OPTION) {
                            File image = chooser.getSelectedFile();
                            emitter.setImageName(image.getAbsolutePath());
                            path = image.getParentFile();
                        }
                        // Restore the chooser to XML-only for subsequent dialogs.
                        chooser.resetChoosableFileFilters();
                        chooser.addChoosableFileFilter(xmlFileFilter);
                    }
                }
                addEmitter(emitter);
                emitters.setSelected(emitter);
            } catch (IOException e) {
                // Surface load failures to the user; keep the editor running.
                Log.error(e);
                JOptionPane.showMessageDialog(this, e.getMessage());
            }
        }
    }
}
public class PollingMultiFileWatcher { /** * Stops polling the files for changes . Should be called during server shutdown or when this watcher is no * longer needed to make sure the background thread is stopped . */ @ Override public void shutdown ( ) { } }
if ( isStarted ( ) ) { future . cancel ( true ) ; executorService . shutdown ( ) ; future = null ; executorService = null ; watchedFiles = ImmutableSet . of ( ) ; callback = null ; metadataCacheRef . set ( ImmutableMap . of ( ) ) ; stats . clear ( ) ; }
public class PolicyAssignmentsInner { /** * Deletes a policy assignment by ID . * When providing a scope for the assigment , use ' / subscriptions / { subscription - id } / ' for subscriptions , ' / subscriptions / { subscription - id } / resourceGroups / { resource - group - name } ' for resource groups , and ' / subscriptions / { subscription - id } / resourceGroups / { resource - group - name } / providers / { resource - provider - namespace } / { resource - type } / { resource - name } ' for resources . * @ param policyAssignmentId The ID of the policy assignment to delete . Use the format ' / { scope } / providers / Microsoft . Authorization / policyAssignments / { policy - assignment - name } ' . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PolicyAssignmentInner object */ public Observable < PolicyAssignmentInner > deleteByIdAsync ( String policyAssignmentId ) { } }
return deleteByIdWithServiceResponseAsync ( policyAssignmentId ) . map ( new Func1 < ServiceResponse < PolicyAssignmentInner > , PolicyAssignmentInner > ( ) { @ Override public PolicyAssignmentInner call ( ServiceResponse < PolicyAssignmentInner > response ) { return response . body ( ) ; } } ) ;
public class Serializables { /** * Utility for returning a Serializable object from a byte array . */ public static < T extends Serializable > T deserialize ( byte [ ] bytes ) throws IOException , ClassNotFoundException { } }
return deserialize ( bytes , false ) ;
public class SurveyorUncaughtExceptionHandler { /** * Given a stack trace , turn it into a HTML formatted string - to improve its display * @ param stackTrace - stack trace to convert to string * @ return String with stack trace formatted with HTML line breaks */ private String printStackTrace ( Object [ ] stackTrace ) { } }
StringBuilder output = new StringBuilder ( ) ; for ( Object line : stackTrace ) { output . append ( line ) ; output . append ( newline ) ; } return output . toString ( ) ;
public class LogGammaDistribution { /** * LogGamma distribution PDF ( with 0.0 for x & lt ; 0) * @ param x query value * @ param k Alpha * @ param theta Theta = 1 / Beta * @ return probability density */ public static double pdf ( double x , double k , double theta , double shift ) { } }
x = ( x - shift ) ; return x <= 0. ? 0. : FastMath . pow ( theta , k ) / GammaDistribution . gamma ( k ) * FastMath . pow ( 1 + x , - ( theta + 1. ) ) * FastMath . pow ( FastMath . log1p ( x ) , k - 1. ) ;
public class ModelsImpl { /** * Deletes a hierarchical entity extractor child from the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ param hChildId The hierarchical entity extractor child ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the OperationStatus object if successful . */ public OperationStatus deleteHierarchicalEntityChild ( UUID appId , String versionId , UUID hEntityId , UUID hChildId ) { } }
return deleteHierarchicalEntityChildWithServiceResponseAsync ( appId , versionId , hEntityId , hChildId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DefaultActionBuilder {
    /**
     * Derives the action name (with the suffix stripped) for a ctl/action class
     * according to the profile that matches the class.<br>
     * If the matching profile uses the short URI style, only the simple class
     * name is kept, the suffix is removed, and the first letter is lowercased.<br>
     * Otherwise the package name is included, with '.' encoded as the URI path
     * separator; the package name gets no other processing.<br>
     * Complex URLs (from an explicit annotation value) start with '/'.
     *
     * @param clazz the action class to build an Action descriptor for
     * @return the built Action with path, default method and extension set
     */
    public Action build(Class<?> clazz) {
        Action action = new Action();
        String className = clazz.getName();
        Profile profile = profileService.getProfile(className);
        // An explicit @Action annotation, if present, overrides name derivation.
        org.beangle.struts2.annotation.Action an = clazz.getAnnotation(org.beangle.struts2.annotation.Action.class);
        StringBuilder sb = new StringBuilder();
        // namespace
        sb.append(profile.getUriPath());
        if (null != an) {
            String name = an.value();
            if (!name.startsWith("/")) {
                // Relative annotation value: SEO style prefixes the un-camelled
                // package infix; other styles use the value as-is.
                if (Constants.SEO_URI.equals(profile.getUriPathStyle())) {
                    sb.append(unCamel(substringBeforeLast(profile.getInfix(className), "/")) + "/" + name);
                } else {
                    sb.append(name);
                }
            } else {
                // Absolute annotation value: drop the leading '/' since the
                // profile's uriPath already ends the namespace.
                sb.append(name.substring(1));
            }
        } else {
            // No annotation: derive the name from the class per the URI style.
            if (Constants.SHORT_URI.equals(profile.getUriPathStyle())) {
                // Simple class name, action suffix stripped, first letter lowercased.
                String simpleName = className.substring(className.lastIndexOf('.') + 1);
                sb.append(uncapitalize(simpleName.substring(0, simpleName.length() - profile.getActionSuffix().length())));
            } else if (Constants.SIMPLE_URI.equals(profile.getUriPathStyle())) {
                sb.append(profile.getInfix(className));
            } else if (Constants.SEO_URI.equals(profile.getUriPathStyle())) {
                sb.append(unCamel(profile.getInfix(className)));
            } else {
                throw new RuntimeException("unsupported uri style " + profile.getUriPathStyle());
            }
        }
        action.path(sb.toString());
        action.method(profile.getDefaultMethod()).extention(profile.getUriExtension());
        return action;
    }
}
public class IpCamDevice { /** * This method will send HTTP HEAD request to the camera URL to check whether it ' s online or * offline . It ' s online when this request succeed and it ' s offline if any exception occurs or * response code is 404 Not Found . * @ return True if camera is online , false otherwise */ public boolean isOnline ( ) { } }
LOG . debug ( "Checking online status for {} at {}" , getName ( ) , getURL ( ) ) ; try { return client . execute ( new HttpHead ( toURI ( getURL ( ) ) ) ) . getStatusLine ( ) . getStatusCode ( ) != 404 ; } catch ( Exception e ) { return false ; }
public class Snappy { /** * Uncompress the input [ offset , offset + length ) as a String of the given * encoding * @ param input * @ param offset * @ param length * @ param encoding * @ return the uncompressed data * @ throws IOException */ public static String uncompressString ( byte [ ] input , int offset , int length , String encoding ) throws IOException , UnsupportedEncodingException { } }
byte [ ] uncompressed = new byte [ uncompressedLength ( input , offset , length ) ] ; uncompress ( input , offset , length , uncompressed , 0 ) ; return new String ( uncompressed , encoding ) ;