signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CmsSolrSpellchecker { /** * Returns the body of the request . This method is used to read posted JSON data .
* @ param request The request .
* @ return String representation of the request ' s body .
* @ throws IOException in case reading the request fails */
private String getRequestBody ( ServletRequest request ) throws IOException { } } | final StringBuilder sb = new StringBuilder ( ) ; String line = request . getReader ( ) . readLine ( ) ; while ( null != line ) { sb . append ( line ) ; line = request . getReader ( ) . readLine ( ) ; } return sb . toString ( ) ; |
public class CoverageUtilities { /** * Creates a { @ link WritableRandomIter } .
* < p > It is important to use this method since it supports also
* large GRASS rasters .
* < p > If the size would throw an integer overflow , a { @ link GrassLegacyRandomIter }
* will be proposed to try to save the saveable .
* @ param raster the coverage on which to wrap a { @ link WritableRandomIter } .
* @ return the iterator . */
public static WritableRandomIter getWritableRandomIterator ( WritableRaster raster ) { } } | if ( raster instanceof GrassLegacyWritableRaster ) { GrassLegacyWritableRaster wRaster = ( GrassLegacyWritableRaster ) raster ; double [ ] [ ] data = wRaster . getData ( ) ; getWritableRandomIterator ( data [ 0 ] . length , data . length ) ; } WritableRandomIter iter = RandomIterFactory . createWritable ( raster , null ) ; return iter ; |
public class CmsIndexingThreadManager { /** * Returns if the indexing manager still have indexing threads . < p >
* @ return true if the indexing manager still have indexing threads */
public boolean isRunning ( ) { } } | if ( m_lastLogErrorTime <= 0 ) { m_lastLogErrorTime = System . currentTimeMillis ( ) ; m_lastLogWarnTime = m_lastLogErrorTime ; } else { long currentTime = System . currentTimeMillis ( ) ; if ( ( currentTime - m_lastLogWarnTime ) > 30000 ) { // write warning to log after 30 seconds
if ( LOG . isWarnEnabled ( ) ) { LOG . warn ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_WAITING_ABANDONED_THREADS_2 , new Integer ( m_abandonedCounter ) , new Integer ( ( m_startedCounter - m_returnedCounter ) ) ) ) ; } m_lastLogWarnTime = currentTime ; } if ( ( currentTime - m_lastLogErrorTime ) > 600000 ) { // write error to log after 10 minutes
LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_WAITING_ABANDONED_THREADS_2 , new Integer ( m_abandonedCounter ) , new Integer ( ( m_startedCounter - m_returnedCounter ) ) ) ) ; m_lastLogErrorTime = currentTime ; } } boolean result = ( m_returnedCounter + m_abandonedCounter ) < m_startedCounter ; if ( result && LOG . isInfoEnabled ( ) ) { // write a note to the log that all threads have finished
LOG . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_THREADS_FINISHED_0 ) ) ; } return result ; |
public class ClusterControllerClient { /** * Gets the resource representation for a cluster in a project .
* < p > Sample code :
* < pre > < code >
* try ( ClusterControllerClient clusterControllerClient = ClusterControllerClient . create ( ) ) {
* String projectId = " " ;
* String region = " " ;
* String clusterName = " " ;
* Cluster response = clusterControllerClient . getCluster ( projectId , region , clusterName ) ;
* < / code > < / pre >
* @ param projectId Required . The ID of the Google Cloud Platform project that the cluster belongs
* to .
* @ param region Required . The Cloud Dataproc region in which to handle the request .
* @ param clusterName Required . The cluster name .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Cluster getCluster ( String projectId , String region , String clusterName ) { } } | GetClusterRequest request = GetClusterRequest . newBuilder ( ) . setProjectId ( projectId ) . setRegion ( region ) . setClusterName ( clusterName ) . build ( ) ; return getCluster ( request ) ; |
public class SelectQuery { /** * Find all the tables used in the query in FROM clause ( from columns , criterias and orders ) .
* @ return List of { @ link Table } s */
private List < Table > findAllUsedTablesInFrom ( ) { } } | List < Table > allTables = new ArrayList < Table > ( ) ; Map < Table , List < JoinCriteria > > sourceMap = new HashMap < Table , List < JoinCriteria > > ( ) ; Map < Table , List < JoinCriteria > > destMap = new HashMap < Table , List < JoinCriteria > > ( ) ; // see what tables are used in outer joins
for ( Object criteria : criterias ) { try { JoinCriteria joinCriteria = ( JoinCriteria ) criteria ; Table sourceTable = joinCriteria . getSource ( ) . getTable ( ) ; Table destTable = joinCriteria . getDestination ( ) . getTable ( ) ; List < JoinCriteria > sourceList = sourceMap . get ( sourceTable ) ; List < JoinCriteria > destList = sourceMap . get ( destTable ) ; if ( sourceList == null ) { sourceList = new ArrayList < JoinCriteria > ( ) ; sourceMap . put ( sourceTable , sourceList ) ; } sourceList . add ( joinCriteria ) ; if ( destList == null ) { destList = new ArrayList < JoinCriteria > ( ) ; destMap . put ( destTable , destList ) ; } destList . add ( joinCriteria ) ; } catch ( ClassCastException ex ) { // not a JoinCriteria
} } // add tables from column selection
for ( Column column : columns ) { Table table = column . getTable ( ) ; if ( table != null ) { table . setDialect ( dialect ) ; } if ( canAddTableToFromClause ( table , destMap , allTables ) ) { table . setJoins ( sourceMap . get ( table ) ) ; allTables . add ( table ) ; } } // add tables from criterias
// it is possible that a table is used only in joins without any column selection from it
for ( Object criteria : criterias ) { try { JoinCriteria joinCriteria = ( JoinCriteria ) criteria ; Table sourceTable = joinCriteria . getSource ( ) . getTable ( ) ; Table destTable = joinCriteria . getDestination ( ) . getTable ( ) ; if ( canAddTableToFromClause ( sourceTable , destMap , allTables ) ) { sourceTable . setJoins ( sourceMap . get ( sourceTable ) ) ; allTables . add ( sourceTable ) ; } if ( canAddTableToFromClause ( destTable , destMap , allTables ) ) { destTable . setJoins ( destMap . get ( destTable ) ) ; allTables . add ( destTable ) ; } } catch ( ClassCastException ex ) { // not a JoinCriteria
} } // add tables used by order columns
for ( Order order : orders ) { Table table = order . getColumn ( ) . getTable ( ) ; if ( table != null ) { table . setDialect ( dialect ) ; } if ( canAddTableToFromClause ( table , destMap , allTables ) ) { allTables . add ( table ) ; } } return allTables ; |
public class WeekDay { /** * Returns the corresponding < code > java . util . Calendar . DAY _ OF _ WEEK < / code >
* constant for the specified < code > WeekDay < / code > .
* @ param weekday a week day instance
* @ return the corresponding < code > java . util . Calendar < / code > day */
public static int getCalendarDay ( final WeekDay weekday ) { } } | int calendarDay = - 1 ; if ( SU . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . SUNDAY ; } else if ( MO . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . MONDAY ; } else if ( TU . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . TUESDAY ; } else if ( WE . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . WEDNESDAY ; } else if ( TH . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . THURSDAY ; } else if ( FR . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . FRIDAY ; } else if ( SA . getDay ( ) . equals ( weekday . getDay ( ) ) ) { calendarDay = Calendar . SATURDAY ; } return calendarDay ; |
public class OptionalDouble { /** * Performs negated filtering on inner value if it is present .
* @ param predicate a predicate function
* @ return this { @ code OptionalDouble } if the value is present and doesn ' t matches predicate ,
* otherwise an empty { @ code OptionalDouble }
* @ since 1.1.9 */
@ NotNull public OptionalDouble filterNot ( @ NotNull DoublePredicate predicate ) { } } | return filter ( DoublePredicate . Util . negate ( predicate ) ) ; |
public class RelationalReleaseCache {

    /**
     * Counts the total number of objects governed by this factory in the database.
     *
     * @return the number of objects in the database
     * @throws PersistenceException an error occurred counting the elements in the database
     */
    @Override
    public long count() throws PersistenceException {
        logger.debug("enter - count()");
        try {
            // Open a transaction (read-only flag 'true') and run the counter query.
            Transaction xaction = Transaction.getInstance(true);
            Counter counter = getCounter(null);
            try {
                Map<String, Object> results;
                long count;
                results = xaction.execute(counter, new HashMap<String, Object>(0), readDataSource);
                // The counter execution is expected to expose the total under the "count" key.
                count = ((Number) results.get("count")).longValue();
                xaction.commit();
                return count;
            } finally {
                // After a successful commit this rollback is presumably a no-op; it cleans
                // up the transaction when execute()/commit() threw. TODO confirm Transaction
                // semantics allow rollback-after-commit.
                xaction.rollback();
            }
        } finally {
            logger.debug("exit - count()");
        }
    }
}
public class RoleAssignmentsInner { /** * Creates a role assignment by ID .
* @ param roleId The ID of the role assignment to create .
* @ param parameters Parameters for the role assignment .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < RoleAssignmentInner > createByIdAsync ( String roleId , RoleAssignmentCreateParameters parameters , final ServiceCallback < RoleAssignmentInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createByIdWithServiceResponseAsync ( roleId , parameters ) , serviceCallback ) ; |
public class QuartzSchedulerPlugin {

    /**
     * On initialization the triggers and related esjp are loaded from the eFaps Database.
     *
     * @see org.quartz.spi.SchedulerPlugin#initialize(java.lang.String, org.quartz.Scheduler)
     * @param _name Name of the scheduler
     * @param _scheduler scheduler
     * @param _loadHelper the ClassLoadHelper the <code>SchedulerFactory</code> is actually using
     * @throws SchedulerException on error
     */
    @Override
    public void initialize(final String _name, final Scheduler _scheduler, final ClassLoadHelper _loadHelper)
        throws SchedulerException
    {
        try {
            // Query every trigger definition stored in the eFaps database.
            final QueryBuilder queryBldr = new QueryBuilder(CIAdminCommon.QuartzTriggerAbstract);
            final MultiPrintQuery multi = queryBldr.getPrint();
            multi.addAttribute(CIAdminCommon.QuartzTriggerAbstract.Type,
                            CIAdminCommon.QuartzTriggerAbstract.Name,
                            CIAdminCommon.QuartzTriggerAbstract.Parameter1,
                            CIAdminCommon.QuartzTriggerAbstract.Parameter2,
                            CIAdminCommon.QuartzTriggerAbstract.Parameter3);
            // Resolve the label of the esjp class file linked to the trigger.
            final SelectBuilder sel = new SelectBuilder()
                            .linkto(CIAdminCommon.QuartzTriggerAbstract.ESJPLink).file().label();
            multi.addSelect(sel);
            multi.execute();
            while (multi.next()) {
                final Type type = multi.<Type>getAttribute(CIAdminCommon.QuartzTriggerAbstract.Type);
                final String name = multi.<String>getAttribute(CIAdminCommon.QuartzTriggerAbstract.Name);
                final Integer para1 = multi.<Integer>getAttribute(CIAdminCommon.QuartzTriggerAbstract.Parameter1);
                final Integer para2 = multi.<Integer>getAttribute(CIAdminCommon.QuartzTriggerAbstract.Parameter2);
                final Integer para3 = multi.<Integer>getAttribute(CIAdminCommon.QuartzTriggerAbstract.Parameter3);
                final String esjp = multi.<String>getSelect(sel);
                Trigger trigger = null;
                // Build a Quartz trigger matching the stored trigger kind. Parameter usage
                // as evidenced below: interval kinds use para1 = interval, para2 = repeat
                // count (<= 0 means repeat forever); minutely additionally delays the start
                // by para3 minutes; daily uses para1/para2 as hour/minute; weekly and
                // monthly use para1/para2/para3 as day/hour/minute.
                if (type.isKindOf(CIAdminCommon.QuartzTriggerSecondly.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(para2 > 0
                            ? SimpleScheduleBuilder.simpleSchedule().withIntervalInSeconds(para1).withRepeatCount(para2)
                            : SimpleScheduleBuilder.repeatSecondlyForever(para1))
                        .build();
                } else if (type.isKindOf(CIAdminCommon.QuartzTriggerMinutely.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(para2 > 0
                            ? SimpleScheduleBuilder.simpleSchedule().withIntervalInMinutes(para1).withRepeatCount(para2)
                            : SimpleScheduleBuilder.repeatMinutelyForever(para1))
                        .startAt(new DateTime().plusMinutes(para3).toDate())
                        .build();
                } else if (type.isKindOf(CIAdminCommon.QuartzTriggerHourly.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(para2 > 0
                            ? SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(para1).withRepeatCount(para2)
                            : SimpleScheduleBuilder.repeatHourlyForever(para1))
                        .build();
                } else if (type.isKindOf(CIAdminCommon.QuartzTriggerDaily.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(para1, para2))
                        .build();
                } else if (type.isKindOf(CIAdminCommon.QuartzTriggerWeekly.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(CronScheduleBuilder.weeklyOnDayAndHourAndMinute(para1, para2, para3))
                        .build();
                } else if (type.isKindOf(CIAdminCommon.QuartzTriggerMonthly.getType())) {
                    trigger = TriggerBuilder.newTrigger().withIdentity(name)
                        .withSchedule(CronScheduleBuilder.monthlyOnDayAndHourAndMinute(para1, para2, para3))
                        .build();
                }
                // Load the esjp class through the eFaps class loader without initializing it.
                @SuppressWarnings("unchecked")
                final Class<? extends Job> clazz =
                    (Class<? extends Job>) Class.forName(esjp, false, EFapsClassLoader.getInstance());
                // class must be instantiated to force that related esjps are also loaded here
                clazz.newInstance();
                final JobDetail jobDetail = JobBuilder.newJob(clazz)
                    .withIdentity(name + "_" + esjp, Quartz.QUARTZGROUP).build();
                // Unknown trigger kinds leave trigger == null; their jobs are silently skipped.
                if (trigger != null) {
                    _scheduler.scheduleJob(jobDetail, trigger);
                }
            }
        } catch (final ClassNotFoundException e) {
            throw new SchedulerException(e);
        } catch (final EFapsException e) {
            throw new SchedulerException(e);
        } catch (final InstantiationException e) {
            throw new SchedulerException(e);
        } catch (final IllegalAccessException e) {
            throw new SchedulerException(e);
        }
    }
}
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createCCR ( int cic ) */
public ContinuityCheckRequestMessage createCCR ( int cic ) { } } | ContinuityCheckRequestMessage msg = createCCR ( ) ; CircuitIdentificationCode code = this . parameterFactory . createCircuitIdentificationCode ( ) ; code . setCIC ( cic ) ; msg . setCircuitIdentificationCode ( code ) ; return msg ; |
public class MediaFormat { /** * Try to guess a nice human readable ratio string from the given decimal ratio
* @ param ratio Ratio
* @ param numberFormat Number format
* @ return Ratio display string or null if no nice string was found */
private static String guessHumanReadableRatioString ( double ratio , NumberFormat numberFormat ) { } } | for ( long width = 1 ; width <= 50 ; width ++ ) { double height = width / ratio ; if ( isLong ( height ) ) { return numberFormat . format ( width ) + ":" + numberFormat . format ( height ) ; } } for ( long width = 1 ; width <= 200 ; width ++ ) { double height = width / 2d / ratio ; if ( isHalfLong ( height ) ) { return numberFormat . format ( width / 2d ) + ":" + numberFormat . format ( height ) ; } } return null ; |
public class ChromeDriver { /** * Execute a Chrome Devtools Protocol command and get returned result . The
* command and command args should follow
* < a href = " https : / / chromedevtools . github . io / devtools - protocol / " > chrome
* devtools protocol domains / commands < / a > . */
public Map < String , Object > executeCdpCommand ( String commandName , Map < String , Object > parameters ) { } } | Objects . requireNonNull ( commandName , "Command name must be set." ) ; Objects . requireNonNull ( parameters , "Parameters for command must be set." ) ; @ SuppressWarnings ( "unchecked" ) Map < String , Object > toReturn = ( Map < String , Object > ) getExecuteMethod ( ) . execute ( ChromeDriverCommand . EXECUTE_CDP_COMMAND , ImmutableMap . of ( "cmd" , commandName , "params" , parameters ) ) ; return ImmutableMap . copyOf ( toReturn ) ; |
public class ExceptionSoftening {

    /**
     * Returns a set of catch types that the current pc is in.
     *
     * @param infos the list of catch infos for this method
     * @param pc the current pc
     * @return a set of catch exception types that the pc is currently in
     */
    private static Set<String> findPossibleCatchSignatures(List<CatchInfo> infos, int pc) {
        Set<String> catchTypes = new HashSet<>(6);
        // Walk the catch blocks backwards, i.e. from the last registered handler outwards.
        ListIterator<CatchInfo> it = infos.listIterator(infos.size());
        while (it.hasPrevious()) {
            CatchInfo ci = it.previous();
            // Half-open range check: start inclusive, finish exclusive.
            if ((pc >= ci.getStart()) && (pc < ci.getFinish())) {
                catchTypes.add(ci.getSignature());
            } else {
                // NOTE(review): stops at the first handler that does not enclose pc,
                // assuming enclosing handlers are contiguous at the tail of the list —
                // confirm the ordering guarantee of 'infos' before relying on this.
                break;
            }
        }
        return catchTypes;
    }
}
public class DateTimeExtensions {

    /**
     * Converts the Calendar to a corresponding {@link java.time.LocalTime}. If the
     * Calendar has a different time zone than the system default, the LocalTime will
     * be adjusted into the default time zone.
     *
     * @param self a Calendar
     * @return a LocalTime
     * @since 2.5.0
     */
    public static LocalTime toLocalTime(final Calendar self) {
        // FIX: the original read the HOUR_OF_DAY/MINUTE/... fields directly, which are in
        // the calendar's OWN zone, contradicting the documented adjustment into the system
        // default zone. Going through the instant performs the promised conversion while
        // preserving millisecond precision.
        return self.toInstant().atZone(java.time.ZoneId.systemDefault()).toLocalTime();
    }
}
public class Default { public void handleOptions ( HttpServletRequest request , HttpServletResponse response ) throws IOException { } } | // Handle OPTIONS request for entire server
// 9.2
response . setIntHeader ( HttpFields . __ContentLength , 0 ) ; response . setHeader ( HttpFields . __Allow , _AllowString ) ; response . flushBuffer ( ) ; |
public class AbstractViewQuery { /** * Get Bitmap for current view
* @ return */
public Bitmap getViewBitmap ( ) { } } | if ( view != null ) return null ; Bitmap bitmap = Bitmap . createBitmap ( view . getWidth ( ) , view . getHeight ( ) , Bitmap . Config . ARGB_8888 ) ; Canvas canvas = new Canvas ( bitmap ) ; view . layout ( view . getLeft ( ) , view . getTop ( ) , view . getRight ( ) , view . getBottom ( ) ) ; view . draw ( canvas ) ; return bitmap ; |
public class S3Discovery {

    /**
     * Writes the domain controller data to an S3 file.
     *
     * @param data the domain controller data
     * @param domainName the name of the directory in the bucket to write the S3 file to
     * @throws IOException on write failure
     */
    private void writeToFile(List<DomainControllerData> data, String domainName) throws IOException {
        // Nothing to do without a target directory or payload.
        if (domainName == null || data == null) {
            return;
        }
        // Lazily establish the S3 connection on first use.
        if (conn == null) {
            init();
        }
        try {
            String key = S3Util.sanitize(domainName) + "/" + S3Util.sanitize(DC_FILE_NAME);
            byte[] buf = S3Util.domainControllerDataToByteBuffer(data);
            S3Object val = new S3Object(buf, null);
            if (usingPreSignedUrls()) {
                // Pre-signed PUT: the URL already encodes bucket/key; mark the object public.
                Map headers = new TreeMap();
                headers.put("x-amz-acl", Arrays.asList("public-read"));
                conn.put(pre_signed_put_url, val, headers).connection.getResponseMessage();
            } else {
                // Regular PUT against the configured bucket location.
                Map headers = new TreeMap();
                headers.put("Content-Type", Arrays.asList("text/plain"));
                conn.put(location, key, val, headers).connection.getResponseMessage();
            }
        } catch (Exception e) {
            // NOTE(review): only the message is preserved here; the original cause/stack
            // is dropped by cannotWriteToS3File — confirm that is acceptable.
            throw HostControllerLogger.ROOT_LOGGER.cannotWriteToS3File(e.getLocalizedMessage());
        }
    }
}
public class Layers { /** * Removes a layer .
* @ param layer The layer to remove
* @ param redraw Whether the map should be redrawn after removing the layer
* @ see List # remove ( Object ) */
public synchronized boolean remove ( Layer layer , boolean redraw ) { } } | checkIsNull ( layer ) ; if ( this . layersList . remove ( layer ) ) { layer . unassign ( ) ; if ( redraw ) { this . redrawer . redrawLayers ( ) ; } return true ; } return false ; |
public class CacheAdapter {

    /**
     * Initializes the instance from configuration properties.
     *
     * @param properties configuration properties
     */
    public void init(Properties properties) {
        // Read staleness tolerances from configuration.
        staleIfError = Parameters.STALE_IF_ERROR.getValue(properties);
        staleWhileRevalidate = Parameters.STALE_WHILE_REVALIDATE.getValue(properties);
        int maxAsynchronousWorkers = Parameters.MAX_ASYNCHRONOUS_WORKERS.getValue(properties);
        // Background revalidation needs at least one worker thread; fail fast on a
        // contradictory configuration.
        if (staleWhileRevalidate > 0 && maxAsynchronousWorkers == 0) {
            throw new ConfigurationException("You must set a positive value for maxAsynchronousWorkers "
                    + "in order to enable background revalidation (staleWhileRevalidate)");
        }
        ttl = Parameters.TTL.getValue(properties);
        xCacheHeader = Parameters.X_CACHE_HEADER.getValue(properties);
        viaHeader = Parameters.VIA_HEADER.getValue(properties);
        // Log the effective cache configuration for this provider.
        LOG.info("Initializing cache for provider "
                + Arrays.toString(Parameters.REMOTE_URL_BASE.getValue(properties))
                + " staleIfError=" + staleIfError
                + " staleWhileRevalidate=" + staleWhileRevalidate
                + " ttl=" + ttl
                + " xCacheHeader=" + xCacheHeader
                + " viaHeader=" + viaHeader);
    }
}
public class CommonOps_ZDRM { /** * Creates a new square matrix whose diagonal elements are specified by data and all
* the other elements are zero . < br >
* < br >
* a < sub > ij < / sub > = 0 if i & le ; j < br >
* a < sub > ij < / sub > = diag [ i ] if i = j < br >
* @ param data Contains the values of the diagonal elements of the resulting matrix .
* @ return A new complex matrix . */
public static ZMatrixRMaj diag ( double ... data ) { } } | if ( data . length % 2 == 1 ) throw new IllegalArgumentException ( "must be an even number of arguments" ) ; return diag ( new ZMatrixRMaj ( 1 , 1 ) , data . length / 2 , data ) ; |
public class DataJoinReducerBase { /** * The subclass can overwrite this method to perform additional filtering
* and / or other processing logic before a value is collected .
* @ param key
* @ param aRecord
* @ param output
* @ param reporter
* @ throws IOException */
protected void collect ( Object key , TaggedMapOutput aRecord , OutputCollector output , Reporter reporter ) throws IOException { } } | this . collected += 1 ; addLongValue ( "collectedCount" , 1 ) ; if ( aRecord != null ) { output . collect ( key , aRecord . getData ( ) ) ; reporter . setStatus ( "key: " + key . toString ( ) + " collected: " + collected ) ; addLongValue ( "actuallyCollectedCount" , 1 ) ; } |
public class DaseinSequencer { /** * Creates a new entry in the database for this sequence . This method
* will throw an error if two threads are simultaneously trying
* to create a sequence . This state should never occur if you
* go ahead and create the sequence in the database before
* deploying the application . It could be avoided by checking
* SQL exceptions for the proper XOPEN SQLState for duplicate
* keys . Unfortunately , that approach is error prone due to the lack
* of consistency in proper XOPEN SQLState reporting in JDBC drivers .
* @ param conn the JDBC connection to use
* @ throws java . sql . SQLException a database error occurred */
private void create ( Connection conn ) throws SQLException { } } | logger . debug ( "enter - create()" ) ; try { PreparedStatement stmt = null ; ResultSet rs = null ; try { stmt = conn . prepareStatement ( CREATE_SEQ ) ; stmt . setString ( INS_NAME , getName ( ) ) ; stmt . setLong ( INS_NEXT_KEY , nextKey ) ; stmt . setLong ( INS_INTERVAL , interval ) ; stmt . setLong ( INS_UPDATE , System . currentTimeMillis ( ) ) ; if ( stmt . executeUpdate ( ) != 1 ) { logger . warn ( "Unable to create sequence " + getName ( ) + "." ) ; sequence = - 1L ; } } finally { if ( rs != null ) { try { rs . close ( ) ; } catch ( SQLException ignore ) { /* ignore */
} } if ( stmt != null ) { try { stmt . close ( ) ; } catch ( SQLException ignore ) { /* ignore */
} } } } finally { logger . debug ( "exit - create()" ) ; } |
public class NGCommunicator {

    /**
     * Reads a NailGun chunk header from the underlying InputStream.
     *
     * @return type of chunk received
     * @throws EOFException if the underlying stream/socket is closed, which happens on
     *     client disconnection or server closure
     * @throws IOException if thrown by the underlying InputStream, or if an unexpected
     *     NailGun chunk type is encountered
     */
    private byte readChunk() throws IOException {
        try {
            return readChunkImpl();
        } catch (SocketException ex) {
            // Some stream implementations may throw SocketException and not EOFException
            // when the socket is terminated by the application. By common agreement,
            // rethrow it as EOFException during shutdown and let an upstream handler
            // take care of it.
            synchronized (orchestratorEvent) {
                // 'shutdown' is read under the orchestratorEvent lock, so we only
                // translate the exception for an intentional shutdown.
                if (shutdown) {
                    EOFException newException = new EOFException("NGCommunicator is shutting down");
                    newException.initCause(ex);
                    throw newException;
                }
            }
            // Not shutting down: this SocketException is a genuine error; propagate as-is.
            throw ex;
        }
    }
}
public class KNXAddress { /** * Creates a KNX address from a string < code > address < / code > representation .
* An address level separator of type ' . ' found in < code > address < / code > indicates an
* individual address , i . e . an { @ link IndividualAddress } is created , otherwise a
* { @ link GroupAddress } is created . < br >
* Allowed separators are ' . ' or ' / ' , mutually exclusive .
* @ param address string containing the KNX address
* @ return the created KNX address , either of subtype { @ link GroupAddress } or
* { @ link IndividualAddress }
* @ throws KNXFormatException thrown on unknown address type , wrong address syntax or
* wrong separator used */
public static KNXAddress create ( String address ) throws KNXFormatException { } } | if ( address . indexOf ( '.' ) != - 1 ) return new IndividualAddress ( address ) ; return new GroupAddress ( address ) ; |
public class IOUtils { /** * Closes an image input stream , catching and logging any exceptions
* @ param aImageInputStream A supplied image input stream to close */
public static void closeQuietly ( final ImageInputStream aImageInputStream ) { } } | if ( aImageInputStream != null ) { try { aImageInputStream . close ( ) ; } catch ( final IOException details ) { LOGGER . error ( details . getMessage ( ) , details ) ; } } |
public class FourierTransform { /** * 2 - D Discrete Fourier Transform .
* @ param data Data to transform .
* @ param direction Transformation direction . */
public static void DFT2 ( ComplexNumber [ ] [ ] data , Direction direction ) { } } | int n = data . length ; int m = data [ 0 ] . length ; ComplexNumber [ ] row = new ComplexNumber [ Math . max ( m , n ) ] ; for ( int i = 0 ; i < n ; i ++ ) { // copy row
for ( int j = 0 ; j < n ; j ++ ) row [ j ] = data [ i ] [ j ] ; // transform it
FourierTransform . DFT ( row , direction ) ; // copy back
for ( int j = 0 ; j < n ; j ++ ) data [ i ] [ j ] = row [ j ] ; } // process columns
ComplexNumber [ ] col = new ComplexNumber [ n ] ; for ( int j = 0 ; j < n ; j ++ ) { // copy column
for ( int i = 0 ; i < n ; i ++ ) col [ i ] = data [ i ] [ j ] ; // transform it
FourierTransform . DFT ( col , direction ) ; // copy back
for ( int i = 0 ; i < n ; i ++ ) data [ i ] [ j ] = col [ i ] ; } |
public class DescribeProvisionedProductPlanRequestMarshaller {

    /**
     * Marshalls the given parameter object onto the protocol marshaller, field by field.
     *
     * @param describeProvisionedProductPlanRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bindings
     */
    public void marshall(DescribeProvisionedProductPlanRequest describeProvisionedProductPlanRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeProvisionedProductPlanRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its pre-declared marshalling location.
            protocolMarshaller.marshall(describeProvisionedProductPlanRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING);
            protocolMarshaller.marshall(describeProvisionedProductPlanRequest.getPlanId(), PLANID_BINDING);
            protocolMarshaller.marshall(describeProvisionedProductPlanRequest.getPageSize(), PAGESIZE_BINDING);
            protocolMarshaller.marshall(describeProvisionedProductPlanRequest.getPageToken(), PAGETOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DurationConverter {

    /**
     * Parses a duration string. If no units are specified in the string, it is assumed
     * to be in milliseconds.
     * This implementation was blatantly stolen/adapted from the typesafe-config project:
     * https://github.com/typesafehub/config/blob/v1.3.0/config/src/main/java/com/typesafe/config/impl/SimpleConfig.java#L551-L624
     *
     * @param input the string to parse
     * @return duration
     * @throws IllegalArgumentException if input is invalid
     */
    private static Duration parseDuration(String input) {
        // Split into the numeric prefix and the unit suffix.
        String[] parts = ConverterUtil.splitNumericAndChar(input);
        String numberString = parts[0];
        String originalUnitString = parts[1];
        String unitString = originalUnitString;
        if (numberString.length() == 0) {
            throw new IllegalArgumentException(String.format("No number in duration value '%s'", input));
        }
        // Normalize long-form unit names to their plural form ("milli" -> "millis"),
        // leaving the short 1-2 character abbreviations untouched.
        if (unitString.length() > 2 && !unitString.endsWith("s")) {
            unitString = unitString + "s";
        }
        ChronoUnit units;
        // note that this is deliberately case-sensitive
        switch (unitString) {
            case "ns":
            case "nanos":
            case "nanoseconds":
                units = ChronoUnit.NANOS;
                break;
            case "us":
            case "µs":
            case "micros":
            case "microseconds":
                units = ChronoUnit.MICROS;
                break;
            // An empty unit string defaults to milliseconds, per the Javadoc.
            case "":
            case "ms":
            case "millis":
            case "milliseconds":
                units = ChronoUnit.MILLIS;
                break;
            case "s":
            case "seconds":
                units = ChronoUnit.SECONDS;
                break;
            case "m":
            case "minutes":
                units = ChronoUnit.MINUTES;
                break;
            case "h":
            case "hours":
                units = ChronoUnit.HOURS;
                break;
            case "d":
            case "days":
                units = ChronoUnit.DAYS;
                break;
            default:
                throw new IllegalArgumentException(
                    String.format("Could not parse time unit '%s' (try ns, us, ms, s, m, h, d)", originalUnitString));
        }
        return Duration.of(Long.parseLong(numberString), units);
    }
}
public class Similarity {

    /**
     * Calculates the similarity of the two vectors using the provided similarity measure.
     *
     * @param similarityType the similarity evaluation to use when comparing {@code a}
     *     and {@code b}
     * @param a a {@code Vector}
     * @param b a {@code Vector}
     * @return the similarity according to the specified measure
     */
    public static <T extends Vector> double getSimilarity(SimType similarityType, T a, T b) {
        // Dispatch to the measure-specific implementation.
        switch (similarityType) {
            case COSINE:
                return cosineSimilarity(a, b);
            case PEARSON_CORRELATION:
                return correlation(a, b);
            case EUCLIDEAN:
                return euclideanSimilarity(a, b);
            case SPEARMAN_RANK_CORRELATION:
                return spearmanRankCorrelationCoefficient(a, b);
            case JACCARD_INDEX:
                return jaccardIndex(a, b);
            case AVERAGE_COMMON_FEATURE_RANK:
                return averageCommonFeatureRank(a, b);
            case LIN:
                return linSimilarity(a, b);
            case KL_DIVERGENCE:
                return klDivergence(a, b);
            case KENDALLS_TAU:
                return kendallsTau(a, b);
            case TANIMOTO_COEFFICIENT:
                return tanimotoCoefficient(a, b);
        }
        // NOTE(review): silently returns 0 for any SimType constant not handled above;
        // consider throwing IllegalArgumentException instead — confirm callers' expectations.
        return 0;
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PersonType }
* { @ code > } */
@ XmlElementDecl ( namespace = "http://www.w3.org/2005/Atom" , name = "author" , scope = EntryType . class ) public JAXBElement < PersonType > createEntryTypeAuthor ( PersonType value ) { } } | return new JAXBElement < PersonType > ( ENTRY_TYPE_AUTHOR_QNAME , PersonType . class , EntryType . class , value ) ; |
public class ButtonRadioSet { /** * Method creating a new { @ link Radio }
* @ param wicketId
* Wicket identifier
* @ param model
* Model to use
* @ param group
* Group of the { @ link Radio }
* @ return a { @ link Radio } */
protected Radio < T > newRadio ( String wicketId , IModel < T > model , RadioGroup < T > group ) { } } | Radio < T > radio = new Radio < T > ( wicketId , model , group ) ; return radio ; |
public class BinaryRowSerializer { /** * Copy a binaryRow which stored in paged input view to output view .
* @ param source source paged input view where the binary row stored
* @ param target the target output view . */
public void copyFromPagesToView ( AbstractPagedInputView source , DataOutputView target ) throws IOException { } } | checkSkipReadForFixLengthPart ( source ) ; int length = source . readInt ( ) ; target . writeInt ( length ) ; target . write ( source , length ) ; |
public class TableDefinition {

    /**
     * Compute and return the date at which the shard with the given number starts,
     * based on this table's sharding options. For example, if sharding-granularity
     * is MONTH and the sharding-start is 2012-10-15, then shard 2 starts on 2012-11-01.
     *
     * @param shardNumber Shard number (&gt; 0).
     * @return Date on which the given shard starts. It will be &gt;= this table's
     *         sharding-start option.
     */
    public Date computeShardStart(int shardNumber) {
        // Preconditions (assertion-only: callers must guarantee these).
        assert isSharded();
        assert shardNumber > 0;
        assert m_shardingStartDate != null;
        // Shard #1 always starts on the sharding-start date.
        Date result = null;
        if (shardNumber == 1) {
            result = m_shardingStartDate.getTime();
        } else {
            // Clone m_shardingStartDate and adjust by shard number.
            GregorianCalendar shardDate = (GregorianCalendar) m_shardingStartDate.clone();
            switch (m_shardingGranularity) {
            case HOUR:
                // Increment start date HOUR by shard number - 1.
                shardDate.add(Calendar.HOUR_OF_DAY, shardNumber - 1);
                break;
            case DAY:
                // Increment start date DAY by shard number - 1.
                shardDate.add(Calendar.DAY_OF_MONTH, shardNumber - 1);
                break;
            case WEEK:
                // Round the sharding-start date down to the MONDAY of the same week,
                // then advance by (shard number - 1) whole weeks.
                shardDate = Utils.truncateToWeek(m_shardingStartDate);
                shardDate.add(Calendar.DAY_OF_MONTH, (shardNumber - 1) * 7);
                break;
            case MONTH:
                // Increment start date MONTH by shard number - 1; day is always 1.
                shardDate.add(Calendar.MONTH, shardNumber - 1);
                shardDate.set(Calendar.DAY_OF_MONTH, 1);
                break;
            }
            result = shardDate.getTime();
        }
        // Sanity check: the computed date must map back to the same shard number.
        assert computeShardNumber(result) == shardNumber;
        return result;
    }
}
public class SmbFile { /** * Retuns the Windows UNC style path with backslashs intead of forward slashes .
* @ return The UNC path . */
public String getUncPath ( ) { } } | getUncPath0 ( ) ; if ( share == null ) { return "\\\\" + url . getHost ( ) ; } return "\\\\" + url . getHost ( ) + canon . replace ( '/' , '\\' ) ; |
public class CharOperation { /** * Answers the concatenation of the three arrays inserting the sep1 character between the first two arrays and sep2
* between the last two . It answers null if the three arrays are null . If the first array is null , then it answers
* the concatenation of second and third inserting the sep2 character between them . If the second array is null ,
* then it answers the concatenation of first and third inserting the sep1 character between them . If the third
* array is null , then it answers the concatenation of first and second inserting the sep1 character between them . < br >
* < br >
* For example :
* < ol >
* < li >
* < pre >
* first = null
* sep1 = ' / '
* second = { ' a ' }
* sep2 = ' : '
* third = { ' b ' }
* = & gt ; result = { ' a ' , ' : ' , ' b ' }
* < / pre >
* < / li >
* < li >
* < pre >
* first = { ' a ' }
* sep1 = ' / '
* second = null
* sep2 = ' : '
* third = { ' b ' }
* = & gt ; result = { ' a ' , ' / ' , ' b ' }
* < / pre >
* < / li >
* < li >
* < pre >
* first = { ' a ' }
* sep1 = ' / '
* second = { ' b ' }
* sep2 = ' : '
* third = null
* = & gt ; result = { ' a ' , ' / ' , ' b ' }
* < / pre >
* < / li >
* < li >
* < pre >
* first = { ' a ' }
* sep1 = ' / '
* second = { ' b ' }
* sep2 = ' : '
* third = { ' c ' }
* = & gt ; result = { ' a ' , ' / ' , ' b ' , ' : ' , ' c ' }
* < / pre >
* < / li >
* < / ol >
* @ param first
* the first array to concatenate
* @ param sep1
* the character to insert
* @ param second
* the second array to concatenate
* @ param sep2
* the character to insert
* @ param third
* the second array to concatenate
* @ return the concatenation of the three arrays inserting the sep1 character between the two arrays and sep2
* between the last two . */
public static final char [ ] concat ( char [ ] first , char sep1 , char [ ] second , char sep2 , char [ ] third ) { } } | if ( first == null ) { return concat ( second , third , sep2 ) ; } if ( second == null ) { return concat ( first , third , sep1 ) ; } if ( third == null ) { return concat ( first , second , sep1 ) ; } int length1 = first . length ; int length2 = second . length ; int length3 = third . length ; char [ ] result = new char [ length1 + length2 + length3 + 2 ] ; System . arraycopy ( first , 0 , result , 0 , length1 ) ; result [ length1 ] = sep1 ; System . arraycopy ( second , 0 , result , length1 + 1 , length2 ) ; result [ length1 + length2 + 1 ] = sep2 ; System . arraycopy ( third , 0 , result , length1 + length2 + 2 , length3 ) ; return result ; |
public class Capacitor {

    /**
     * Reads into an array, like {@code InputStream.read(byte[], int, int)}:
     * fills {@code array} starting at {@code offset} with up to
     * {@code length} bytes drawn from the chained buffers.
     *
     * @param array destination byte array
     * @param offset index in {@code array} at which to start writing
     * @param length maximum number of bytes to read
     * @return the number of bytes actually copied, or -1 when no current
     *         buffer is available (end of data)
     */
    public int read(byte[] array, int offset, int length) {
        // No current buffer at all: signal end-of-data like a stream would.
        if (curr == null)
            return -1;
        int len;
        int count = 0;
        while (length > 0) { // loop while there's still data needed
            if ((len = curr.remaining()) > length) {
                // Current buffer has more than enough: take exactly what's needed.
                curr.get(array, offset, length);
                count += length;
                length = 0;
            } else {
                // Drain the current buffer, track how much is still needed,
                // and advance to the next buffer in the chain (if any).
                curr.get(array, offset, len);
                count += len;
                offset += len;
                length -= len;
                if (idx < bbs.size()) {
                    curr = bbs.get(idx++);
                } else {
                    // No more buffers: stop and return the partial count.
                    length = 0;
                }
            }
        }
        return count;
    }
}
public class SpScheduler {

    /**
     * Handles a {@link FragmentTaskMessage}: on the leader path, assigns a new
     * SpHandle, replicates the fragment to replica sites (with duplicate
     * counting), and finally offers the fragment for local execution; on the
     * replica path, only records the SpHandle carried by the message.
     */
    void handleFragmentTaskMessage(FragmentTaskMessage message) {
        FragmentTaskMessage msg = message;
        long newSpHandle;
        // The site has been marked as non-leader. The follow-up batches or
        // fragments are processed here.
        if (!message.isForReplica() && (m_isLeader || message.isExecutedOnPreviousLeader())) {
            // ---- message processed on leader ----
            // Quick hack to make progress... we need to copy the FragmentTaskMessage
            // before we start mucking with its state (SPHANDLE). We need to revisit
            // all the messaging mess at some point.
            msg = new FragmentTaskMessage(message.getInitiatorHSId(), message.getCoordinatorHSId(), message);
            // Not going to use the timestamp from the new Ego because the
            // multi-part timestamp is what should be used.
            msg.setTimestamp(message.getTimestamp());
            msg.setExecutedOnPreviousLeader(message.isExecutedOnPreviousLeader());
            if (!message.isReadOnly()) {
                // Writes advance the transaction Ego to mint a fresh SpHandle.
                TxnEgo ego = advanceTxnEgo();
                newSpHandle = ego.getTxnId();
                if (m_outstandingTxns.get(msg.getTxnId()) == null) {
                    updateMaxScheduledTransactionSpHandle(newSpHandle);
                }
            } else {
                // Reads reuse the current max scheduled SpHandle.
                newSpHandle = getMaxScheduledTxnSpHandle();
            }
            msg.setSpHandle(newSpHandle);
            msg.setLastSpUniqueId(m_uniqueIdGenerator.getLastUniqueId());
            logRepair(msg);
            if (msg.getInitiateTask() != null) {
                msg.getInitiateTask().setSpHandle(newSpHandle); // set the handle
                // Trigger reserialization so the new handle is used.
                msg.setStateForDurability(msg.getInitiateTask(), msg.getInvolvedPartitions());
            }
            /*
             * If there are replicas to send it to, forward it!
             * Unless... it's read only AND not a sysproc. Read only sysprocs may
             * expect to be sent everywhere.
             * In that case don't propagate it to avoid a determinism check and
             * extra messaging overhead.
             */
            if (IS_KSAFE_CLUSTER && (!message.isReadOnly() || msg.isSysProcTask())) {
                for (long hsId : m_sendToHSIds) {
                    FragmentTaskMessage finalMsg = msg;
                    final VoltTrace.TraceEventBatch traceLog = VoltTrace.log(VoltTrace.Category.SPI);
                    if (traceLog != null) {
                        traceLog.add(() -> VoltTrace.beginAsync("replicatefragment",
                                MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), hsId, finalMsg.getSpHandle(), finalMsg.getTxnId()),
                                "txnId", TxnEgo.txnIdToString(finalMsg.getTxnId()),
                                "dest", CoreUtils.hsIdToString(hsId)));
                    }
                }
                FragmentTaskMessage replmsg = new FragmentTaskMessage(m_mailbox.getHSId(), m_mailbox.getHSId(), msg);
                replmsg.setForReplica(true);
                replmsg.setTimestamp(msg.getTimestamp());
                // K-safety cluster doesn't always mean partition has replicas;
                // node failure may reduce the number of replicas for each partition.
                if (m_sendToHSIds.length > 0) {
                    m_mailbox.send(m_sendToHSIds, replmsg);
                }
                DuplicateCounter counter;
                /*
                 * Non-determinism should be impossible to happen with MP fragments.
                 * If you see "MP_DETERMINISM_ERROR" as procedure name in the crash
                 * logs something has horribly gone wrong.
                 */
                if (message.getFragmentTaskType() != FragmentTaskMessage.SYS_PROC_PER_SITE) {
                    counter = new DuplicateCounter(msg.getCoordinatorHSId(), msg.getTxnId(), m_replicaHSIds, replmsg);
                } else {
                    counter = new SysProcDuplicateCounter(msg.getCoordinatorHSId(), msg.getTxnId(), m_replicaHSIds, replmsg);
                }
                safeAddToDuplicateCounterMap(new DuplicateCounterKey(message.getTxnId(), newSpHandle), counter);
            }
        } else {
            // ---- message processed on replica ----
            logRepair(msg);
            newSpHandle = msg.getSpHandle();
            setMaxSeenTxnId(newSpHandle);
        }
        Iv2Trace.logFragmentTaskMessage(message, m_mailbox.getHSId(), newSpHandle, false);
        doLocalFragmentOffer(msg);
    }
}
public class DefaultFontMapper { /** * Returns an AWT Font which can be used to represent the given BaseFont
* @ paramfontthe font to be converted
* @ paramsizethe desired point size of the resulting font
* @ returna Font which has similar properties to the provided BaseFont */
public Font pdfToAwt ( BaseFont font , int size ) { } } | String names [ ] [ ] = font . getFullFontName ( ) ; if ( names . length == 1 ) return new Font ( names [ 0 ] [ 3 ] , 0 , size ) ; String name10 = null ; String name3x = null ; for ( int k = 0 ; k < names . length ; ++ k ) { String name [ ] = names [ k ] ; if ( name [ 0 ] . equals ( "1" ) && name [ 1 ] . equals ( "0" ) ) name10 = name [ 3 ] ; else if ( name [ 2 ] . equals ( "1033" ) ) { name3x = name [ 3 ] ; break ; } } String finalName = name3x ; if ( finalName == null ) finalName = name10 ; if ( finalName == null ) finalName = names [ 0 ] [ 3 ] ; return new Font ( finalName , 0 , size ) ; |
public class RelationalHSCache {

    /**
     * Removes the specified item from the system permanently.
     *
     * @param xaction the transaction under which this event is occurring
     * @param item the item to be removed
     * @throws PersistenceException an error occurred talking to the data store or
     *         removal of these objects is prohibited
     */
    @Override
    public void remove(Transaction xaction, T item) throws PersistenceException {
        // Delete the backing row(s) identified by the item's cache keys,
        // then drop the item from the in-memory cache.
        xaction.execute(getDeleter(), getCache().getKeys(item), writeDataSource);
        getCache().release(item);
    }
}
public class RowCursor { /** * Set a blob value with an open blob stream . */
public final OutputStream openOutputStream ( int index ) { } } | Column column = _row . columns ( ) [ index ] ; return column . openOutputStream ( this ) ; |
public class ComponentDao {

    /**
     * Same as {@link #selectByQuery(DbSession, String, ComponentQuery, int, int)}
     * except that the filter on organization is disabled (passes a null
     * organization UUID to the implementation).
     *
     * @param session open DB session
     * @param query component query filters
     * @param offset pagination offset
     * @param limit pagination page size
     * @return the matching components
     */
    public List<ComponentDto> selectByQuery(DbSession session, ComponentQuery query, int offset, int limit) {
        return selectByQueryImpl(session, null, query, offset, limit);
    }
}
public class TagVFilter { /** * Converts the map to a filter list . If a filter already exists for a
* tag group by and we ' re told to process group bys , then the duplicate
* is skipped .
* @ param map A set of tag keys and values . May be null or empty .
* @ param filters A set of filters to add the converted filters to . This may
* not be null .
* @ param group _ by Whether or not to set the group by flag and kick dupes */
public static void mapToFilters ( final Map < String , String > map , final List < TagVFilter > filters , final boolean group_by ) { } } | if ( map == null || map . isEmpty ( ) ) { return ; } for ( final Map . Entry < String , String > entry : map . entrySet ( ) ) { TagVFilter filter = getFilter ( entry . getKey ( ) , entry . getValue ( ) ) ; if ( filter == null && entry . getValue ( ) . equals ( "*" ) ) { filter = new TagVWildcardFilter ( entry . getKey ( ) , "*" , true ) ; } else if ( filter == null ) { filter = new TagVLiteralOrFilter ( entry . getKey ( ) , entry . getValue ( ) ) ; } if ( group_by ) { filter . setGroupBy ( true ) ; boolean duplicate = false ; for ( final TagVFilter existing : filters ) { if ( filter . equals ( existing ) ) { LOG . debug ( "Skipping duplicate filter: " + existing ) ; existing . setGroupBy ( true ) ; duplicate = true ; break ; } } if ( ! duplicate ) { filters . add ( filter ) ; } } else { filters . add ( filter ) ; } } |
public class CmsListItem { /** * Removes a decoration widget . < p >
* @ param widget the widget to remove
* @ param width the widget width */
protected void removeDecorationWidget ( Widget widget , int width ) { } } | if ( ( widget != null ) && m_decorationWidgets . remove ( widget ) ) { m_decorationWidth -= width ; initContent ( ) ; } |
public class MultiUserChatLight {

    /**
     * Returns a new Chat for sending private messages to a given room occupant.
     * The Chat's occupant address is the room's JID (i.e. roomName@service/nick).
     * The server service will change the 'from' address to the sender's room JID
     * and deliver the message to the intended recipient's full JID.
     *
     * @param occupant occupant unique room JID
     *        (e.g. 'darkcave@macbeth.shakespeare.lit/Paul')
     * @param listener a message listener that will handle messages for the newly
     *        created chat
     * @return new Chat for sending private messages to a given room occupant
     * @deprecated do not re-use the Chat API, which was designed for XMPP-IM 1:1
     *             chats and not MUC-Light private chats
     */
    @SuppressWarnings("deprecation")
    @Deprecated
    // Do not re-use Chat API, which was designed for XMPP-IM 1:1 chats and not MUClight private chats.
    public org.jivesoftware.smack.chat.Chat createPrivateChat(EntityJid occupant, ChatMessageListener listener) {
        return org.jivesoftware.smack.chat.ChatManager.getInstanceFor(connection).createChat(occupant, listener);
    }
}
public class DescribeScalingPlansRequest { /** * The names of the scaling plans ( up to 10 ) . If you specify application sources , you cannot specify scaling plan
* names .
* @ param scalingPlanNames
* The names of the scaling plans ( up to 10 ) . If you specify application sources , you cannot specify scaling
* plan names . */
public void setScalingPlanNames ( java . util . Collection < String > scalingPlanNames ) { } } | if ( scalingPlanNames == null ) { this . scalingPlanNames = null ; return ; } this . scalingPlanNames = new java . util . ArrayList < String > ( scalingPlanNames ) ; |
public class XPathUtils { /** * Evaluate XPath expression with result type NodeList .
* @ param node
* @ param xPathExpression
* @ param nsContext
* @ return */
public static NodeList evaluateAsNodeList ( Node node , String xPathExpression , NamespaceContext nsContext ) { } } | NodeList result = ( NodeList ) evaluateExpression ( node , xPathExpression , nsContext , XPathConstants . NODESET ) ; if ( result == null ) { throw new CitrusRuntimeException ( "No result for XPath expression: '" + xPathExpression + "'" ) ; } return result ; |
public class ComponentsJmxRegistration { /** * Performs the MBean registration .
* @ param resourceDMBeans */
public void registerMBeans ( Collection < ResourceDMBean > resourceDMBeans ) throws CacheException { } } | try { for ( ResourceDMBean resource : resourceDMBeans ) JmxUtil . registerMBean ( resource , getObjectName ( resource ) , mBeanServer ) ; } catch ( Exception e ) { throw new CacheException ( "Failure while registering mbeans" , e ) ; } |
public class Reflector { /** * to invoke a getter Method of a Object
* @ param obj Object to invoke method from
* @ param prop Name of the Method without get
* @ return return Value of the getter Method
* @ throws PageException */
public static Object callGetter ( Object obj , String prop ) throws PageException { } } | try { return getGetter ( obj . getClass ( ) , prop ) . invoke ( obj ) ; } catch ( InvocationTargetException e ) { Throwable target = e . getTargetException ( ) ; if ( target instanceof PageException ) throw ( PageException ) target ; throw Caster . toPageException ( e . getTargetException ( ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } |
public class SimulatorImpl { /** * Logs message and exception and throws Exception .
* @ param message message
* @ param e PyException
* @ throws java . lang . Exception Exception built with ' message + " : \ n " + message _ of _ e ' */
private static void logAndThrowException ( String message , PyException e ) throws Exception { } } | LOGGER . error ( message , e ) ; throw new Exception ( message + ":\n" + PythonHelper . getMessage ( e ) ) ; |
public class ChemObject { /** * Removes a property for a IChemObject .
* @ param description The object description of the property ( most likely a
* unique string )
* @ see # setProperty
* @ see # getProperty */
@ Override public void removeProperty ( Object description ) { } } | if ( properties != null ) { properties . remove ( description ) ; if ( properties . isEmpty ( ) ) properties = null ; } |
public class PersistentIndex {

    /**
     * Returns a <code>ReadOnlyIndexReader</code> and registers
     * <code>listener</code> to send notifications when documents are deleted on
     * <code>this</code> index.
     *
     * @param listener the listener to notify when documents are deleted
     * @return a <code>ReadOnlyIndexReader</code>
     * @throws IOException if the reader cannot be obtained
     */
    synchronized ReadOnlyIndexReader getReadOnlyIndexReader(IndexListener listener) throws IOException {
        // Ensure all in-memory changes are persisted before returning reader to merger.
        releaseWriterAndReaders();
        ReadOnlyIndexReader reader = getReadOnlyIndexReader();
        // Register the listener only after the reader was obtained successfully.
        this.listener = listener;
        return reader;
    }
}
public class DevirtualizePrototypeMethods {

    /**
     * Rewrites an object method call site as a call to a global function that
     * takes "this" as its first argument.
     *
     * <p>Before: o.foo(a, b, c)
     * <p>After:  foo(o, a, b, c)
     *
     * @param getprop the GETPROP node ({@code o.foo}) of the call being rewritten
     * @param newMethodName name of the global function replacing the method
     */
    private void rewriteCall(Node getprop, String newMethodName) {
        checkArgument(getprop.isGetProp(), getprop);
        Node call = getprop.getParent();
        checkArgument(call.isCall(), call);
        Node receiver = getprop.getFirstChild();
        // This rewriting does not exactly preserve order of operations; the newly
        // inserted static method name will be resolved before `receiver` is
        // evaluated. This is known to be safe due to the eligibility checks
        // earlier in the pass.
        //
        // We choose not to do a full-fidelity rewriting (e.g. using
        // `ExpressionDecomposer`) because doing so means extracting `receiver`
        // into a new variable at each call-site. This has a significant
        // code-size impact (circa 2018-11-19).
        getprop.removeChild(receiver);
        call.replaceChild(getprop, receiver);
        call.addChildToFront(IR.name(newMethodName).srcref(getprop));
        if (receiver.isSuper()) {
            // Case: `super.foo(a, b)` => `foo(this, a, b)`
            receiver.setToken(Token.THIS);
        }
        call.putBooleanProp(Node.FREE_CALL, true);
        compiler.reportChangeToEnclosingScope(call);
    }
}
public class AppServiceCertificateOrdersInner { /** * Creates or updates a certificate and associates with key vault secret .
* Creates or updates a certificate and associates with key vault secret .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param certificateOrderName Name of the certificate order .
* @ param name Name of the certificate .
* @ param keyVaultCertificate Key vault certificate resource Id .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < ServiceResponse < AppServiceCertificateResourceInner > > createOrUpdateCertificateWithServiceResponseAsync ( String resourceGroupName , String certificateOrderName , String name , AppServiceCertificateResourceInner keyVaultCertificate ) { } } | if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( certificateOrderName == null ) { throw new IllegalArgumentException ( "Parameter certificateOrderName is required and cannot be null." ) ; } if ( name == null ) { throw new IllegalArgumentException ( "Parameter name is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( keyVaultCertificate == null ) { throw new IllegalArgumentException ( "Parameter keyVaultCertificate is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( keyVaultCertificate ) ; Observable < Response < ResponseBody > > observable = service . createOrUpdateCertificate ( resourceGroupName , certificateOrderName , name , this . client . subscriptionId ( ) , keyVaultCertificate , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPutOrPatchResultAsync ( observable , new TypeToken < AppServiceCertificateResourceInner > ( ) { } . getType ( ) ) ; |
public class KeyValueSource { /** * A helper method to build a KeyValueSource implementation based on the specified { @ link ISet } . < br / >
* The key returned by this KeyValueSource implementation is < b > ALWAYS < / b > the name of the set itself ,
* whereas the value are the entries of the set , one by one . This implementation behaves like a MultiMap
* with a single key but multiple values .
* @ param set set to build a KeyValueSource implementation
* @ param < V > value type of the set
* @ return KeyValueSource implementation based on the specified set */
public static < V > KeyValueSource < String , V > fromSet ( ISet < ? extends V > set ) { } } | return new SetKeyValueSource < V > ( set . getName ( ) ) ; |
public class ExprCfgParserImpl {

    /**
     * Parses interpolated code: literal text mixed with <code>${...}</code> or
     * <code>#{...}</code> expressions. The result is a concatenation chain of
     * string literals and parsed sub-expressions. Mixing '$' and '#' markers in
     * the same input is an error; an unterminated expression is an error.
     *
     * @return the parsed expression; an empty-string expression when the input
     *         is empty
     */
    public ExprCfg parseInterpolate() {
        StringBuilder text = new StringBuilder();       // accumulates literal text
        StringBuilder exprString = new StringBuilder(); // accumulates raw expression source
        ExprCfg expr = null;                            // concatenation chain built so far
        int ch;
        int exprToken = -1;                             // '$' or '#' once the first expression is seen
        while ((ch = read()) >= 0) {
            if (_checkEscape && ch == '\\') {
                // Backslash escapes '$', '#' and itself; otherwise it is literal.
                ch = read();
                if (ch == '$' || ch == '#' || ch == '\\')
                    text.append((char) ch);
                else {
                    text.append('\\');
                    unread();
                }
            } else if (ch == '$' || ch == '#') {
                int origChar = ch;
                ch = read();
                if (ch == '{') {
                    // Start of an interpolated expression.
                    if (exprToken != -1 && exprToken != origChar)
                        throw error(L.l("Mixed '#' and '$'. Expected '{0}' at '{1}'",
                                        Character.toString((char) exprToken),
                                        Character.toString((char) origChar)));
                    exprToken = origChar;
                    // Flush accumulated literal text into the chain first.
                    if (text.length() > 0) {
                        ExprCfgString right = new ExprCfgString(text.toString());
                        if (expr == null) {
                            expr = right;
                        } else {
                            expr = new ExprCfgConcat(expr, right);
                        }
                        text.setLength(0);
                    }
                    exprString.setLength(0);
                    int depth = 0; // nested '{' ... '}' depth inside the expression
                    for (ch = read(); ch > 0 && !(ch == '}' && depth == 0); ch = read()) {
                        exprString.append((char) ch);
                        switch (ch) {
                        case '{':
                            depth++;
                            break;
                        case '}':
                            depth--;
                            break;
                        case '\'':
                        case '"': {
                            // Copy quoted strings verbatim, honoring backslash escapes,
                            // so braces inside strings don't affect the depth count.
                            int end = ch;
                            for (ch = read(); ch > 0 && ch != end; ch = read()) {
                                exprString.append((char) ch);
                                if (ch == '\\') {
                                    ch = read();
                                    if (ch > 0)
                                        exprString.append((char) ch);
                                }
                            }
                            if (ch > 0)
                                exprString.append((char) ch);
                        }
                            break;
                        }
                    }
                    if (ch != '}')
                        throw error(L.l("expected '}' at end of EL expression", exprString));
                    // Parse the captured expression source and append it to the chain.
                    ExprCfg right = create(exprString.toString()).parseExpr();
                    if (expr == null) {
                        expr = right;
                    } else {
                        expr = new ExprCfgConcat(expr, right);
                    }
                } else {
                    // Lone '$' / '#' without '{': treat as literal text.
                    text.append((char) origChar);
                    unread();
                }
            } else {
                text.append((char) ch);
            }
        }
        // Flush any trailing literal text.
        if (text.length() > 0) {
            ExprCfgString right = new ExprCfgString(text.toString());
            if (expr == null) {
                expr = right;
            } else {
                expr = new ExprCfgConcat(expr, right);
            }
        }
        if (expr == null) {
            expr = new ExprCfgString("");
        }
        return expr;
    }
}
public class Matchers {

    /**
     * Determines whether an expression has an annotation of the given type.
     * This includes annotations inherited from superclasses due to @Inherited.
     *
     * @param annotationClass the binary class name of the annotation (e.g.
     *        "javax.annotation.Nullable", or "some.package.OuterClassName$InnerClassName")
     * @return a matcher that checks the declared symbol of the tree for the annotation
     */
    public static <T extends Tree> Matcher<T> hasAnnotation(final String annotationClass) {
        return new Matcher<T>() {
            @Override
            public boolean matches(T tree, VisitorState state) {
                // Resolve the tree to its declared symbol, then ask whether that
                // symbol carries (or inherits) the named annotation.
                return ASTHelpers.hasAnnotation(ASTHelpers.getDeclaredSymbol(tree), annotationClass, state);
            }
        };
    }
}
public class SchemaService { /** * Return the { @ link ApplicationDefinition } for the application in the default tenant .
* Null is returned if no application is found with the given name in the default
* tenant .
* @ return The { @ link ApplicationDefinition } for the given application or null if no
* no application such application is defined in the default tenant .
* @ deprecated This method only works for the default tenant . Use
* { @ link # getApplication ( Tenant , String ) } instead . */
public ApplicationDefinition getApplication ( String appName ) { } } | checkServiceState ( ) ; Tenant tenant = TenantService . instance ( ) . getDefaultTenant ( ) ; return getApplicationDefinition ( tenant , appName ) ; |
public class AbstractHeaderFile { /** * checks if the accessed region is accessible
* @ param offset
* the byte - offset where to start
* @ param length
* the length of the region to access
* @ return the offset , if the region is accessible
* @ throws IOException */
protected long checkRegions ( long offset , int length ) throws IOException { } } | if ( offset + length > filledUpTo ) throw new IOException ( "Can't access memory outside the file size (" + filledUpTo + " bytes). You've requested portion " + offset + "-" + ( offset + length ) + " bytes. File: " + osFile . getAbsolutePath ( ) + "\n" + "actual Filesize: " + size + "\n" + "actual filledUpTo: " + filledUpTo ) ; return offset ; |
public class KunderaCriteriaBuilder { /** * ( non - Javadoc )
* @ see
* javax . persistence . criteria . CriteriaBuilder # parameter ( java . lang . Class ,
* java . lang . String ) */
@ Override public < T > ParameterExpression < T > parameter ( Class < T > arg0 , String arg1 ) { } } | // TODO Auto - generated method stub
return null ; |
public class JobSchedulesImpl {

    /**
     * Lists all of the job schedules in the specified account.
     *
     * @param jobScheduleListOptions additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CloudJobSchedule&gt; object
     */
    public Observable<Page<CloudJobSchedule>> listAsync(final JobScheduleListOptions jobScheduleListOptions) {
        // Strip the response headers wrapper: callers only need the page body.
        return listWithServiceResponseAsync(jobScheduleListOptions)
                .map(new Func1<ServiceResponseWithHeaders<Page<CloudJobSchedule>, JobScheduleListHeaders>, Page<CloudJobSchedule>>() {
                    @Override
                    public Page<CloudJobSchedule> call(ServiceResponseWithHeaders<Page<CloudJobSchedule>, JobScheduleListHeaders> response) {
                        return response.body();
                    }
                });
    }
}
public class AbstractBandMatrix { /** * Checks the row and column indices , and returns the linear data index */
int getIndex ( int row , int column ) { } } | check ( row , column ) ; return ku + row - column + column * ( kl + ku + 1 ) ; |
public class Unchecked {

    /**
     * Wraps a {@link CheckedIntFunction} in an {@link IntFunction}, converting
     * any checked exception to a RuntimeException via the default translator.
     *
     * <p>Example:
     * <pre><code>
     * IntStream.of(1, 2, 3).mapToObj(Unchecked.intFunction(i -&gt; {
     *     if (i &lt; 0)
     *         throw new Exception("Only positive numbers allowed");
     *     return "" + i;
     * }))
     * </code></pre>
     *
     * @param function the checked function to wrap
     * @return an unchecked {@link IntFunction} delegating to {@code function}
     */
    public static <R> IntFunction<R> intFunction(CheckedIntFunction<R> function) {
        return intFunction(function, THROWABLE_TO_RUNTIME_EXCEPTION);
    }
}
public class PadOperation { /** * Returns the direction in which the padding will be done . If the direction string is null or invalid , ' Start ' will
* be taken instead . */
private String getDirectionString ( String direction ) { } } | if ( direction == null || ! ( direction . equals ( "Start" ) || direction . equals ( "End" ) ) ) { getLogger ( ) . debug ( "Unrecognized direction string. The standard value 'Start' will be used." ) ; return "Start" ; } return direction ; |
public class AllureShutdownHook { /** * Mark unfinished test cases as interrupted for each unfinished test suite , then write
* test suite result
* @ see # createFakeTestcaseWithWarning ( ru . yandex . qatools . allure . model . TestSuiteResult )
* @ see # markTestcaseAsInterruptedIfNotFinishedYet ( ru . yandex . qatools . allure . model . TestCaseResult ) */
@ Override public void run ( ) { } } | for ( Map . Entry < String , TestSuiteResult > entry : testSuites ) { for ( TestCaseResult testCase : entry . getValue ( ) . getTestCases ( ) ) { markTestcaseAsInterruptedIfNotFinishedYet ( testCase ) ; } entry . getValue ( ) . getTestCases ( ) . add ( createFakeTestcaseWithWarning ( entry . getValue ( ) ) ) ; Allure . LIFECYCLE . fire ( new TestSuiteFinishedEvent ( entry . getKey ( ) ) ) ; } |
public class DeterminismHash {

    /**
     * Updates the overall hash with a statement and its parameter set, and
     * records the (statement hash, running CRC) pair in the hashes array if
     * there is still room.
     *
     * @param stmtHash hash identifying the statement
     * @param offset position in {@code psetBuffer} where the parameter set starts
     * @param psetBuffer buffer holding the serialized parameter set
     */
    public void offerStatement(int stmtHash, int offset, ByteBuffer psetBuffer) {
        // Fold the statement hash and its parameters into the running CRC
        // BEFORE sampling getValue() below — order matters.
        m_inputCRC.update(stmtHash);
        m_inputCRC.updateFromPosition(offset, psetBuffer);
        if (m_hashCount < MAX_HASHES_COUNT) {
            // NOTE(review): writes m_hashes[m_hashCount + 1] too — assumes
            // m_hashes has capacity > MAX_HASHES_COUNT (or MAX is even and
            // the array is sized accordingly); confirm at the declaration.
            m_hashes[m_hashCount] = stmtHash;
            m_hashes[m_hashCount + 1] = (int) m_inputCRC.getValue();
        }
        // Count advances even when the pair was not stored, so the total
        // number of offered statements is still tracked.
        m_hashCount += 2;
    }
}
public class TransportContext {

    /**
     * Creates a server which will attempt to bind to a specific host and port.
     *
     * @param host host name or address to bind to
     * @param port port to bind to
     * @param bootstraps bootstraps to run on each accepted channel
     * @return the new {@link TransportServer}, configured with this context's
     *         RPC handler
     */
    public TransportServer createServer(String host, int port, List<TransportServerBootstrap> bootstraps) {
        return new TransportServer(this, host, port, rpcHandler, bootstraps);
    }
}
public class MinioClient { /** * Checks if given bucket exist and is having read access .
* < / p > < b > Example : < / b > < br >
* < pre > { @ code boolean found = minioClient . bucketExists ( " my - bucketname " ) ;
* if ( found ) {
* System . out . println ( " my - bucketname exists " ) ;
* } else {
* System . out . println ( " my - bucketname does not exist " ) ;
* } } < / pre >
* @ param bucketName Bucket name .
* @ return True if the bucket exists and the user has at least read access .
* @ throws InvalidBucketNameException upon invalid bucket name is given
* @ throws NoSuchAlgorithmException
* upon requested algorithm was not found during signature calculation
* @ throws InsufficientDataException upon getting EOFException while reading given
* @ throws IOException upon connection error
* @ throws InvalidKeyException
* upon an invalid access key or secret key
* @ throws NoResponseException upon no response from server
* @ throws XmlPullParserException upon parsing response xml
* @ throws ErrorResponseException upon unsuccessful execution
* @ throws InternalException upon internal library error */
public boolean bucketExists ( String bucketName ) throws InvalidBucketNameException , NoSuchAlgorithmException , InsufficientDataException , IOException , InvalidKeyException , NoResponseException , XmlPullParserException , ErrorResponseException , InternalException { } } | try { executeHead ( bucketName , null ) ; return true ; } catch ( ErrorResponseException e ) { if ( e . errorResponse ( ) . errorCode ( ) != ErrorCode . NO_SUCH_BUCKET ) { throw e ; } } return false ; |
public class TimeZone { /** * Returns a long standard time name of this { @ code TimeZone } suitable for
* presentation to the user in the default locale .
* < p > This method is equivalent to :
* < blockquote > < pre >
* getDisplayName ( false , { @ link # LONG } ,
* Locale . getDefault ( { @ link Locale . Category # DISPLAY } ) )
* < / pre > < / blockquote >
* @ return the human - readable name of this time zone in the default locale .
* @ since 1.2
* @ see # getDisplayName ( boolean , int , Locale )
* @ see Locale # getDefault ( Locale . Category )
* @ see Locale . Category */
public final String getDisplayName ( ) { } } | return getDisplayName ( false , LONG , Locale . getDefault ( Locale . Category . DISPLAY ) ) ; |
public class TCPMasterConnection { /** * Prepares the associated < tt > ModbusTransport < / tt > of this
* < tt > TCPMasterConnection < / tt > for use .
* @ param useRtuOverTcp True if the RTU protocol should be used over TCP
* @ throws IOException if an I / O related error occurs . */
private void prepareTransport ( boolean useRtuOverTcp ) throws IOException { } } | // If we don ' t have a transport , or the transport type has changed
if ( transport == null || ( this . useRtuOverTcp != useRtuOverTcp ) ) { // Save the flag to tell us which transport type to use
this . useRtuOverTcp = useRtuOverTcp ; // Select the correct transport
if ( useRtuOverTcp ) { logger . trace ( "prepareTransport() -> using RTU over TCP transport." ) ; transport = new ModbusRTUTCPTransport ( socket ) ; transport . setMaster ( this ) ; } else { logger . trace ( "prepareTransport() -> using standard TCP transport." ) ; transport = new ModbusTCPTransport ( socket ) ; transport . setMaster ( this ) ; } } else { logger . trace ( "prepareTransport() -> using custom transport: {}" , transport . getClass ( ) . getSimpleName ( ) ) ; transport . setSocket ( socket ) ; } transport . setTimeout ( timeout ) ; |
public class Sorter { /** * For ascending ,
* return 1 if dataArray [ o1 ] is greater than dataArray [ o2]
* return 0 if dataArray [ o1 ] is equal to dataArray [ o2]
* return - 1 if dataArray [ o1 ] is less than dataArray [ o2]
* For decending , do it in the opposize way . */
@ Override public int compare ( Integer o1 , Integer o2 ) { } } | double diff = dataArray [ o2 ] - dataArray [ o1 ] ; if ( diff == 0 ) { return 0 ; } if ( sortType == ASCENDING ) { return ( diff > 0 ) ? - 1 : 1 ; } else { return ( diff > 0 ) ? 1 : - 1 ; } |
public class CompareFileExtensions { /** * Sets the flags in the FileContentResultBean object according to the given boolean flag what
* to ignore .
* @ param fileContentResultBean
* The FileContentResultBean .
* @ param ignoreAbsolutePathEquality
* If this is true then the absolute path equality will be ignored .
* @ param ignoreExtensionEquality
* If this is true then the extension equality will be ignored .
* @ param ignoreLengthEquality
* If this is true then the length equality will be ignored .
* @ param ignoreLastModified
* If this is true then the last modified equality will be ignored .
* @ param ignoreNameEquality
* If this is true then the name equality will be ignored .
* @ param ignoreContentEquality
* If this is true then the content equality will be ignored . */
public static void compare ( final IFileContentResultBean fileContentResultBean , final boolean ignoreAbsolutePathEquality , final boolean ignoreExtensionEquality , final boolean ignoreLengthEquality , final boolean ignoreLastModified , final boolean ignoreNameEquality , final boolean ignoreContentEquality ) { } } | compare ( fileContentResultBean , ignoreAbsolutePathEquality , ignoreExtensionEquality , ignoreLengthEquality , ignoreLastModified , ignoreNameEquality ) ; final File source = fileContentResultBean . getSourceFile ( ) ; final File compare = fileContentResultBean . getFileToCompare ( ) ; if ( ! ignoreContentEquality ) { boolean contentEquality ; try { final String sourceChecksum = ChecksumExtensions . getChecksum ( source , HashAlgorithm . SHA_512 . getAlgorithm ( ) ) ; final String compareChecksum = ChecksumExtensions . getChecksum ( compare , HashAlgorithm . SHA_512 . getAlgorithm ( ) ) ; contentEquality = sourceChecksum . equals ( compareChecksum ) ; fileContentResultBean . setContentEquality ( contentEquality ) ; } catch ( final NoSuchAlgorithmException e ) { // if the algorithm is not supported check it with CRC32.
try { contentEquality = ChecksumExtensions . getCheckSumCRC32 ( source ) == ChecksumExtensions . getCheckSumCRC32 ( compare ) ; fileContentResultBean . setContentEquality ( contentEquality ) ; } catch ( IOException e1 ) { fileContentResultBean . setContentEquality ( false ) ; } } catch ( IOException e ) { fileContentResultBean . setContentEquality ( false ) ; } } else { fileContentResultBean . setContentEquality ( true ) ; } |
public class ECPProfileHandlerController { /** * Handle ecp request .
* @ param response the response
* @ param request the request */
@ PostMapping ( path = SamlIdPConstants . ENDPOINT_SAML2_IDP_ECP_PROFILE_SSO , consumes = { } } | MediaType . TEXT_XML_VALUE , SamlIdPConstants . ECP_SOAP_PAOS_CONTENT_TYPE } , produces = { MediaType . TEXT_XML_VALUE , SamlIdPConstants . ECP_SOAP_PAOS_CONTENT_TYPE } ) public void handleEcpRequest ( final HttpServletResponse response , final HttpServletRequest request ) { val soapContext = decodeSoapRequest ( request ) ; val credential = extractBasicAuthenticationCredential ( request , response ) ; if ( credential == null ) { LOGGER . error ( "Credentials could not be extracted from the SAML ECP request" ) ; return ; } if ( soapContext == null ) { LOGGER . error ( "SAML ECP request could not be determined from the authentication request" ) ; return ; } handleEcpRequest ( response , request , soapContext , credential , SAMLConstants . SAML2_PAOS_BINDING_URI ) ; |
public class FeatureInfoBuilder { /** * Project the geometry into the provided projection
* @ param geometryData geometry data
* @ param projection projection */
public void projectGeometry ( GeoPackageGeometryData geometryData , Projection projection ) { } } | if ( geometryData . getGeometry ( ) != null ) { try { SpatialReferenceSystemDao srsDao = DaoManager . createDao ( featureDao . getDb ( ) . getConnectionSource ( ) , SpatialReferenceSystem . class ) ; int srsId = geometryData . getSrsId ( ) ; SpatialReferenceSystem srs = srsDao . queryForId ( ( long ) srsId ) ; if ( ! projection . equals ( srs . getOrganization ( ) , srs . getOrganizationCoordsysId ( ) ) ) { Projection geomProjection = srs . getProjection ( ) ; ProjectionTransform transform = geomProjection . getTransformation ( projection ) ; Geometry projectedGeometry = transform . transform ( geometryData . getGeometry ( ) ) ; geometryData . setGeometry ( projectedGeometry ) ; SpatialReferenceSystem projectionSrs = srsDao . getOrCreateCode ( projection . getAuthority ( ) , Long . parseLong ( projection . getCode ( ) ) ) ; geometryData . setSrsId ( ( int ) projectionSrs . getSrsId ( ) ) ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to project geometry to projection with Authority: " + projection . getAuthority ( ) + ", Code: " + projection . getCode ( ) , e ) ; } } |
public class InstanceAggregatedAssociationOverviewMarshaller { /** * Marshall the given parameter object . */
public void marshall ( InstanceAggregatedAssociationOverview instanceAggregatedAssociationOverview , ProtocolMarshaller protocolMarshaller ) { } } | if ( instanceAggregatedAssociationOverview == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( instanceAggregatedAssociationOverview . getDetailedStatus ( ) , DETAILEDSTATUS_BINDING ) ; protocolMarshaller . marshall ( instanceAggregatedAssociationOverview . getInstanceAssociationStatusAggregatedCount ( ) , INSTANCEASSOCIATIONSTATUSAGGREGATEDCOUNT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class JobState { /** * Convert this { @ link JobState } to a json document .
* @ param jsonWriter a { @ link com . google . gson . stream . JsonWriter }
* used to write the json document
* @ param keepConfig whether to keep all configuration properties
* @ throws IOException */
public void toJson ( JsonWriter jsonWriter , boolean keepConfig ) throws IOException { } } | jsonWriter . beginObject ( ) ; writeStateSummary ( jsonWriter ) ; jsonWriter . name ( "task states" ) ; jsonWriter . beginArray ( ) ; for ( TaskState taskState : this . taskStates . values ( ) ) { taskState . toJson ( jsonWriter , keepConfig ) ; } for ( TaskState taskState : this . skippedTaskStates . values ( ) ) { taskState . toJson ( jsonWriter , keepConfig ) ; } jsonWriter . endArray ( ) ; if ( keepConfig ) { jsonWriter . name ( "properties" ) ; propsToJson ( jsonWriter ) ; } jsonWriter . endObject ( ) ; |
public class FieldFormatterRegistry { /** * 指定したパスとクラスタイプに対する { @ link FieldFormatter } を取得する 。
* < p > ただし 、 リストのインデックスやキーが含まれている場合 、 引数 「 fieldPath 」 からそれらを取り除いた値で比較する 。
* < br > 一致するフィールドに対するフォーマッタが見つからない場合は 、 タイプのみで比較する 。
* @ param fieldPath フィールドパス 。
* @ param requiredType 取得したいクラスタイプ
* @ return 見つからない場合は 、 nullを返す 。 */
public < T > FieldFormatter < T > findFormatter ( final String fieldPath , final Class < T > requiredType ) { } } | // 完全なパスで比較
FieldFormatter < T > formatter = getFormatter ( fieldPath , requiredType ) ; if ( formatter != null ) { return formatter ; } // インデックスを除去した形式で比較
final List < String > strippedPaths = new ArrayList < > ( ) ; addStrippedPropertyPaths ( strippedPaths , "" , fieldPath ) ; for ( String strippedPath : strippedPaths ) { formatter = getFormatter ( strippedPath , requiredType ) ; if ( formatter != null ) { return formatter ; } } // 見つからない場合は 、 タイプのみで比較した物を取得する
return getFormatter ( requiredType ) ; |
public class JobShop { /** * / * RandomConfigurationProblem interface */
public Configuration getRandomConfiguration ( ) { } } | List < Integer > tmp = new ArrayList < Integer > ( this . dimension ) ; for ( int i = 0 ; i < this . dimension ; ++ i ) tmp . add ( JcopRandom . nextInt ( this . machines ) ) ; return new Configuration ( tmp , "Empty JobShop created (random)" ) ; |
public class GetLicenseConfigurationResult { /** * List of flexible text strings designating license rules .
* @ param licenseRules
* List of flexible text strings designating license rules . */
public void setLicenseRules ( java . util . Collection < String > licenseRules ) { } } | if ( licenseRules == null ) { this . licenseRules = null ; return ; } this . licenseRules = new java . util . ArrayList < String > ( licenseRules ) ; |
public class DBUpdate { /** * Add the given values to the array value if they don ' t already exist in the specified field atomically
* @ param field The field to add the values to
* @ param values The values to add
* @ return this object */
public static Builder addToSet ( String field , List < ? > values ) { } } | return new Builder ( ) . addToSet ( field , values ) ; |
public class AbstractPojoQuery { @ Override public QueryResult < P > execute ( StatementOptions options , Object ... params ) { } } | return multi ( options ) . add ( params ) . execute ( ) ; |
public class AuditRecordRepositoryAuditor { /** * Get an instance of the Audit Record Repository auditor from the
* global context
* @ return Audit Record Repository auditor instance */
public static AuditRecordRepositoryAuditor getAuditor ( ) { } } | AuditorModuleContext ctx = AuditorModuleContext . getContext ( ) ; return ( AuditRecordRepositoryAuditor ) ctx . getAuditor ( AuditRecordRepositoryAuditor . class ) ; |
public class JmfTr { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . utils . ras . SibTr # exit ( com . ibm . websphere . ras . TraceComponent , java . lang . String ) */
public static void exit ( TraceComponent tc , String methodName ) { } } | if ( isTracing ( ) ) SibTr . exit ( tc , methodName ) ; |
public class CPInstancePersistenceImpl { /** * Returns the first cp instance in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp instance
* @ throws NoSuchCPInstanceException if a matching cp instance could not be found */
@ Override public CPInstance findByUuid_C_First ( String uuid , long companyId , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } } | CPInstance cpInstance = fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; if ( cpInstance != null ) { return cpInstance ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchCPInstanceException ( msg . toString ( ) ) ; |
public class InputAction { /** * Validate given input according to valid answer tokens .
* @ param input
* @ return */
private boolean checkAnswer ( String input ) { } } | StringTokenizer tok = new StringTokenizer ( validAnswers , ANSWER_SEPARATOR ) ; while ( tok . hasMoreTokens ( ) ) { if ( tok . nextElement ( ) . toString ( ) . trim ( ) . equalsIgnoreCase ( input . trim ( ) ) ) { return true ; } } log . info ( "User input is not valid - must be one of " + validAnswers ) ; return false ; |
public class WebSocketConnection { /** * Returns the extensions list as a comma separated string as specified by the rfc .
* @ return extension list string or null if no extensions are enabled */
public String getExtensionsAsString ( ) { } } | String extensionsList = null ; if ( extensions != null ) { StringBuilder sb = new StringBuilder ( ) ; for ( String key : extensions . keySet ( ) ) { sb . append ( key ) ; sb . append ( "; " ) ; } extensionsList = sb . toString ( ) . trim ( ) ; } return extensionsList ; |
public class NotificationView { /** * Set the x location of pivot point around which the contentView is rotated .
* @ param x */
public void setContentViewPivotX ( float x ) { } } | if ( DBG ) Log . v ( TAG , "setContentViewPivotX - x=" + x ) ; mContentView . setPivotY ( x ) ; |
public class BaasBox { /** * Asynchronously sends a raw rest request to the server that is specified by
* the parameters passed in
* @ param flags bitmask of flags for the request { @ see Flags }
* @ param method the method to use
* @ param endpoint the resource
* @ param body an optional json array
* @ return a raw { @ link com . baasbox . android . json . JsonObject } response wrapped as { @ link com . baasbox . android . BaasResult } */
@ Deprecated public RequestToken rest ( int method , String endpoint , JsonArray body , int flags , boolean authenticate , BaasHandler < JsonObject > jsonHandler ) { } } | return mRest . async ( RestImpl . methodFrom ( method ) , endpoint , body , authenticate , flags , jsonHandler ) ; |
public class CSVWriter { /** * / * private void findMapKeys ( StructureDefinition def , List < StructureDefinitionMappingComponent > maps , IWorkerContext context ) {
* maps . addAll ( def . getMapping ( ) ) ;
* if ( def . getBaseDefinition ( ) ! = null ) {
* StructureDefinition base = context . fetchResource ( StructureDefinition . class , def . getBaseDefinition ( ) ) ;
* findMapKeys ( base , maps , context ) ; */
public void processElement ( ElementDefinition ed ) throws Exception { } } | CSVLine line = new CSVLine ( ) ; lines . add ( line ) ; line . addString ( ed . getPath ( ) ) ; line . addString ( ed . getSliceName ( ) ) ; line . addString ( itemList ( ed . getAlias ( ) ) ) ; line . addString ( ed . getLabel ( ) ) ; line . addValue ( ed . getMin ( ) ) ; line . addValue ( ed . getMax ( ) ) ; line . addString ( ed . getMustSupport ( ) ? "Y" : "" ) ; line . addString ( ed . getIsModifier ( ) ? "Y" : "" ) ; line . addString ( ed . getIsSummary ( ) ? "Y" : "" ) ; line . addString ( itemList ( ed . getType ( ) ) ) ; line . addString ( ed . getShort ( ) ) ; line . addString ( ed . getDefinition ( ) ) ; line . addString ( ed . getComment ( ) ) ; line . addString ( ed . getRequirements ( ) ) ; line . addString ( ed . getDefaultValue ( ) != null ? renderType ( ed . getDefaultValue ( ) ) : "" ) ; line . addString ( ed . getMeaningWhenMissing ( ) ) ; line . addString ( ed . hasFixed ( ) ? renderType ( ed . getFixed ( ) ) : "" ) ; line . addString ( ed . hasPattern ( ) ? renderType ( ed . getPattern ( ) ) : "" ) ; line . addString ( ed . hasExample ( ) ? renderType ( ed . getExample ( ) . get ( 0 ) . getValue ( ) ) : "" ) ; // todo . . . ?
line . addString ( ed . hasMinValue ( ) ? renderType ( ed . getMinValue ( ) ) : "" ) ; line . addString ( ed . hasMaxValue ( ) ? renderType ( ed . getMaxValue ( ) ) : "" ) ; line . addValue ( ( ed . hasMaxLength ( ) ? Integer . toString ( ed . getMaxLength ( ) ) : "" ) ) ; if ( ed . hasBinding ( ) ) { line . addString ( ed . getBinding ( ) . getStrength ( ) != null ? ed . getBinding ( ) . getStrength ( ) . toCode ( ) : "" ) ; line . addString ( ed . getBinding ( ) . getDescription ( ) ) ; if ( ed . getBinding ( ) . getValueSet ( ) == null ) line . addString ( "" ) ; else line . addString ( ed . getBinding ( ) . getValueSet ( ) ) ; } else { line . addValue ( "" ) ; line . addValue ( "" ) ; line . addValue ( "" ) ; } line . addString ( itemList ( ed . getCode ( ) ) ) ; if ( ed . hasSlicing ( ) ) { line . addString ( itemList ( ed . getSlicing ( ) . getDiscriminator ( ) ) ) ; line . addString ( ed . getSlicing ( ) . getDescription ( ) ) ; line . addBoolean ( ed . getSlicing ( ) . getOrdered ( ) ) ; line . addString ( ed . getSlicing ( ) . getRules ( ) != null ? ed . getSlicing ( ) . getRules ( ) . toCode ( ) : "" ) ; } else { line . addValue ( "" ) ; line . addValue ( "" ) ; line . addValue ( "" ) ; } if ( ed . getBase ( ) != null ) { line . addString ( ed . getBase ( ) . getPath ( ) ) ; line . addValue ( ed . getBase ( ) . getMin ( ) ) ; line . addValue ( ed . getBase ( ) . getMax ( ) ) ; } else { line . addValue ( "" ) ; line . addValue ( "" ) ; line . addValue ( "" ) ; } line . addString ( itemList ( ed . getCondition ( ) ) ) ; line . addString ( itemList ( ed . getConstraint ( ) ) ) ; for ( StructureDefinitionMappingComponent mapKey : def . getMapping ( ) ) { for ( ElementDefinitionMappingComponent map : ed . getMapping ( ) ) { if ( map . getIdentity ( ) . equals ( mapKey . getIdentity ( ) ) ) line . addString ( map . getMap ( ) ) ; } } |
public class CmsTemplateContextManager { /** * Utility method which either reads a property from the template used for a specific resource , or from the template context provider used for the resource if available . < p >
* @ param cms the CMS context to use
* @ param res the resource from whose template or template context provider the property should be read
* @ param propertyName the property name
* @ param fallbackValue the fallback value
* @ return the property value */
public String readPropertyFromTemplate ( CmsObject cms , CmsResource res , String propertyName , String fallbackValue ) { } } | try { CmsProperty templateProp = cms . readPropertyObject ( res , CmsPropertyDefinition . PROPERTY_TEMPLATE , true ) ; String templatePath = templateProp . getValue ( ) . trim ( ) ; if ( hasPropertyPrefix ( templatePath ) ) { I_CmsTemplateContextProvider provider = getTemplateContextProvider ( templatePath ) ; return provider . readCommonProperty ( cms , propertyName , fallbackValue ) ; } else { return cms . readPropertyObject ( templatePath , propertyName , false ) . getValue ( fallbackValue ) ; } } catch ( Exception e ) { LOG . error ( e ) ; return fallbackValue ; } |
public class Index { /** * Search for query rules
* @ param query the query
* @ param requestOptions Options to pass to this request */
public JSONObject searchRules ( RuleQuery query , RequestOptions requestOptions ) throws AlgoliaException , JSONException { } } | JSONObject body = new JSONObject ( ) ; if ( query . getQuery ( ) != null ) { body = body . put ( "query" , query . getQuery ( ) ) ; } if ( query . getAnchoring ( ) != null ) { body = body . put ( "anchoring" , query . getAnchoring ( ) ) ; } if ( query . getContext ( ) != null ) { body = body . put ( "context" , query . getContext ( ) ) ; } if ( query . getPage ( ) != null ) { body = body . put ( "page" , query . getPage ( ) ) ; } if ( query . getHitsPerPage ( ) != null ) { body = body . put ( "hitsPerPage" , query . getHitsPerPage ( ) ) ; } return client . postRequest ( "/1/indexes/" + encodedIndexName + "/rules/search" , body . toString ( ) , false , true , requestOptions ) ; |
public class ZipUtils { /** * Unzips a zip file into an output folder .
* @ param zipFile the zip file
* @ param outputFolder the output folder where the files
* @ throws IOException */
public static void unZipFiles ( File zipFile , File outputFolder ) throws IOException { } } | InputStream is = new BufferedInputStream ( new FileInputStream ( zipFile ) ) ; try { unZipFiles ( is , outputFolder ) ; } finally { IOUtils . closeQuietly ( is ) ; } |
public class TIFFUtilities { /** * Splits all pages from the input TIFF file to one file per page in the
* output directory .
* @ param inputFile
* @ param outputDirectory
* @ return generated files
* @ throws IOException */
public static List < File > split ( File inputFile , File outputDirectory ) throws IOException { } } | ImageInputStream input = null ; List < File > outputFiles = new ArrayList < > ( ) ; try { input = ImageIO . createImageInputStream ( inputFile ) ; List < TIFFPage > pages = getPages ( input ) ; int pageNo = 1 ; for ( TIFFPage tiffPage : pages ) { ArrayList < TIFFPage > outputPages = new ArrayList < TIFFPage > ( 1 ) ; ImageOutputStream outputStream = null ; try { File outputFile = new File ( outputDirectory , String . format ( "%04d" , pageNo ) + ".tif" ) ; outputStream = ImageIO . createImageOutputStream ( outputFile ) ; outputPages . clear ( ) ; outputPages . add ( tiffPage ) ; writePages ( outputStream , outputPages ) ; outputFiles . add ( outputFile ) ; } finally { if ( outputStream != null ) { outputStream . flush ( ) ; outputStream . close ( ) ; } } ++ pageNo ; } } finally { if ( input != null ) { input . close ( ) ; } } return outputFiles ; |
public class WaitFor { /** * Waits up to the provided wait time for a cookie exists in the application with the provided
* cookieName . This information will be logged and recorded , with a
* screenshot for traceability and added debugging support .
* @ param expectedCookieName the name of the cookie
* @ param seconds the number of seconds to wait */
public void cookieExists ( double seconds , String expectedCookieName ) { } } | double end = System . currentTimeMillis ( ) + ( seconds * 1000 ) ; while ( ! app . is ( ) . cookiePresent ( expectedCookieName ) && System . currentTimeMillis ( ) < end ) ; double timeTook = Math . min ( ( seconds * 1000 ) - ( end - System . currentTimeMillis ( ) ) , seconds * 1000 ) / 1000 ; checkCookieExists ( expectedCookieName , seconds , timeTook ) ; |
public class Bech32 { /** * Encode a Bech32 string . */
public static String encode ( String hrp , final byte [ ] values ) { } } | checkArgument ( hrp . length ( ) >= 1 , "Human-readable part is too short" ) ; checkArgument ( hrp . length ( ) <= 83 , "Human-readable part is too long" ) ; hrp = hrp . toLowerCase ( Locale . ROOT ) ; byte [ ] checksum = createChecksum ( hrp , values ) ; byte [ ] combined = new byte [ values . length + checksum . length ] ; System . arraycopy ( values , 0 , combined , 0 , values . length ) ; System . arraycopy ( checksum , 0 , combined , values . length , checksum . length ) ; StringBuilder sb = new StringBuilder ( hrp . length ( ) + 1 + combined . length ) ; sb . append ( hrp ) ; sb . append ( '1' ) ; for ( byte b : combined ) { sb . append ( CHARSET . charAt ( b ) ) ; } return sb . toString ( ) ; |
public class DefaultProcessDiagramCanvas { /** * This method makes coordinates of connection flow better .
* @ param sourceShapeType
* @ param targetShapeType
* @ param sourceGraphicInfo
* @ param targetGraphicInfo
* @ param graphicInfoList */
public List < GraphicInfo > connectionPerfectionizer ( SHAPE_TYPE sourceShapeType , SHAPE_TYPE targetShapeType , GraphicInfo sourceGraphicInfo , GraphicInfo targetGraphicInfo , List < GraphicInfo > graphicInfoList ) { } } | Shape shapeFirst = createShape ( sourceShapeType , sourceGraphicInfo ) ; Shape shapeLast = createShape ( targetShapeType , targetGraphicInfo ) ; if ( graphicInfoList != null && graphicInfoList . size ( ) > 0 ) { GraphicInfo graphicInfoFirst = graphicInfoList . get ( 0 ) ; GraphicInfo graphicInfoLast = graphicInfoList . get ( graphicInfoList . size ( ) - 1 ) ; if ( shapeFirst != null ) { graphicInfoFirst . setX ( shapeFirst . getBounds2D ( ) . getCenterX ( ) ) ; graphicInfoFirst . setY ( shapeFirst . getBounds2D ( ) . getCenterY ( ) ) ; } if ( shapeLast != null ) { graphicInfoLast . setX ( shapeLast . getBounds2D ( ) . getCenterX ( ) ) ; graphicInfoLast . setY ( shapeLast . getBounds2D ( ) . getCenterY ( ) ) ; } Point p = null ; if ( shapeFirst != null ) { Line2D . Double lineFirst = new Line2D . Double ( graphicInfoFirst . getX ( ) , graphicInfoFirst . getY ( ) , graphicInfoList . get ( 1 ) . getX ( ) , graphicInfoList . get ( 1 ) . getY ( ) ) ; p = getIntersection ( shapeFirst , lineFirst ) ; if ( p != null ) { graphicInfoFirst . setX ( p . getX ( ) ) ; graphicInfoFirst . setY ( p . getY ( ) ) ; } } if ( shapeLast != null ) { Line2D . Double lineLast = new Line2D . Double ( graphicInfoLast . getX ( ) , graphicInfoLast . getY ( ) , graphicInfoList . get ( graphicInfoList . size ( ) - 2 ) . getX ( ) , graphicInfoList . get ( graphicInfoList . size ( ) - 2 ) . getY ( ) ) ; p = getIntersection ( shapeLast , lineLast ) ; if ( p != null ) { graphicInfoLast . setX ( p . getX ( ) ) ; graphicInfoLast . setY ( p . getY ( ) ) ; } } } return graphicInfoList ; |
public class DownloadFile { /** * Validates the { @ code downloadedFile } ' s contents against a SHA - 256
* checksum provided with it .
* @ return { @ code true } if the computed checksum matches { @ code checksum } ,
* { @ code false } if it does not match or if the { @ code checksum } is
* { @ code null } .
* @ throws IOException Thrown if an IO error occurred while reading the
* { @ link File downloadedFile } . */
public boolean validateContent ( ) throws IOException { } } | if ( checksum == null ) { return false ; } final FileInputStream fis = new FileInputStream ( downloadedFile ) ; final String computedHash = computeHashSHA256 ( fis ) ; return computedHash . equals ( checksum ) ; |
public class LocationAttributes { /** * Returns the column number of an element ( SAX flavor )
* @ param attrs
* the element ' s attributes that hold the location information
* @ return the element ' s column number or < code > - 1 < / code > if
* < code > attrs < / code > has no location information . */
public static int getColumn ( Attributes attrs ) { } } | String col = attrs . getValue ( URI , COL_ATTR ) ; return col != null ? Integer . parseInt ( col ) : - 1 ; |
public class ClientRequestExecutorFactory { /** * Create a ClientRequestExecutor for the given { @ link SocketDestination } .
* @ param dest { @ link SocketDestination } */
@ Override public void createAsync ( final SocketDestination dest , final KeyedResourcePool < SocketDestination , ClientRequestExecutor > pool ) throws Exception { } } | int numCreated = created . incrementAndGet ( ) ; if ( logger . isDebugEnabled ( ) ) logger . debug ( "Creating socket " + numCreated + " for " + dest . getHost ( ) + ":" + dest . getPort ( ) + " using protocol " + dest . getRequestFormatType ( ) . getCode ( ) ) ; SocketChannel socketChannel = null ; ClientRequestExecutor clientRequestExecutor = null ; long durationMs = 0 ; try { socketChannel = SocketChannel . open ( ) ; socketChannel . socket ( ) . setReceiveBufferSize ( this . socketBufferSize ) ; socketChannel . socket ( ) . setSendBufferSize ( this . socketBufferSize ) ; socketChannel . socket ( ) . setTcpNoDelay ( true ) ; socketChannel . socket ( ) . setSoTimeout ( soTimeoutMs ) ; socketChannel . socket ( ) . setKeepAlive ( this . socketKeepAlive ) ; socketChannel . configureBlocking ( false ) ; socketChannel . connect ( new InetSocketAddress ( dest . getHost ( ) , dest . getPort ( ) ) ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Created socket " + numCreated + " for " + dest . getHost ( ) + ":" + dest . getPort ( ) + " using protocol " + dest . getRequestFormatType ( ) . getCode ( ) + " after " + durationMs + " ms." ) ; } ClientRequestSelectorManager selectorManager = selectorManagers [ counter . getAndIncrement ( ) % selectorManagers . length ] ; Selector selector = selectorManager . getSelector ( ) ; clientRequestExecutor = new ClientRequestExecutor ( selector , socketChannel , socketBufferSize , idleConnectionTimeoutNs , dest ) ; int timeoutMs = this . getTimeout ( ) ; ProtocolNegotiatorClientRequest protocolRequest = new ProtocolNegotiatorClientRequest ( dest . 
getRequestFormatType ( ) ) ; NonblockingStoreCallback callback = new NonblockingStoreCallback ( ) { @ Override public void requestComplete ( Object result , long requestTime ) { if ( result instanceof Exception ) { Exception e = ( Exception ) result ; /* * There are 2 places where we can get a store timeout
* Exception
* 1 ) While doing connect - the machine was up once , but
* not anymore . In that case , TCP SYN will be sent by
* the client , but server would not sent TCP ACK as it
* is dead .
* 2 ) After connect doing Protocol Negotiation - Most
* likely the server and kernel is up , but the process
* is in a zombie state because of hard drive failure or
* stuck in shutdown or doing a GC . This can be
* intermittent or hard failures . Before this code
* change , if the process entered this state , Voldemort
* clients may not detect the failure immediately . They
* are treated as normal errors , instead of catastrophic
* erros . This was the reason before it is better to kill
* the process on a machine and let the machine stay up .
* After this code change they will be treated as
* connection failures ( catastrophic errors ) to help
* recover the clients faster .
* The second case can increase the false positives , but
* if a server is consistently timing out it is better
* to treat the server as dead and let the clients
* recover faster . */
if ( e instanceof StoreTimeoutException ) { e = new UnreachableStoreException ( "Error establishing connection for destination " + dest , new ConnectException ( e . getMessage ( ) ) ) ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Reporting exception to pool " + e . getClass ( ) + " for destination " + dest ) ; } pool . reportException ( dest , e ) ; } } } ; NonblockingStoreCallbackClientRequest < String > clientRequest = new NonblockingStoreCallbackClientRequest < String > ( pool , dest , protocolRequest , clientRequestExecutor , callback , stats ) ; clientRequestExecutor . setConnectRequest ( clientRequest , timeoutMs ) ; selectorManager . add ( clientRequestExecutor ) ; selector . wakeup ( ) ; } catch ( Exception e ) { // Make sure not to leak socketChannels
if ( socketChannel != null ) { try { socketChannel . close ( ) ; } catch ( Exception ex ) { if ( logger . isEnabledFor ( Level . WARN ) ) logger . warn ( ex , ex ) ; } } // If clientRequestExector is not null , some additional clean up may
// be warranted . However , clientRequestExecutor . close ( ) , the
// " obvious " clean up , is not safe to call here . This is because
// . close ( ) checks in a resource to the KeyedResourcePool that was
// never checked out .
throw UnreachableStoreException . wrap ( "Error establishing connection for destination " + dest , e ) ; } if ( stats != null ) { stats . incrementCount ( dest , ClientSocketStats . Tracked . CONNECTION_CREATED_EVENT ) ; stats . recordConnectionEstablishmentTimeUs ( dest , durationMs * Time . US_PER_MS ) ; } |
public class XtextSemanticSequencer { /** * Contexts :
* CharacterRange returns CharacterRange
* Constraint :
* ( left = CharacterRange _ CharacterRange _ 1_0 right = Keyword ) */
protected void sequence_CharacterRange ( ISerializationContext context , CharacterRange semanticObject ) { } } | if ( errorAcceptor != null ) { if ( transientValues . isValueTransient ( semanticObject , XtextPackage . Literals . CHARACTER_RANGE__LEFT ) == ValueTransient . YES ) errorAcceptor . accept ( diagnosticProvider . createFeatureValueMissing ( semanticObject , XtextPackage . Literals . CHARACTER_RANGE__LEFT ) ) ; if ( transientValues . isValueTransient ( semanticObject , XtextPackage . Literals . CHARACTER_RANGE__RIGHT ) == ValueTransient . YES ) errorAcceptor . accept ( diagnosticProvider . createFeatureValueMissing ( semanticObject , XtextPackage . Literals . CHARACTER_RANGE__RIGHT ) ) ; } SequenceFeeder feeder = createSequencerFeeder ( context , semanticObject ) ; feeder . accept ( grammarAccess . getCharacterRangeAccess ( ) . getCharacterRangeLeftAction_1_0 ( ) , semanticObject . getLeft ( ) ) ; feeder . accept ( grammarAccess . getCharacterRangeAccess ( ) . getRightKeywordParserRuleCall_1_2_0 ( ) , semanticObject . getRight ( ) ) ; feeder . finish ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.