signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PropositionUtil { /** * Binary search for a primitive parameter by timestamp , optimized for when * the parameters are stored in a list that does not implement * < code > java . util . RandomAccess < / code > . * @ param list * a < code > List < / code > of < code > PrimitiveParameter < / code > * objects all with the same paramId , cannot be < code > null < / code > . * @ param tstamp * the timestamp we ' re interested in finding . * @ return a < code > PrimitiveParameter < / code > , or < code > null < / code > if * not found . */ private static < U extends Unit > PrimitiveParameter iteratorBinarySearch ( List < PrimitiveParameter > list , long tstamp ) { } }
int low = 0 ; int high = list . size ( ) - 1 ; ListIterator < PrimitiveParameter > i = list . listIterator ( ) ; while ( low <= high ) { /* * We use > > > instead of > > or / 2 to avoid overflow . Sun ' s * implementation of binary search actually doesn ' t do this ( bug * # 5045582 ) . */ int mid = ( low + high ) >>> 1 ; PrimitiveParameter midVal = iteratorBinarySearchGet ( i , mid ) ; long cmp = midVal . getPosition ( ) - tstamp ; if ( cmp < 0 ) { low = mid + 1 ; } else if ( cmp > 0 ) { high = mid - 1 ; } else { return midVal ; } } return null ;
public class SassRubyGenerator { /** * ( non - Javadoc ) * @ see * net . jawr . web . resource . bundle . generator . AbstractCachedGenerator # resetCache */ @ Override protected void resetCache ( ) { } }
super . resetCache ( ) ; cacheProperties . put ( JawrConstant . SASS_GENERATOR_URL_MODE , useAbsoluteURL ? SASS_GENERATOR_ABSOLUTE_URL_MODE : SASS_GENERATOR_RELATIVE_URL_MODE ) ;
public class MiniBenchmark { /** * Parses the input args with a command line format , using * { @ link org . apache . commons . cli . CommandLineParser } . * @ param args the input args * @ return true if parsing succeeded */ private static boolean parseInputArgs ( String [ ] args ) { } }
CommandLineParser parser = new DefaultParser ( ) ; CommandLine cmd ; try { cmd = parser . parse ( OPTIONS , args ) ; } catch ( ParseException e ) { System . out . println ( "Failed to parse input args: " + e ) ; return false ; } sHelp = cmd . hasOption ( "help" ) ; sType = OperationType . valueOf ( cmd . getOptionValue ( "type" , "READ" ) ) ; sFileSize = FormatUtils . parseSpaceSize ( cmd . getOptionValue ( "fileSize" , "1KB" ) ) ; sIterations = Integer . parseInt ( cmd . getOptionValue ( "iterations" , "1" ) ) ; sConcurrency = Integer . parseInt ( cmd . getOptionValue ( "concurrency" , "1" ) ) ; return true ;
public class JDBCResultSet { /** * < ! - - start generic documentation - - > * Updates the designated column with a < code > String < / code > value . * The updater methods are used to update column values in the * current row or the insert row . The updater methods do not * update the underlying database ; instead the < code > updateRow < / code > or * < code > insertRow < / code > methods are called to update the database . * < ! - - end generic documentation - - > * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * HSQLDB supports this feature . < p > * < / div > * < ! - - end release - specific documentation - - > * @ param columnLabel the label for the column specified with the SQL AS clause . If the SQL AS clause was not specified , then the label is the name of the column * @ param x the new column value * @ exception SQLException if a database access error occurs , * the result set concurrency is < code > CONCUR _ READ _ ONLY < / code > * or this method is called on a closed result set * @ exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @ since JDK 1.2 ( JDK 1.1 . x developers : read the overview for * JDBCResultSet ) */ public void updateString ( String columnLabel , String x ) throws SQLException { } }
updateString ( findColumn ( columnLabel ) , x ) ;
public class PlatformSummary { /** * The tiers in which the platform runs . * @ return The tiers in which the platform runs . */ public java . util . List < String > getSupportedTierList ( ) { } }
if ( supportedTierList == null ) { supportedTierList = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return supportedTierList ;
public class InteropFramework { /** * Support for content negotiation , jax - rs style . Create a list of media * type supported by the framework . * @ see < a href = " http : / / docs . oracle . com / javaee / 6 / tutorial / doc / gkqbq . html " > Content Negotiation < / a > */ public List < Variant > getVariants ( ) { } }
List < Variant > vs = new ArrayList < Variant > ( ) ; for ( Map . Entry < String , ProvFormat > entry : mimeTypeRevMap . entrySet ( ) ) { if ( isOutputFormat ( entry . getValue ( ) ) ) { String [ ] parts = entry . getKey ( ) . split ( "/" ) ; MediaType m = new MediaType ( parts [ 0 ] , parts [ 1 ] ) ; vs . add ( new Variant ( m , ( java . util . Locale ) null , ( String ) null ) ) ; } } return vs ;
public class ColVals { /** * < p > Evaluate column string val for * SQL statement INSERT or UPDATE . * For String ' [ val ] ' or NULL < / p > * @ param pNm column name * @ return Object column val * @ throws ExceptionWithCode if column not found */ public final Object evObjVl ( final String pNm ) throws ExceptionWithCode { } }
if ( this . ints != null && this . ints . keySet ( ) . contains ( pNm ) ) { return this . ints . get ( pNm ) ; } if ( this . longs != null && this . longs . keySet ( ) . contains ( pNm ) ) { return this . longs . get ( pNm ) ; } if ( this . strs != null && this . strs . keySet ( ) . contains ( pNm ) ) { String val = ( String ) this . strs . get ( pNm ) ; if ( val == null ) { return null ; } else { if ( this . exprs != null && this . exprs . contains ( pNm ) ) { return val ; } else { return "'" + val + "'" ; } } } if ( this . floats != null && this . floats . keySet ( ) . contains ( pNm ) ) { return this . floats . get ( pNm ) ; } if ( this . doubles != null && this . doubles . keySet ( ) . contains ( pNm ) ) { return this . doubles . get ( pNm ) ; } if ( this . idLongs != null && this . idLongs . keySet ( ) . contains ( pNm ) ) { return this . idLongs . get ( pNm ) ; } if ( this . idStrs != null && this . idStrs . keySet ( ) . contains ( pNm ) ) { return this . idStrs . get ( pNm ) ; } if ( this . idInts != null && this . idInts . keySet ( ) . contains ( pNm ) ) { return this . idInts . get ( pNm ) ; } throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "There is no field - " + pNm ) ;
public class UserCoreDao { /** * Build where ( or selection ) args for the value * @ param value * value * @ return where args */ public String [ ] buildWhereArgs ( ColumnValue value ) { } }
String [ ] args = null ; if ( value != null ) { if ( value . getValue ( ) != null && value . getTolerance ( ) != null ) { args = getValueToleranceRange ( value ) ; } else { args = buildWhereArgs ( value . getValue ( ) ) ; } } return args ;
public class EncodedGradientsAccumulator {

    /**
     * This method accepts updates suitable for StepFunction, and accumulates/propagates
     * it across all workers.
     *
     * Flow: lazily allocate the residual accumulator outside any workspace, add the
     * incoming gradients into it, block until this worker is registered for the current
     * cycle (unless bypass mode is on), broadcast, then wait for all consumers.
     * Any failure is recorded in {@code throwable} and rethrown as RuntimeException.
     *
     * @param array gradient updates to fold into the shared accumulator
     * @param iterationNumber current iteration, forwarded to the broadcast handler
     * @param epochNumber current epoch, forwarded to the broadcast handler
     */
    @Override
    public void storeUpdate(INDArray array, int iterationNumber, int epochNumber) {
        try {
            if (accumulator.get() == null) {
                // we don't want accumulator to be attached to workspaces, so allocate
                // it while scoped out of all of them
                try (MemoryWorkspace workspace = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
                    accumulator.set(Nd4j.create(array.shape(), array.ordering()));
                }
            }

            // accumulate gradient updates in the residual array
            accumulator.get().addi(array);

            if (isDebug)
                log.info("thread {} locking at Register", Thread.currentThread().getId());

            // block until ParallelWrapper sends us a message about the number of
            // threads in this cycle; spin-sleep and bail out if another thread
            // already recorded a failure
            if (!bypassMode.get())
                while (!registered.get()) {
                    ThreadUtils.uncheckedSleep(1);
                    if (throwable.isTriggered())
                        throw new RuntimeException(throwable.get());
                }

            if (isDebug)
                log.info("thread {} unlocking at Register", Thread.currentThread().getId());

            // propagate changes & modify accumulator
            handler.broadcastUpdates(accumulator.get(), iterationNumber, epochNumber);

            // we're blocking here until all workers are done broadcasting updates
            synchronize(currentConsumers.get());
        } catch (Exception e) {
            // record first failure for other spinning threads, then rethrow
            throwable.setIfFirst(e);
            throw new RuntimeException(e);
        }
    }
}
public class SQLiteModelMethod { /** * return true if is use a bean ( that this dao manage ) as parameter . * @ return return true if is use a bean ( that this dao manage ) as parameter . */ public boolean hasBeanAsParameter ( ) { } }
TypeName entityTypeName = TypeUtility . typeName ( this . getEntity ( ) . getElement ( ) ) ; for ( Pair < String , TypeName > item : this . parameters ) { if ( item . value1 . equals ( entityTypeName ) ) { return true ; } } return false ;
public class RequestParam { /** * Returns a request parameter as enum value . * @ param < T > Enum type * @ param request Request . * @ param param Parameter name . * @ param enumClass Enum class * @ param defaultValue Default value . * @ return Parameter value or the default value if it is not set or an invalid enum value . */ @ SuppressWarnings ( "unchecked" ) public static < T extends Enum > @ Nullable T getEnum ( @ NotNull ServletRequest request , @ NotNull String param , @ NotNull Class < T > enumClass , @ Nullable T defaultValue ) { } }
String value = RequestParam . get ( request , param ) ; if ( StringUtils . isNotEmpty ( value ) ) { try { return ( T ) Enum . valueOf ( enumClass , value ) ; } catch ( IllegalArgumentException ex ) { // ignore , return default } } return defaultValue ;
public class Constants { /** * Returns a string representation of a constant value ( given in * internal representation ) , quoted and formatted as in Java source . */ public static String format ( Object value , Type type ) { } }
value = decode ( value , type ) ; switch ( type . getTag ( ) ) { case BYTE : return formatByte ( ( Byte ) value ) ; case LONG : return formatLong ( ( Long ) value ) ; case FLOAT : return formatFloat ( ( Float ) value ) ; case DOUBLE : return formatDouble ( ( Double ) value ) ; case CHAR : return formatChar ( ( Character ) value ) ; } if ( value instanceof String ) return formatString ( ( String ) value ) ; return value + "" ;
public class WaitingProcessOutputLineListener { /** * Monitors the output of the process to check whether the wait condition is satisfied . */ @ Override public void onOutputLine ( String line ) { } }
if ( waitLatch . getCount ( ) > 0 && message != null && line . matches ( message ) ) { waitLatch . countDown ( ) ; }
public class AbstractSailthruClient { /** * HTTP POST Request with Map * @ param action * @ param data * @ throws IOException */ public JsonResponse apiPost ( ApiAction action , Map < String , Object > data ) throws IOException { } }
return httpRequestJson ( action , HttpRequestMethod . POST , data ) ;
public class BuildPhase { /** * Additional information about a build phase , especially to help troubleshoot a failed build . * @ param contexts * Additional information about a build phase , especially to help troubleshoot a failed build . */ public void setContexts ( java . util . Collection < PhaseContext > contexts ) { } }
if ( contexts == null ) { this . contexts = null ; return ; } this . contexts = new java . util . ArrayList < PhaseContext > ( contexts ) ;
public class CoreAuthenticationUtils { /** * Convert attribute values to multi valued objects . * @ param attributes the attributes * @ return the map of attributes to return */ public static Map < String , List < Object > > convertAttributeValuesToMultiValuedObjects ( final Map < String , Object > attributes ) { } }
val entries = attributes . entrySet ( ) ; return entries . stream ( ) . collect ( Collectors . toMap ( Map . Entry :: getKey , entry -> { val value = entry . getValue ( ) ; return CollectionUtils . toCollection ( value , ArrayList . class ) ; } ) ) ;
public class SocketExtensions {

    /**
     * Closes the given {@link Socket} if it is non-null and not already
     * closed; otherwise does nothing.
     *
     * @param clientSocket the client socket, may be {@code null}
     * @throws IOException if closing the socket fails
     */
    public static void close(final Socket clientSocket) throws IOException {
        if (clientSocket == null || clientSocket.isClosed()) {
            return; // nothing to do
        }
        clientSocket.close();
    }
}
public class ConverterSet { /** * Returns a copy of this set , with the given converter added . If a * matching converter is already in the set , the given converter replaces * it . If the converter is exactly the same as one already in the set , the * original set is returned . * @ param converter converter to add , must not be null * @ param removed if not null , element 0 is set to the removed converter * @ throws NullPointerException if converter is null */ ConverterSet add ( Converter converter , Converter [ ] removed ) { } }
Converter [ ] converters = iConverters ; int length = converters . length ; for ( int i = 0 ; i < length ; i ++ ) { Converter existing = converters [ i ] ; if ( converter . equals ( existing ) ) { // Already in the set . if ( removed != null ) { removed [ 0 ] = null ; } return this ; } if ( converter . getSupportedType ( ) == existing . getSupportedType ( ) ) { // Replace the converter . Converter [ ] copy = new Converter [ length ] ; for ( int j = 0 ; j < length ; j ++ ) { if ( j != i ) { copy [ j ] = converters [ j ] ; } else { copy [ j ] = converter ; } } if ( removed != null ) { removed [ 0 ] = existing ; } return new ConverterSet ( copy ) ; } } // Not found , so add it . Converter [ ] copy = new Converter [ length + 1 ] ; System . arraycopy ( converters , 0 , copy , 0 , length ) ; copy [ length ] = converter ; if ( removed != null ) { removed [ 0 ] = null ; } return new ConverterSet ( copy ) ;
public class CommerceAvailabilityEstimatePersistenceImpl { /** * Returns all the commerce availability estimates where groupId = & # 63 ; . * @ param groupId the group ID * @ return the matching commerce availability estimates */ @ Override public List < CommerceAvailabilityEstimate > findByGroupId ( long groupId ) { } }
return findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class NioGroovyMethods { /** * Create a new InputStream for this file and passes it into the closure . * This method ensures the stream is closed after the closure returns . * @ param self a Path * @ param closure a closure * @ return the value returned by the closure * @ throws java . io . IOException if an IOException occurs . * @ see org . codehaus . groovy . runtime . IOGroovyMethods # withStream ( java . io . InputStream , groovy . lang . Closure ) * @ since 2.3.0 */ public static Object withInputStream ( Path self , @ ClosureParams ( value = SimpleType . class , options = "java.io.InputStream" ) Closure closure ) throws IOException { } }
return IOGroovyMethods . withStream ( newInputStream ( self ) , closure ) ;
public class ConnectorServiceImpl { /** * Declarative Services method for unsetting the RRS XA resource factory service implementation reference . * @ param ref reference to the service */ protected void unsetRRSXAResourceFactory ( ServiceReference < Object > ref ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "unsetRRSXAResourceFactory" , ref ) ; rrsXAResFactorySvcRef . unsetReference ( ref ) ;
public class VirtualCdj {

    /**
     * Gets the current playback position, then checks if we are within
     * {@link BeatSender#SLEEP_THRESHOLD} ms before an upcoming beat, or
     * {@link BeatSender#BEAT_THRESHOLD} ms after one, sleeping in 2 ms steps
     * until that is no longer the case (or playback stops).
     *
     * @return the current playback position, potentially after having delayed
     *         a bit so that it is not too near a beat
     */
    private Snapshot avoidBeatPacket() {
        Snapshot playState = getPlaybackPosition();
        double distance = playState.distanceFromBeat();
        // Negative distance = before the next beat; positive = after the last
        // one. Spin while playing and inside either danger window.
        while (playing.get() &&
                (((distance < 0.0) && (Math.abs(distance) <= BeatSender.SLEEP_THRESHOLD)) ||
                        ((distance >= 0.0) && (distance <= (BeatSender.BEAT_THRESHOLD + 1))))) {
            try {
                Thread.sleep(2);
            } catch (InterruptedException e) {
                // Deliberately best-effort: log and keep waiting.
                logger.warn("Interrupted while sleeping to avoid beat packet; ignoring.", e);
            }
            // Re-sample position after each nap; the deck keeps moving.
            playState = getPlaybackPosition();
            distance = playState.distanceFromBeat();
        }
        return playState;
    }
}
public class GetLoadBalancerMetricDataRequestMarshaller {

    /**
     * Marshalls the given request object field-by-field into the protocol
     * marshaller, using the per-field binding constants.
     *
     * @param getLoadBalancerMetricDataRequest the request to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetLoadBalancerMetricDataRequest getLoadBalancerMetricDataRequest, ProtocolMarshaller protocolMarshaller) {
        if (getLoadBalancerMetricDataRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per request field, paired with its binding.
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getLoadBalancerName(), LOADBALANCERNAME_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getMetricName(), METRICNAME_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getPeriod(), PERIOD_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getStartTime(), STARTTIME_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getEndTime(), ENDTIME_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getUnit(), UNIT_BINDING);
            protocolMarshaller.marshall(getLoadBalancerMetricDataRequest.getStatistics(), STATISTICS_BINDING);
        } catch (Exception e) {
            // Broad catch is the SDK's generated-code convention; wrap with cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractDecoratorWithLabel { /** * Set the label of widget . * @ param plabel a label widget */ @ UiChild ( limit = 1 , tagname = "label" ) public void setChildLabel ( final Widget plabel ) { } }
label = plabel ; getLayout ( ) . add ( label ) ;
public class BaseLinkedQueue {

    /**
     * {@inheritDoc}<br>
     * IMPLEMENTATION NOTES:<br>
     * This is an O(n) operation as we run through all the nodes and count them.<br>
     * The accuracy of the value returned by this method is subject to races
     * with producer/consumer threads. In particular, when racing with the
     * consumer thread this method may under-estimate the size.<br>
     *
     * @see java.util.Queue#size()
     */
    @Override
    public final int size() {
        // Read consumer first. This is important because if the producer node is 'older' than the consumer
        // the consumer may overtake it (consume past it), invalidating the 'snapshot' notion of size.
        LinkedQueueNode<E> chaserNode = lvConsumerNode();
        LinkedQueueNode<E> producerNode = lvProducerNode();
        int size = 0;
        // must chase the nodes all the way to the producer node, but there's no need to count beyond expected head.
        while (chaserNode != producerNode && // don't go past producer node
                chaserNode != null && // stop at last node
                size < Integer.MAX_VALUE) // stop at max int
        {
            LinkedQueueNode<E> next;
            next = chaserNode.lvNext();
            // check if this node has been consumed; a self-link marks a
            // consumed node, so return what we have counted so far
            if (next == chaserNode) {
                return size;
            }
            chaserNode = next;
            size++;
        }
        return size;
    }
}
public class UpgradableLock {

    /**
     * Acquire an exclusive write lock, possibly blocking until interrupted.
     * Fast path: try the write lock directly. Slow path: take the upgrade
     * lock first, retry the write lock, and queue as a write waiter if still
     * contended; finally unwind the upgrade bookkeeping.
     *
     * @param locker object trying to become lock owner
     * @throws InterruptedException if the thread was or becomes interrupted
     */
    public final void lockForWriteInterruptibly(L locker) throws InterruptedException {
        // Fail fast if the caller arrived already interrupted.
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }
        if (!tryLockForWrite(locker)) {
            Result upgradeResult = lockForUpgradeInterruptibly_(locker);
            if (!tryLockForWrite(locker)) {
                // Still contended: wait in the write queue.
                lockForWriteQueuedInterruptibly(locker, addWriteWaiter());
            }
            if (upgradeResult == Result.ACQUIRED) {
                // clear upgrade state bit to indicate automatic upgrade
                // (retry loop: clearUpgradeLock presumably CASes on mState —
                // TODO confirm against its definition)
                while (!clearUpgradeLock(mState)) {
                }
            } else {
                // undo automatic upgrade count increment
                mUpgradeCount--;
            }
        }
    }
}
public class BaseProgressLayerDrawable {

    /**
     * {@inheritDoc}
     *
     * Forwards the tint mode to all three layers (background, secondary
     * progress, progress) so they render with a consistent blend mode.
     */
    @Override
    @SuppressLint("NewApi")
    public void setTintMode(@NonNull PorterDuff.Mode tintMode) {
        mBackgroundDrawable.setTintMode(tintMode);
        mSecondaryProgressDrawable.setTintMode(tintMode);
        mProgressDrawable.setTintMode(tintMode);
    }
}
public class Fu { /** * Query by context . getContentResolver ( ) . * @ param uri * @ param projectionTypeMap * @ param selection * @ param selectionArgs * @ param sortOrder * @ return */ @ SuppressWarnings ( "rawtypes" ) public static List < Map < String , Object > > query ( final Uri uri , Map < String , Class > projectionTypeMap ) { } }
return query ( uri , projectionTypeMap , null , null , null ) ;
public class QueryIOManager { /** * The load / write operation * @ param file * The target file object from which the saved queries are loaded . * @ throws IOException */ public void load ( File file ) throws IOException { } }
if ( ! file . exists ( ) ) { return ; // NO - OP : Users may not have the saved file } if ( ! file . canRead ( ) ) { throw new IOException ( String . format ( "Error while reading the file located at %s.\n" + "Make sure you have the read permission at the location specified." , file . getAbsolutePath ( ) ) ) ; } // Clean the controller first before loading queryController . reset ( ) ; LineNumberReader reader = new LineNumberReader ( new FileReader ( file ) ) ; String line = "" ; while ( ( line = reader . readLine ( ) ) != null ) { try { if ( isCommentLine ( line ) || line . isEmpty ( ) ) { continue ; // skip comment lines and empty lines } if ( line . contains ( QUERY_GROUP ) ) { // The group ID is enclosed by a double - quotes sign String groupId = line . substring ( line . indexOf ( "\"" ) + 1 , line . lastIndexOf ( "\"" ) ) ; readQueryGroup ( reader , groupId ) ; } else if ( line . contains ( QUERY_ITEM ) ) { // The query ID is enclosed by a double - quotes sign String queryId = line . substring ( line . indexOf ( "\"" ) + 1 , line . lastIndexOf ( "\"" ) ) ; readQueryContent ( reader , "" , queryId ) ; } else { throw new IOException ( "Unknown syntax: " + line ) ; } } catch ( Exception e ) { throw new IOException ( String . format ( "Invalid syntax at line: %s" , reader . getLineNumber ( ) ) , e ) ; } }
public class StoreCallback { /** * Method sessionAttributeSet * @ param session * @ param name * @ param oldValue * @ param newValue * @ see com . ibm . wsspi . session . IStoreCallback # sessionAttributeSet ( com . ibm . wsspi . session . ISession , java . lang . Object , java . lang . Object , java . lang . Object ) */ public void sessionAttributeSet ( ISession session , Object name , Object oldValue , Boolean oldIsListener , Object newValue , Boolean newIsListener ) { } }
_sessionStateEventDispatcher . sessionAttributeSet ( session , name , oldValue , oldIsListener , newValue , newIsListener ) ;
public class IntervalJoinOperator { /** * Process a { @ link StreamRecord } from the left stream . Whenever an { @ link StreamRecord } * arrives at the left stream , it will get added to the left buffer . Possible join candidates * for that element will be looked up from the right buffer and if the pair lies within the * user defined boundaries , it gets passed to the { @ link ProcessJoinFunction } . * @ param record An incoming record to be joined * @ throws Exception Can throw an Exception during state access */ @ Override public void processElement1 ( StreamRecord < T1 > record ) throws Exception { } }
processElement ( record , leftBuffer , rightBuffer , lowerBound , upperBound , true ) ;
public class RESTProxyServlet {

    /**
     * For any request URL other than the context root, delegates to the
     * appropriate handler. If no handler is available, a 404 is written to
     * the response.
     *
     * @param request the incoming servlet request
     * @param response the servlet response to write to
     * @throws IOException if sending the error response fails
     */
    private void handleWithDelegate(final HttpServletRequest request, final HttpServletResponse response) throws IOException {
        final ServletRESTRequestImpl restRequest = new ServletRESTRequestImpl(request);
        final ServletRESTResponseImpl restResponse = new ServletRESTResponseImpl(response);
        final boolean handled = REST_HANDLER_CONTAINER.handleRequest(restRequest, restResponse);
        if (!handled) {
            // No handler found, so we send back a 404 "not found" response.
            final String errorMsg = Tr.formatMessage(tc, "HANDLER_NOT_FOUND_ERROR", request.getRequestURI());
            response.sendError(HttpServletResponse.SC_NOT_FOUND, errorMsg);
        }
    }
}
public class Humanize { /** * Same as { @ link # naturalDay ( Date ) } with the given locale . * @ param then * The date * @ param locale * Target locale * @ return String with ' today ' , ' tomorrow ' or ' yesterday ' compared to * current day . Otherwise , returns a string formatted according to a * locale sensitive DateFormat . */ @ Expose public static String naturalDay ( Date then , Locale locale ) { } }
return naturalDay ( DateFormat . SHORT , then , locale ) ;
public class StaticTraceInstrumentation { /** * Main entry point for command line execution . */ public final static void main ( String [ ] args ) throws Exception { } }
if ( args == null || args . length <= 0 ) { printUsageMessage ( ) ; return ; } StaticTraceInstrumentation sti = new StaticTraceInstrumentation ( ) ; sti . processArguments ( args ) ; sti . processPackageInfo ( ) ; sti . executeInstrumentation ( ) ;
public class ConfigParams { /** * Overrides parameters with new values from specified ConfigParams and returns * a new ConfigParams object . * @ param configParams ConfigMap with parameters to override the current values . * @ return a new ConfigParams object . * @ see ConfigParams # setDefaults ( ConfigParams ) */ public ConfigParams override ( ConfigParams configParams ) { } }
StringValueMap map = StringValueMap . fromMaps ( this , configParams ) ; return new ConfigParams ( map ) ;
public class wisite_translationinternalip_binding { /** * Use this API to count the filtered set of wisite _ translationinternalip _ binding resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static long count_filtered ( nitro_service service , String sitepath , String filter ) throws Exception { } }
wisite_translationinternalip_binding obj = new wisite_translationinternalip_binding ( ) ; obj . set_sitepath ( sitepath ) ; options option = new options ( ) ; option . set_count ( true ) ; option . set_filter ( filter ) ; wisite_translationinternalip_binding [ ] response = ( wisite_translationinternalip_binding [ ] ) obj . getfiltered ( service , option ) ; if ( response != null ) { return response [ 0 ] . __count ; } return 0 ;
public class Main { /** * Uploads a file in a single request . This approach is preferred for small files since it * eliminates unnecessary round - trips to the servers . * @ param dbxClient Dropbox user authenticated client * @ param localFIle local file to upload * @ param dropboxPath Where to upload the file to within Dropbox */ private static void uploadFile ( DbxClientV2 dbxClient , File localFile , String dropboxPath ) { } }
try ( InputStream in = new FileInputStream ( localFile ) ) { ProgressListener progressListener = l -> printProgress ( l , localFile . length ( ) ) ; FileMetadata metadata = dbxClient . files ( ) . uploadBuilder ( dropboxPath ) . withMode ( WriteMode . ADD ) . withClientModified ( new Date ( localFile . lastModified ( ) ) ) . uploadAndFinish ( in , progressListener ) ; System . out . println ( metadata . toStringMultiline ( ) ) ; } catch ( UploadErrorException ex ) { System . err . println ( "Error uploading to Dropbox: " + ex . getMessage ( ) ) ; System . exit ( 1 ) ; } catch ( DbxException ex ) { System . err . println ( "Error uploading to Dropbox: " + ex . getMessage ( ) ) ; System . exit ( 1 ) ; } catch ( IOException ex ) { System . err . println ( "Error reading from file \"" + localFile + "\": " + ex . getMessage ( ) ) ; System . exit ( 1 ) ; }
public class JFXDialog { /** * set the dialog container * Note : the dialog container must be StackPane , its the container for the dialog to be shown in . * @ param dialogContainer */ public void setDialogContainer ( StackPane dialogContainer ) { } }
if ( dialogContainer != null ) { this . dialogContainer = dialogContainer ; // FIXME : need to be improved to consider only the parent boundary offsetX = dialogContainer . getBoundsInLocal ( ) . getWidth ( ) ; offsetY = dialogContainer . getBoundsInLocal ( ) . getHeight ( ) ; animation = getShowAnimation ( transitionType . get ( ) ) ; }
public class DescribeTerminationPolicyTypesResult { /** * The termination policies supported by Amazon EC2 Auto Scaling : < code > OldestInstance < / code > , * < code > OldestLaunchConfiguration < / code > , < code > NewestInstance < / code > , < code > ClosestToNextInstanceHour < / code > , * < code > Default < / code > , < code > OldestLaunchTemplate < / code > , and < code > AllocationStrategy < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTerminationPolicyTypes ( java . util . Collection ) } or * { @ link # withTerminationPolicyTypes ( java . util . Collection ) } if you want to override the existing values . * @ param terminationPolicyTypes * The termination policies supported by Amazon EC2 Auto Scaling : < code > OldestInstance < / code > , * < code > OldestLaunchConfiguration < / code > , < code > NewestInstance < / code > , * < code > ClosestToNextInstanceHour < / code > , < code > Default < / code > , < code > OldestLaunchTemplate < / code > , and * < code > AllocationStrategy < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeTerminationPolicyTypesResult withTerminationPolicyTypes ( String ... terminationPolicyTypes ) { } }
if ( this . terminationPolicyTypes == null ) { setTerminationPolicyTypes ( new com . amazonaws . internal . SdkInternalList < String > ( terminationPolicyTypes . length ) ) ; } for ( String ele : terminationPolicyTypes ) { this . terminationPolicyTypes . add ( ele ) ; } return this ;
public class ReferenceBasedOutlierDetection { /** * Computes for each object the distance to one reference point . ( one * dimensional representation of the data set ) * @ param refPoint Reference Point Feature Vector * @ param database database to work on * @ param distFunc Distance function to use * @ return array containing the distance to one reference point for each * database object and the object id */ protected DoubleDBIDList computeDistanceVector ( NumberVector refPoint , Relation < ? extends NumberVector > database , PrimitiveDistanceQuery < ? super NumberVector > distFunc ) { } }
ModifiableDoubleDBIDList referenceDists = DBIDUtil . newDistanceDBIDList ( database . size ( ) ) ; for ( DBIDIter iditer = database . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { referenceDists . add ( distFunc . distance ( iditer , refPoint ) , iditer ) ; } referenceDists . sort ( ) ; return referenceDists ;
public class BeanInfoUtil { /** * Builds a property descriptor with no explicit visibility . */ protected static TypedPropertyDescriptor _buildPropertyDescriptor ( String propertyName , Class beanClass , String getterName , String setterName ) { } }
try { return new TypedPropertyDescriptor ( propertyName , beanClass , getterName , setterName ) ; } catch ( IntrospectionException e ) { throw new RuntimeException ( "Failed to create property \"" + propertyName + "\" on class \"" + beanClass + "\": " + e . getClass ( ) + " - " + e . getMessage ( ) ) ; }
public class FindbugsPlugin { /** * Removes all consequent enumerated keys from given store staring with given prefix */ private static void resetStore ( IPreferenceStore store , String prefix ) { } }
int start = 0 ; // 99 is paranoia . while ( start < 99 ) { String name = prefix + start ; if ( store . contains ( name ) ) { store . setToDefault ( name ) ; } else { break ; } start ++ ; }
public class Constraints { /** * Apply a inclusive " range " constraint to a bean property . * @ param propertyName the property with the range constraint . * @ param min the low edge of the range * @ param max the high edge of the range * @ return The range constraint constraint */ public PropertyConstraint inRange ( String propertyName , Comparable min , Comparable max ) { } }
return value ( propertyName , range ( min , max ) ) ;
public class UpdateMatchmakingConfigurationRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * <p>Writes every field of the request to the protocol marshaller using the
     * corresponding {@code *_BINDING} constant, which maps the getter to its
     * wire-level (JSON) field name.
     *
     * @param updateMatchmakingConfigurationRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the protocol-level marshaller receiving each field
     * @throws SdkClientException if the request is {@code null}, or if marshalling
     *         any field fails (the original exception is attached as the cause)
     */
    public void marshall(UpdateMatchmakingConfigurationRequest updateMatchmakingConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateMatchmakingConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per request field; null field values are handled
            // by the protocol marshaller itself.
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getDescription(), DESCRIPTION_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionQueueArns(), GAMESESSIONQUEUEARNS_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRequestTimeoutSeconds(), REQUESTTIMEOUTSECONDS_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceTimeoutSeconds(), ACCEPTANCETIMEOUTSECONDS_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceRequired(), ACCEPTANCEREQUIRED_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRuleSetName(), RULESETNAME_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getNotificationTarget(), NOTIFICATIONTARGET_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAdditionalPlayerCount(), ADDITIONALPLAYERCOUNT_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getCustomEventData(), CUSTOMEVENTDATA_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameProperties(), GAMEPROPERTIES_BINDING);
            protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionData(), GAMESESSIONDATA_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PathBuilder { /** * Creates a path using the provided JsonPath structure . * @ param jsonPath JsonPath structure to be parsed * @ return String representing structure provided in the input */ public static String buildPath ( JsonPath jsonPath ) { } }
List < String > urlParts = new LinkedList < > ( ) ; JsonPath currentJsonPath = jsonPath ; String pathPart ; do { if ( currentJsonPath instanceof RelationshipsPath ) { pathPart = RELATIONSHIP_MARK + SEPARATOR + currentJsonPath . getElementName ( ) ; } else if ( currentJsonPath instanceof FieldPath ) { pathPart = currentJsonPath . getElementName ( ) ; } else { pathPart = currentJsonPath . getElementName ( ) ; if ( currentJsonPath . getIds ( ) != null ) { pathPart += SEPARATOR + mergeIds ( currentJsonPath . getIds ( ) ) ; } } urlParts . add ( pathPart ) ; currentJsonPath = currentJsonPath . getParentResource ( ) ; } while ( currentJsonPath != null ) ; Collections . reverse ( urlParts ) ; return SEPARATOR + StringUtils . join ( SEPARATOR , urlParts ) + SEPARATOR ;
public class ClassFileMetaData { /** * Returns the name of all implemented interfaces . */ public List < String > getInterfaces ( ) { } }
int size = readValue ( endOfPool + 6 ) ; if ( size == 0 ) return Collections . emptyList ( ) ; List < String > result = new ArrayList < String > ( ) ; for ( int i = 0 ; i < size ; i ++ ) { result . add ( getClassName ( readValue ( endOfPool + 8 + ( i * 2 ) ) ) ) ; } return result ;
public class UnescapeTransliterator {

    /**
     * Implements {@link Transliterator#handleTransliterate}.
     *
     * <p>Scans the text between {@code pos.start} and {@code pos.limit} for escape
     * forms described by {@code spec[]} (prefix, digits in some radix, suffix) and
     * replaces each complete match with the code point it encodes. In incremental
     * mode, a partial match at the end of the buffer stops processing so the
     * caller can supply more text later.
     */
    @Override
    protected void handleTransliterate(Replaceable text, Position pos, boolean isIncremental) {
        int start = pos.start;
        int limit = pos.limit;
        int i, ipat;
        loop:
        while (start < limit) {
            // Loop over the forms in spec[]. Exit this loop when we
            // match one of the specs. Exit the outer loop if a
            // partial match is detected and isIncremental is true.
            for (ipat = 0; spec[ipat] != END;) {
                // Read the header of this form: prefix length, suffix length,
                // digit radix, and the min/max digit counts.
                int prefixLen = spec[ipat++];
                int suffixLen = spec[ipat++];
                int radix = spec[ipat++];
                int minDigits = spec[ipat++];
                int maxDigits = spec[ipat++];
                // s is a copy of start that is advanced over the
                // characters as we parse them.
                int s = start;
                boolean match = true;
                // 1) Match the literal prefix characters.
                for (i = 0; i < prefixLen; ++i) {
                    if (s >= limit) {
                        if (i > 0) {
                            // We've already matched a character. This is
                            // a partial match, so we return if in
                            // incremental mode. In non-incremental mode,
                            // go to the next spec.
                            if (isIncremental) {
                                break loop;
                            }
                            match = false;
                            break;
                        }
                    }
                    char c = text.charAt(s++);
                    if (c != spec[ipat + i]) {
                        match = false;
                        break;
                    }
                }
                if (match) {
                    // 2) Parse up to maxDigits digits in the given radix,
                    // accumulating the code point value in u.
                    int u = 0;
                    int digitCount = 0;
                    for (;;) {
                        if (s >= limit) {
                            // Check for partial match in incremental mode.
                            if (s > start && isIncremental) {
                                break loop;
                            }
                            break;
                        }
                        int ch = text.char32At(s);
                        int digit = UCharacter.digit(ch, radix);
                        if (digit < 0) {
                            break;
                        }
                        s += UTF16.getCharCount(ch);
                        u = (u * radix) + digit;
                        if (++digitCount == maxDigits) {
                            break;
                        }
                    }
                    match = (digitCount >= minDigits);
                    if (match) {
                        // 3) Match the literal suffix characters.
                        for (i = 0; i < suffixLen; ++i) {
                            if (s >= limit) {
                                // Check for partial match in incremental mode.
                                if (s > start && isIncremental) {
                                    break loop;
                                }
                                match = false;
                                break;
                            }
                            char c = text.charAt(s++);
                            if (c != spec[ipat + prefixLen + i]) {
                                match = false;
                                break;
                            }
                        }
                        if (match) {
                            // At this point, we have a match: replace the escape
                            // form with the decoded code point and adjust limit
                            // for the change in text length.
                            String str = UTF16.valueOf(u);
                            text.replace(start, s, str);
                            limit -= s - start - str.length();
                            // The following break statement leaves the
                            // loop that is traversing the forms in
                            // spec[]. We then parse the next input
                            // character.
                            break;
                        }
                    }
                }
                // This form did not match; skip its prefix+suffix data to
                // reach the next form's header.
                ipat += prefixLen + suffixLen;
            }
            // Advance past one code point of input (matched or not).
            if (start < limit) {
                start += UTF16.getCharCount(text.char32At(start));
            }
        }
        // Propagate how much the text shrank/grew into the position record.
        pos.contextLimit += limit - pos.limit;
        pos.limit = limit;
        pos.start = start;
    }
}
public class SQLiteUpdateTaskHelper { /** * Execute SQL . * @ param database * the database * @ param context * the context * @ param rawResourceId * the raw resource id */ public static void executeSQL ( final SQLiteDatabase database , Context context , int rawResourceId ) { } }
String [ ] c = IOUtils . readTextFile ( context , rawResourceId ) . split ( ";" ) ; List < String > commands = Arrays . asList ( c ) ; executeSQL ( database , commands ) ;
public class ParameterImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective setter: dispatches on the structural feature id
     * and stores {@code newValue} into the matching feature. Do not edit by
     * hand; regenerate from the model instead.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case BpsimPackage.PARAMETER__RESULT_REQUEST:
                // Multi-valued feature: replace the current contents wholesale.
                getResultRequest().clear();
                getResultRequest().addAll((Collection<? extends ResultType>) newValue);
                return;
            case BpsimPackage.PARAMETER__PARAMETER_VALUE_GROUP:
                // Feature-map group: delegate to the feature map itself.
                ((FeatureMap.Internal) getParameterValueGroup()).set(newValue);
                return;
            case BpsimPackage.PARAMETER__PARAMETER_VALUE:
                getParameterValue().clear();
                getParameterValue().addAll((Collection<? extends ParameterValue>) newValue);
                return;
            case BpsimPackage.PARAMETER__KPI:
                setKpi((Boolean) newValue);
                return;
            case BpsimPackage.PARAMETER__SLA:
                setSla((Boolean) newValue);
                return;
        }
        // Unknown feature id: let the superclass handle (or reject) it.
        super.eSet(featureID, newValue);
    }
}
public class Table { /** * Untick a checkbox in a cell of a table indicated by the input row and column indices . * @ param row * int number of row for cell * @ param column * int number of column for cell */ public void uncheckCheckboxInCell ( int row , int column ) { } }
String checkboxLocator = getXPathBase ( ) + "tr[" + row + "]/td[" + column + "]/input" ; CheckBox cb = new CheckBox ( checkboxLocator ) ; cb . uncheck ( ) ;
public class PrepareRoutingSubnetworks {

    /**
     * Checks whether the node is removed or inaccessible for ALL encoders.
     *
     * <p>A possible optimization — caching previously marked removed nodes in a
     * GHBitSet per vehicle — is not done here because nodeIndex * numberOfEncoders
     * would need long indices while BitSet only supports int.
     *
     * @return true if no edge leaving nodeIndex is accessible in either direction
     *         for any flag encoder
     */
    boolean detectNodeRemovedForAllEncoders(EdgeExplorer edgeExplorerAllEdges, int nodeIndex) {
        EdgeIterator edgeIter = edgeExplorerAllEdges.setBaseNode(nodeIndex);
        while (edgeIter.next()) {
            for (BooleanEncodedValue accessEnc : accessEncList) {
                // One encoder allowing either direction means the node is reachable.
                boolean reachable = edgeIter.get(accessEnc) || edgeIter.getReverse(accessEnc);
                if (reachable) {
                    return false;
                }
            }
        }
        return true;
    }
}
public class WalkModFacade {

    /**
     * Downloads the list of declared plugins in the configuration file using Ivy.
     * Ignores the ConfigurationProvider if passed in the constructor.
     *
     * <p>Recurses into declared sub-modules (each module directory gets its own
     * facade and its own {@code install()} call). While running, the JVM's
     * {@code user.dir} system property is temporarily switched to the execution
     * directory and restored afterwards.
     *
     * @throws InvalidConfigurationException
     *             if the walkmod configuration is invalid and it is working in
     *             non-verbose mode.
     */
    public void install() throws InvalidConfigurationException {
        if (cfg.exists()) {
            if (options.isVerbose()) {
                log.info(cfg.getAbsoluteFile() + " [ok]");
            }
            // Uses Ivy always
            ConfigurationProvider cp = new IvyConfigurationProvider(options.isOffline(), options.isVerbose());
            if (options.isVerbose()) {
                log.info("** THE PLUGIN INSTALLATION STARTS **");
                System.out.print("----------------------------------------");
                System.out.println("----------------------------------------");
            }
            long startTime = System.currentTimeMillis();
            long endTime = startTime;
            DecimalFormat myFormatter = new DecimalFormat("###.###");
            DateFormat df = new SimpleDateFormat("EEE, d MMM yyyy HH:mm:ss", Locale.US);
            boolean error = false;
            try {
                // Remember the original working directory so it can be restored,
                // then point user.dir at the execution directory for the install.
                userDir = new File(System.getProperty("user.dir")).getAbsolutePath();
                System.setProperty("user.dir", options.getExecutionDirectory().getCanonicalPath());
                // Building the configuration with the Ivy provider is what
                // triggers the plugin resolution/download.
                ConfigurationManager cfgManager = new ConfigurationManager(cfg, cp);
                Configuration cf = cfgManager.getConfiguration();
                List<String> modules = cf.getModules();
                if (modules != null && !modules.isEmpty()) {
                    // Recurse into each declared sub-module directory.
                    for (String module : modules) {
                        File aux = new File(module).getAbsoluteFile();
                        if (aux.isDirectory()) {
                            if (options.isVerbose()) {
                                log.info("** MODULE " + aux.getAbsoluteFile() + " [ok] **");
                            }
                            WalkModFacade facade = new WalkModFacade(OptionsBuilder.options(options).executionDirectory(aux).build());
                            facade.install();
                        } else {
                            log.error("The module " + aux.getAbsolutePath() + " is not an existing directory");
                        }
                    }
                }
            } catch (Exception e) {
                // Restore the original working directory before reporting.
                System.setProperty("user.dir", userDir);
                if (options.isVerbose()) {
                    error = true;
                    endTime = System.currentTimeMillis();
                    double time = 0;
                    if (endTime > startTime) {
                        time = (double) (endTime - startTime) / (double) 1000;
                    }
                    String timeMsg = myFormatter.format(time);
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                    log.info("PLUGIN INSTALLATION FAILS");
                    System.out.println();
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                    log.info("Total time: " + timeMsg + " seconds");
                    log.info("Finished at: " + df.format(new Date()));
                    log.info("Final memory: " + (Runtime.getRuntime().freeMemory()) / 1048576 + " M/ " + (Runtime.getRuntime().totalMemory() / 1048576) + " M");
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                    if (options.isPrintErrors()) {
                        log.error("Plugin installations fails", e);
                    } else {
                        log.info("Plugin installations fails. Please, execute walkmod with -e to see the details");
                    }
                    if (options.isThrowException()) {
                        // NOTE(review): only the stack trace is copied — the
                        // message and cause of the original exception are lost;
                        // presumably intentional to keep verbose output clean,
                        // but consider new RuntimeException(e). TODO confirm.
                        RuntimeException re = new RuntimeException();
                        re.setStackTrace(e.getStackTrace());
                        throw re;
                    }
                } else {
                    throw new InvalidConfigurationException(e);
                }
            }
            if (!error) {
                // Success path: restore the working directory and print the summary.
                System.setProperty("user.dir", userDir);
                if (options.isVerbose()) {
                    endTime = System.currentTimeMillis();
                    double time = 0;
                    if (endTime > startTime) {
                        time = (double) (endTime - startTime) / (double) 1000;
                    }
                    String timeMsg = myFormatter.format(time);
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                    System.out.println();
                    log.info("PLUGIN INSTALLATION COMPLETE");
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                    log.info("Total time: " + timeMsg + " seconds");
                    log.info("Finished at: " + df.format(new Date()));
                    log.info("Final memory: " + (Runtime.getRuntime().freeMemory()) / 1048576 + " M/ " + (Runtime.getRuntime().totalMemory() / 1048576) + " M");
                    System.out.print("----------------------------------------");
                    System.out.println("----------------------------------------");
                }
            }
        } else {
            // No walkmod.xml: log in verbose mode, otherwise fail loudly.
            if (options.isVerbose()) {
                log.error(cfg.getAbsolutePath() + " does not exist. The root directory of your project must contain a walkmod.xml");
            } else {
                throw new WalkModException(cfg.getAbsolutePath() + " does not exist. The root directory of your project must contain a walkmod.xml");
            }
        }
    }
}
public class JTune { /** * Returns the 3 points of a slur * < br > Method used by { @ link # drawLink ( Graphics2D , TwoNotesLink ) } * @ return [ 0 ] = > start , [ 1 ] = control , [ 2 ] = end */ private Point2D [ ] getLinkPoints ( TwoNotesLink slurDef ) { } }
if ( slurDef . getEnd ( ) == null ) { return new Point2D [ ] { } ; } JNoteElementAbstract elmtStart , elmtEnd ; elmtStart = ( JNoteElementAbstract ) getRenditionObjectFor ( slurDef . getStart ( ) ) ; elmtEnd = ( JNoteElementAbstract ) getRenditionObjectFor ( slurDef . getEnd ( ) ) ; if ( elmtStart == null || elmtStart == null ) { System . err . println ( "getLinkPoints error: elmtStart or elmtEnd are null" ) ; return new Point2D [ ] { } ; } if ( ( elmtStart . getStaffLine ( ) != null ) && ( elmtEnd . getStaffLine ( ) != null ) ) { if ( ! elmtStart . getStaffLine ( ) . equals ( elmtEnd . getStaffLine ( ) ) ) { System . err . println ( "Warning - abc4j limitation : Slurs / ties cannot be drawn accross several lines for now." ) ; return new Point2D [ ] { } ; } } final short UNDER_IN = 0 ; // the only existing in old abc4j final short UNDER_OUT = 1 ; // down and out of stems final short ABOVE_IN = 2 ; // up and can go across the stems final short ABOVE_OUT = 3 ; // up and out of stems ( e . g . for tuplet ) Point2D [ ] [ ] p = new Point2D [ 4 ] [ 3 ] ; // it ' s a tie , anchors are different and curve is flater if ( slurDef instanceof TieDefinition ) { Point2D [ ] above = new Point2D [ 3 ] ; Point2D [ ] under = new Point2D [ 3 ] ; Point2D [ ] ret ; above [ START ] = elmtStart . getTieStartAboveAnchor ( ) ; above [ END ] = elmtEnd . getTieEndAboveAnchor ( ) ; under [ START ] = elmtStart . getTieStartUnderAnchor ( ) ; under [ END ] = elmtEnd . getTieEndUnderAnchor ( ) ; short position = elmtStart . getJSlurDefinition ( ) . getPosition ( ) ; if ( position == JSlurOrTie . POSITION_AUTO && ( elmtStart instanceof JChordNote ) ) { if ( elmtStart . isStemUp ( ) ) { // in a chord stem up , all ties are under // the highest note tie is above position = ( ( JChordNote ) elmtStart ) . isHighest ( ) ? JSlurOrTie . POSITION_ABOVE : JSlurOrTie . 
POSITION_UNDER ; } else { // in a chord stem down , all ties are above // the lowest note tie is under position = ( ( JChordNote ) elmtStart ) . isLowest ( ) ? JSlurOrTie . POSITION_UNDER : JSlurOrTie . POSITION_ABOVE ; } } int factor ; if ( ( position == JSlurOrTie . POSITION_ABOVE ) || ( position == JSlurOrTie . POSITION_AUTO && ( ! elmtStart . isStemUp ( ) || ! elmtEnd . isStemUp ( ) ) ) ) { // tie above ret = above ; factor = - 1 ; } else { // if ( ( position = = JSlurOrTie . POSITION _ UNDER ) // | | ( position = = JSlurOrTie . POSITION _ AUTO // & & elmtStart . isStemUp ( ) & & elmtEnd . isStemUp ( ) ) ) { // tie under ret = under ; factor = 1 ; } ret [ CONTROL ] = new Point2D . Double ( ret [ START ] . getX ( ) + ( ret [ END ] . getX ( ) - ret [ START ] . getX ( ) ) / 2 , ret [ START ] . getY ( ) + factor * getMetrics ( ) . getSlurAnchorYOffset ( ) ) ; return ret ; } p [ UNDER_IN ] [ START ] = elmtStart . getSlurUnderAnchor ( ) ; p [ UNDER_OUT ] [ START ] = elmtStart . getSlurUnderAnchorOutOfStem ( ) ; p [ ABOVE_IN ] [ START ] = elmtStart . getSlurAboveAnchor ( ) ; p [ ABOVE_OUT ] [ START ] = elmtStart . getSlurAboveAnchorOutOfStem ( ) ; p [ UNDER_IN ] [ END ] = elmtEnd . getSlurUnderAnchor ( ) ; p [ UNDER_OUT ] [ END ] = elmtEnd . getSlurUnderAnchorOutOfStem ( ) ; p [ ABOVE_IN ] [ END ] = elmtEnd . getSlurAboveAnchor ( ) ; p [ ABOVE_OUT ] [ END ] = elmtEnd . getSlurAboveAnchorOutOfStem ( ) ; // determinate peaks ( lowest / highest ) note and note glyph // FIXME : ties should enclose note decorations NoteAbstract [ ] peakNote = new NoteAbstract [ 4 ] ; JNoteElementAbstract [ ] peakNoteGlyph = new JNoteElementAbstract [ 4 ] ; NoteAbstract startNote = // ( NoteAbstract ) m _ music . getElementByReference ( slurDef . getStart ( ) ) ; ( NoteAbstract ) elmtStart . getMusicElement ( ) ; NoteAbstract endNote = // ( NoteAbstract ) m _ music . getElementByReference ( slurDef . getEnd ( ) ) ; ( NoteAbstract ) elmtEnd . 
getMusicElement ( ) ; String voiceNo = startNote . getReference ( ) . getVoice ( ) ; // under in peakNote [ UNDER_IN ] = m_music . getVoice ( voiceNo ) . getLowestNoteBewteen ( startNote , endNote ) ; peakNoteGlyph [ UNDER_IN ] = ( JNoteElementAbstract ) getRenditionObjectFor ( peakNote [ UNDER_IN ] ) ; // under out of stems peakNoteGlyph [ UNDER_OUT ] = getLowestNoteGlyphBetween ( startNote , endNote , true ) ; // enhance : if lowest glyph strictly between start / end is at same Y than start or end if ( peakNoteGlyph [ UNDER_OUT ] == null ) { // no notes between start and end peakNoteGlyph [ UNDER_OUT ] = p [ UNDER_OUT ] [ START ] . getY ( ) > p [ UNDER_OUT ] [ END ] . getY ( ) ? elmtStart : elmtEnd ; // Low } else { Point2D lowAnchor = peakNoteGlyph [ UNDER_OUT ] . getSlurUnderAnchorOutOfStem ( ) ; if ( lowAnchor . getY ( ) < p [ UNDER_OUT ] [ START ] . getY ( ) ) peakNoteGlyph [ UNDER_OUT ] = elmtStart ; // Low ; if ( lowAnchor . getY ( ) < p [ UNDER_OUT ] [ END ] . getY ( ) ) peakNoteGlyph [ UNDER_OUT ] = elmtEnd ; // Low ; } peakNote [ UNDER_OUT ] = ( NoteAbstract ) peakNoteGlyph [ UNDER_OUT ] . getMusicElement ( ) ; // above in peakNote [ ABOVE_IN ] = m_music . getVoice ( voiceNo ) . getHighestNoteBewteen ( startNote , endNote ) ; peakNoteGlyph [ ABOVE_IN ] = ( JNoteElementAbstract ) getRenditionObjectFor ( peakNote [ ABOVE_IN ] ) ; // above out of stems ( e . g . tuplet ) peakNoteGlyph [ ABOVE_OUT ] = getHighestNoteGlyphBetween ( startNote , endNote , true ) ; // enhance : if lowest glyph strictly between start / end is at same Y than start or end if ( peakNoteGlyph [ ABOVE_OUT ] == null ) { // no notes between start and end peakNoteGlyph [ ABOVE_OUT ] = p [ ABOVE_OUT ] [ START ] . getY ( ) < p [ ABOVE_OUT ] [ END ] . getY ( ) ? elmtStart : elmtEnd ; // High } else { Point2D highAnchor = peakNoteGlyph [ ABOVE_OUT ] . getSlurAboveAnchorOutOfStem ( ) ; if ( highAnchor . getY ( ) > p [ ABOVE_OUT ] [ START ] . 
getY ( ) ) peakNoteGlyph [ ABOVE_OUT ] = elmtStart ; // High if ( highAnchor . getY ( ) > p [ ABOVE_OUT ] [ END ] . getY ( ) ) peakNoteGlyph [ ABOVE_OUT ] = elmtEnd ; // High } peakNote [ ABOVE_OUT ] = ( NoteAbstract ) peakNoteGlyph [ ABOVE_OUT ] . getMusicElement ( ) ; // if peak = start then control = start // if peak = end then control = end // else control = peak for ( short i = 0 ; i < 4 ; i ++ ) { int factor = ( i <= UNDER_OUT ) ? ( 1 ) : ( - 1 ) ; if ( peakNote [ i ] . getReference ( ) . equals ( slurDef . getStart ( ) ) ) { p [ i ] [ CONTROL ] = new Point2D . Double ( p [ i ] [ START ] . getX ( ) + ( p [ i ] [ END ] . getX ( ) - p [ i ] [ START ] . getX ( ) ) / 2 , p [ i ] [ START ] . getY ( ) + factor * getMetrics ( ) . getSlurAnchorYOffset ( ) ) ; } else if ( peakNote [ i ] . getReference ( ) . equals ( slurDef . getEnd ( ) ) ) { p [ i ] [ CONTROL ] = new Point2D . Double ( p [ i ] [ START ] . getX ( ) + ( p [ i ] [ END ] . getX ( ) - p [ i ] [ START ] . getX ( ) ) / 2 , p [ i ] [ END ] . getY ( ) + factor * getMetrics ( ) . getSlurAnchorYOffset ( ) ) ; } else { // control = peak switch ( i ) { case UNDER_IN : p [ i ] [ CONTROL ] = peakNoteGlyph [ UNDER_IN ] . getSlurUnderAnchor ( ) ; break ; case UNDER_OUT : p [ i ] [ CONTROL ] = peakNoteGlyph [ UNDER_OUT ] . getSlurUnderAnchorOutOfStem ( ) ; break ; case ABOVE_IN : p [ i ] [ CONTROL ] = peakNoteGlyph [ ABOVE_IN ] . getSlurAboveAnchor ( ) ; break ; case ABOVE_OUT : p [ i ] [ CONTROL ] = peakNoteGlyph [ ABOVE_OUT ] . getSlurAboveAnchorOutOfStem ( ) ; break ; } } // we don ' t like straight slurs , if Ystart = Ycontrol = Yend , Ycontrol + = slurOffset double Ycontrol = p [ i ] [ CONTROL ] . getY ( ) , Ystart = p [ i ] [ START ] . getY ( ) , Yend = p [ i ] [ END ] . getY ( ) ; if ( ( Ycontrol == Ystart ) || ( Ycontrol == Yend ) ) { double Xoffset = 0 ; if ( Ycontrol != Ystart ) Xoffset = - getMetrics ( ) . getNoteWidth ( ) / 2 ; else if ( Ycontrol != Yend ) Xoffset = getMetrics ( ) . 
getNoteWidth ( ) / 2 ; p [ i ] [ CONTROL ] = new Point2D . Double ( p [ i ] [ CONTROL ] . getX ( ) + Xoffset , p [ i ] [ CONTROL ] . getY ( ) + factor * getMetrics ( ) . getSlurAnchorYOffset ( ) * 2 ) ; } } // Now the funny part , determinate which curve is the best // Point2D [ ] ret = null ; Vector curves = new Vector ( ) ; // Combinaison to compare JSlurOrTie jSlurDef = getJSlurOrTie ( slurDef ) ; if ( ! jSlurDef . isOutOfStems ( ) ) { // we can add all curves Vector underCurves = new Vector ( ) , aboveCurves = new Vector ( ) ; if ( ! jSlurDef . isAbove ( ) ) { // down or auto curves . add ( p [ UNDER_IN ] ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_IN ] [ START ] , p [ UNDER_IN ] [ CONTROL ] , p [ UNDER_OUT ] [ END ] } ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_IN ] [ START ] , p [ UNDER_OUT ] [ CONTROL ] , p [ UNDER_IN ] [ END ] } ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_IN ] [ START ] , p [ UNDER_OUT ] [ CONTROL ] , p [ UNDER_OUT ] [ END ] } ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_OUT ] [ START ] , p [ UNDER_IN ] [ CONTROL ] , p [ UNDER_IN ] [ END ] } ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_OUT ] [ START ] , p [ UNDER_IN ] [ CONTROL ] , p [ UNDER_OUT ] [ END ] } ) ; underCurves . add ( new Point2D [ ] { p [ UNDER_OUT ] [ START ] , p [ UNDER_OUT ] [ CONTROL ] , p [ UNDER_IN ] [ END ] } ) ; } if ( ! jSlurDef . isUnder ( ) ) { // up or auto curves . add ( p [ ABOVE_IN ] ) ; aboveCurves . add ( new Point2D [ ] { p [ ABOVE_IN ] [ START ] , p [ ABOVE_IN ] [ CONTROL ] , p [ ABOVE_OUT ] [ END ] } ) ; aboveCurves . add ( new Point2D [ ] { p [ ABOVE_IN ] [ START ] , p [ ABOVE_OUT ] [ CONTROL ] , p [ ABOVE_IN ] [ END ] } ) ; aboveCurves . add ( new Point2D [ ] { p [ ABOVE_IN ] [ START ] , p [ ABOVE_OUT ] [ CONTROL ] , p [ ABOVE_OUT ] [ END ] } ) ; aboveCurves . add ( new Point2D [ ] { p [ ABOVE_OUT ] [ START ] , p [ ABOVE_IN ] [ CONTROL ] , p [ ABOVE_IN ] [ END ] } ) ; aboveCurves . 
add ( new Point2D [ ] { p [ ABOVE_OUT ] [ START ] , p [ ABOVE_IN ] [ CONTROL ] , p [ ABOVE_OUT ] [ END ] } ) ; aboveCurves . add ( new Point2D [ ] { p [ ABOVE_OUT ] [ START ] , p [ ABOVE_OUT ] [ CONTROL ] , p [ ABOVE_IN ] [ END ] } ) ; } // verify above and under curves must be above / under for ( Object aboveCurve : aboveCurves ) { Point2D [ ] p2d = ( Point2D [ ] ) aboveCurve ; if ( ( p2d [ CONTROL ] . getY ( ) < p2d [ START ] . getY ( ) ) && ( p2d [ CONTROL ] . getY ( ) < p2d [ END ] . getY ( ) ) ) curves . add ( p2d ) ; } for ( Object underCurve : underCurves ) { Point2D [ ] p2d = ( Point2D [ ] ) underCurve ; if ( ( p2d [ CONTROL ] . getY ( ) > p2d [ START ] . getY ( ) ) && ( p2d [ CONTROL ] . getY ( ) > p2d [ END ] . getY ( ) ) ) curves . add ( p2d ) ; } } // Out of stems if ( ! jSlurDef . isAbove ( ) ) // under or auto curves . add ( p [ UNDER_OUT ] ) ; if ( ! jSlurDef . isUnder ( ) ) // above or auto curves . add ( p [ ABOVE_OUT ] ) ; // Correct controlPoint ( center it ) if the curve gets ugly // ( something like a little nose lefter / righter than min / maxX // and compute a mark to determinate best curve // vector for newly generated curves Vector additionnalCurves = new Vector ( ) ; Point2D [ ] bestCurve = null ; float bestMark = - 99 ; // int cpt = 0 ; int bestCurveIdx = - 1; for ( Iterator itCurves = curves . iterator ( ) ; itCurves . hasNext ( ) ; ) { Point2D [ ] p2d = ( Point2D [ ] ) itCurves . next ( ) ; // System . out . print ( ( cpt + + ) + " : " ) ; try { SlurInfos slurInfos = new SlurInfos ( p2d , getNoteGlyphesBetween ( startNote , endNote ) , getMetrics ( ) ) ; // Check results // If some intersections , try to draw a new curve // by moving the control point if ( slurInfos . intersect > 0 ) { Point2D newPtControl ; double yOffset = getMetrics ( ) . getNoteHeight ( ) ; double xOffset = getMetrics ( ) . getNoteWidth ( ) ; int yFactor = slurInfos . isAbove ? - 1 : 1 ; double distanceControlEnd = p2d [ END ] . getX ( ) - p2d [ CONTROL ] . 
getX ( ) ; double distanceControlStart = p2d [ CONTROL ] . getX ( ) - p2d [ START ] . getX ( ) ; int xFactor = 0 ; if ( distanceControlEnd < distanceControlStart ) xFactor = - 1 ; else if ( distanceControlEnd > distanceControlStart ) xFactor = 1 ; for ( float xO = 0f ; xO <= 1 ; xO += .25f ) { for ( float yO = .5f ; yO <= 2f ; yO += .25f ) { // move X note width , Y * factor note height newPtControl = new Point2D . Double ( p2d [ CONTROL ] . getX ( ) + xO * xFactor * xOffset , p2d [ CONTROL ] . getY ( ) + yO * yFactor * yOffset ) ; additionnalCurves . add ( new Point2D [ ] { p2d [ START ] , newPtControl , p2d [ END ] } ) ; } } // / / move Y = 1 note height // newPtControl = new Point2D . Double ( p2d [ CONTROL ] . getX ( ) , // p2d [ CONTROL ] . getY ( ) + factor * yOffset ) ; // additionnalCurves . add ( new Point2D [ ] { p2d [ START ] , newPtControl , p2d [ END ] } ) ; // / / move Y = 1.5 note height // newPtControl = new Point2D . Double ( p2d [ CONTROL ] . getX ( ) , // p2d [ CONTROL ] . getY ( ) + 1.5 * factor * yOffset ) ; // additionnalCurves . add ( new Point2D [ ] { p2d [ START ] , newPtControl , p2d [ END ] } ) ; // / / move Y = 2 note height // newPtControl = new Point2D . Double ( p2d [ CONTROL ] . getX ( ) , // p2d [ CONTROL ] . getY ( ) + 2 * factor * yOffset ) ; // additionnalCurves . add ( new Point2D [ ] { p2d [ START ] , newPtControl , p2d [ END ] } ) ; } // TODO if flatness is high , away start / end position from original // ( distance allowed = nb notes * m _ metrics . getNoteHeight ) if ( slurInfos . mark > bestMark || ( bestCurve == null ) ) { bestMark = slurInfos . mark ; bestCurve = p2d ; // bestCurveIdx = cpt - 1; } } catch ( MalFormedCurveException mfce ) { // not good curve , remove it if ( curves . size ( ) > 1 ) { itCurves . remove ( ) ; } else { break ; } } } // Checks additional curves for ( Object additionnalCurve : additionnalCurves ) { Point2D [ ] p2d = ( Point2D [ ] ) additionnalCurve ; // System . out . 
print ( ( cpt + + ) + " : " ) ; try { SlurInfos slurInfos = new SlurInfos ( p2d , getNoteGlyphesBetween ( startNote , endNote ) , getMetrics ( ) ) ; if ( slurInfos . mark > bestMark || ( bestCurve == null ) ) { bestMark = slurInfos . mark ; bestCurve = p2d ; // bestComboIdx = cpt - 1; } curves . add ( p2d ) ; } catch ( MalFormedCurveException mfce ) { // nothing to do } } // System . out . println ( " curves . size ( ) = " + curves . size ( ) ) ; // System . out . println ( " best curve n � " + bestCurveIdx + " , mark = " + bestMark ) ; return bestCurve != null ? bestCurve : ( curves . size ( ) == 0 ? new Point2D [ ] { } : ( Point2D [ ] ) curves . get ( 0 ) ) ;
public class AgentProperties { /** * Creates a new bean from properties . * @ param props non - null properties * @ return a non - null bean * @ throws IOException if there were files that failed to be written */ public static AgentProperties readIaasProperties ( Properties props ) throws IOException { } }
// Deal with files transferred through user data . // Store files in the system ' s temporary directory . // In Karaf , this will point to the " data / tmp " directory . File msgResourcesDirectory = new File ( System . getProperty ( "java.io.tmpdir" ) , "roboconf-messaging" ) ; props = UserDataHelpers . processUserData ( props , msgResourcesDirectory ) ; // Given # 213 , we have to replace some characters escaped by AWS ( and probably Openstack too ) . AgentProperties result = new AgentProperties ( ) ; result . setApplicationName ( updatedField ( props , UserDataHelpers . APPLICATION_NAME ) ) ; result . setScopedInstancePath ( updatedField ( props , UserDataHelpers . SCOPED_INSTANCE_PATH ) ) ; result . setDomain ( updatedField ( props , UserDataHelpers . DOMAIN ) ) ; final Map < String , String > messagingConfiguration = new LinkedHashMap < > ( ) ; List < String > toSkip = Arrays . asList ( UserDataHelpers . APPLICATION_NAME , UserDataHelpers . DOMAIN , UserDataHelpers . SCOPED_INSTANCE_PATH ) ; for ( String k : props . stringPropertyNames ( ) ) { if ( ! toSkip . contains ( k ) ) { // All other properties are considered messaging - specific . messagingConfiguration . put ( k , updatedField ( props , k ) ) ; } } result . setMessagingConfiguration ( Collections . unmodifiableMap ( messagingConfiguration ) ) ; return result ;
public class TouchState { /** * Gets the Point matching the given ID . if available * @ param id The Point ID to match . A value of - 1 matches any Point . * @ return a matching Point , or null if there is no point with that ID . */ Point getPointForID ( int id ) { } }
for ( int i = 0 ; i < pointCount ; i ++ ) { if ( id == - 1 || points [ i ] . id == id ) { return points [ i ] ; } } return null ;
public class CacheInstance {

    /**
     * Destroy the caching service when no longer needed.
     *
     * <p>Cleans up the object manager, stops the maintenance thread, and nulls
     * out all internal references so they can be garbage-collected. After this
     * call the instance is unusable.
     */
    public void destroy() {
        mom.cleanUp();
        // NOTE(review): if mThread is a java.lang.Thread, stop() is deprecated
        // and inherently unsafe (can leave shared state inconsistent) — an
        // interrupt/flag-based shutdown would be preferable. TODO confirm
        // mThread's actual type and shutdown semantics.
        mThread.stop();
        si = null;
        mom = null;
        cache = null;
        mThread = null;
    }
}
public class JLanguageTool { /** * The main check method . Tokenizes the text into sentences and matches these * sentences against all currently active rules depending on { @ code mode } . * @ since 4.3 */ public List < RuleMatch > check ( AnnotatedText annotatedText , boolean tokenizeText , ParagraphHandling paraMode , RuleMatchListener listener , Mode mode ) throws IOException { } }
List < String > sentences ; if ( tokenizeText ) { sentences = sentenceTokenize ( annotatedText . getPlainText ( ) ) ; } else { sentences = new ArrayList < > ( ) ; sentences . add ( annotatedText . getPlainText ( ) ) ; } List < Rule > allRules = getAllRules ( ) ; if ( printStream != null ) { printIfVerbose ( allRules . size ( ) + " rules activated for language " + language ) ; } unknownWords = new HashSet < > ( ) ; List < AnalyzedSentence > analyzedSentences = analyzeSentences ( sentences ) ; List < RuleMatch > ruleMatches = performCheck ( analyzedSentences , sentences , allRules , paraMode , annotatedText , listener , mode ) ; ruleMatches = new SameRuleGroupFilter ( ) . filter ( ruleMatches ) ; // no sorting : SameRuleGroupFilter sorts rule matches already if ( cleanOverlappingMatches ) { ruleMatches = new CleanOverlappingFilter ( language ) . filter ( ruleMatches ) ; } return ruleMatches ;
public class Jdt2Ecore {

    /**
     * Add the given constructors to the Ecore container.
     * For each non-generated constructor of the super class, a SARL
     * constructor with matching parameters is created whose body is a single
     * delegating super-call.
     *
     * @param codeBuilder the code builder to use.
     * @param superClassConstructors the constructors defined in the super class; may be {@code null}.
     * @param context the context of the constructors.
     * @throws JavaModelException if the Java model is invalid.
     */
    public void createStandardConstructorsWith(ConstructorBuilder codeBuilder,
            Collection<IMethod> superClassConstructors, XtendTypeDeclaration context) throws JavaModelException {
        if (superClassConstructors != null) {
            for (final IMethod constructor : superClassConstructors) {
                // Skip constructors that were themselves auto-generated.
                if (!isGeneratedOperation(constructor)) {
                    final ISarlConstructorBuilder cons = codeBuilder.addConstructor();
                    // Create parameters mirroring the super constructor's parameters.
                    final IFormalParameterBuilder[] sarlParams = createFormalParametersWith(
                            name -> cons.addParameter(name), constructor);
                    // Create the body block.
                    final IBlockExpressionBuilder block = cons.getExpression();
                    // Create the super-call expression.
                    final IExpressionBuilder superCall = block.addExpression();
                    final XFeatureCall call = XbaseFactory.eINSTANCE.createXFeatureCall();
                    superCall.setXExpression(call);
                    // Attach the auto-generated "todo" comment to the call.
                    superCall.setDocumentation(block.getAutoGeneratedActionString());
                    // Wire the feature call to the JVM super constructor.
                    call.setFeature(getJvmConstructor(constructor, context));
                    call.setExplicitOperationCall(true);
                    // Forward each formal parameter as an argument of the super call.
                    final List<XExpression> arguments = call.getFeatureCallArguments();
                    for (final IFormalParameterBuilder currentParam : sarlParams) {
                        final XFeatureCall argumentSource = XbaseFactory.eINSTANCE.createXFeatureCall();
                        arguments.add(argumentSource);
                        currentParam.setReferenceInto(argumentSource);
                    }
                }
            }
        }
    }
}
public class Uploader { /** * Send the data from the stream to the server . This is less efficient than * < i > upload ( File ) < / i > , but if you already have a stream , it ' s convenient . * This method takes care of temporarily making a File out of the stream , * making the request , and removing the temporary file . Having a File source * for the upload is necessary because the content - length must be sent along * with the request as per the HTTP Multipart POST protocol spec . */ public String upload ( InputStream in ) throws IOException { } }
File tempFile = File . createTempFile ( "fedora-upload-" , null ) ; FileOutputStream out = new FileOutputStream ( tempFile ) ; try { StreamUtility . pipeStream ( in , out , 8192 ) ; return upload ( tempFile ) ; } finally { in . close ( ) ; out . close ( ) ; if ( ! tempFile . delete ( ) ) { System . err . println ( "WARNING: Could not remove temporary file: " + tempFile . getName ( ) ) ; tempFile . deleteOnExit ( ) ; } }
public class GroupApi { /** * Creates a new project group . Available only for users who can create groups . * < pre > < code > GitLab Endpoint : POST / groups < / code > < / pre > * @ param name the name of the group to add * @ param path the path for the group * @ return the created Group instance * @ throws GitLabApiException if any exception occurs */ public Group addGroup ( String name , String path ) throws GitLabApiException { } }
Form formData = new Form ( ) ; formData . param ( "name" , name ) ; formData . param ( "path" , path ) ; Response response = post ( Response . Status . CREATED , formData , "groups" ) ; return ( response . readEntity ( Group . class ) ) ;
public class Calendar { /** * Creates a < code > DateFormat < / code > appropriate to this calendar . * This is a framework method for subclasses to override . This method * is responsible for creating the calendar - specific DateFormat and * DateFormatSymbols objects as needed . * @ param pattern the pattern , specific to the < code > DateFormat < / code > * subclass * @ param locale the locale for which the symbols should be drawn * @ return a < code > DateFormat < / code > appropriate to this calendar */ protected DateFormat handleGetDateFormat ( String pattern , Locale locale ) { } }
return handleGetDateFormat ( pattern , null , ULocale . forLocale ( locale ) ) ;
public class CssHelper { /** * Creates a List of CssMetaData instances that is merged by the given parameters * @ param metaData An array of CssMetaData instances . All instances will be in the returned list * @ return A list with all given CssMetaData instances */ public static List < CssMetaData < ? extends Styleable , ? > > createCssMetaDataList ( CssMetaData < ? extends Styleable , ? > ... metaData ) { } }
return createCssMetaDataList ( new ArrayList < > ( ) , metaData ) ;
public class HSBColor { /** * { @ inheritDoc } */ @ Override protected List < ? extends Object > getFieldValues ( ) { } }
return Arrays . asList ( hue ( ) , saturation ( ) , brightness ( ) , opacity ( ) ) ;
public class Client {

    /**
     * Make a call, passing {@code param}, to the IPC server running at
     * {@code addr} which is servicing the {@code protocol} protocol, with the
     * {@code ticket} credentials and {@code rpcTimeout}, returning the value.
     * Throws exceptions if there are network problems or if the remote code
     * threw an exception.
     */
    public Writable call(Writable param, InetSocketAddress addr, Class<?> protocol, UserGroupInformation ticket,
            int rpcTimeout, boolean fastProtocol) throws IOException {
        Call call = new Call(param);
        Connection connection = getConnection(addr, protocol, ticket, rpcTimeout, call);
        try {
            connection.sendParam(call, fastProtocol); // send the parameter
        } catch (RejectedExecutionException e) {
            // The connection's executor refused the work: treat as a closed connection.
            throw new ConnectionClosedException("connection has been closed", e);
        } catch (InterruptedException e) {
            // Surface interruption as an I/O failure, preserving the cause.
            throw (InterruptedIOException) new InterruptedIOException().initCause(e);
        }
        synchronized (call) {
            // Block until the receiving side marks the call done and notifies.
            while (!call.done) {
                try {
                    call.wait(); // wait for the result
                } catch (InterruptedException ie) {
                    // Exit on interruption.
                    throw (InterruptedIOException) new InterruptedIOException().initCause(ie);
                }
            }
            if (call.error != null) {
                if (call.error instanceof RemoteException) {
                    // Refill the stack trace so it reflects this client thread
                    // rather than the reader thread that recorded the error.
                    call.error.fillInStackTrace();
                    throw call.error;
                } else {
                    // local exception
                    throw wrapException(addr, call.error);
                }
            } else {
                return call.value;
            }
        }
    }
}
public class IOUtils {

    /**
     * Serializes the given {@link java.io.Serializable} object into an array of bytes.
     *
     * @param obj the object to serialize; must be {@link java.io.Serializable}
     * @return the byte array containing the serialized form of {@code obj}
     * @throws IOException if an I/O error occurs during the serialization process
     * @see java.io.ByteArrayOutputStream
     * @see java.io.ObjectOutputStream
     * @see java.io.Serializable
     */
    public static byte[] serialize(Object obj) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // try-with-resources replaces the manual close(objOut) helper: the
        // stream is flushed and closed even on failure, and the byte array is
        // only snapshotted AFTER the ObjectOutputStream has been fully closed
        // (the original returned from inside the try, before the close ran).
        try (ObjectOutputStream objOut = new ObjectOutputStream(out)) {
            objOut.writeObject(obj);
            objOut.flush();
        }
        return out.toByteArray();
    }
}
public class Positions { /** * Positions the owner to the bottom inside its parent . < br > * Respects the parent padding . * @ param < T > the generic type * @ param < U > the generic type * @ param offset the offset * @ return the int supplier */ public static < T extends ISized & IChild < U > , U extends ISized > IntSupplier middleAligned ( T owner , int offset ) { } }
return ( ) -> { U parent = owner . getParent ( ) ; if ( owner . getParent ( ) == null ) return 0 ; return ( int ) ( Math . ceil ( ( ( float ) parent . size ( ) . height ( ) - Padding . of ( parent ) . vertical ( ) - owner . size ( ) . height ( ) ) / 2 ) + offset ) ; } ;
public class AdminKeymatchAction {

    /**
     * Renders the key-match list page: registers the paged list of key-match
     * items for the JSP and re-populates the search form from the pager so
     * paging state survives the round trip.
     */
    private HtmlResponse asListHtml() {
        return asHtml(path_AdminKeymatch_AdminKeymatchJsp).renderWith(data -> {
            RenderDataUtil.register(data, "keyMatchItems", keyMatchService.getKeyMatchList(keyMatchPager));
            // page navi
        }).useForm(SearchForm.class, setup -> {
            setup.setup(form -> {
                // Only the id is carried over from the pager into the form.
                copyBeanToBean(keyMatchPager, form, op -> op.include("id"));
            });
        });
    }
}
public class WeeklyAutoScalingSchedule {

    /**
     * The schedule for Saturday.
     *
     * @param saturday
     *        The schedule for Saturday.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withSaturday(java.util.Map<String, String> saturday) {
        // Delegate to the setter, then return this for fluent chaining.
        setSaturday(saturday);
        return this;
    }
}
public class JsonReader {

    /**
     * Returns the type of the next token without consuming it.
     *
     * @return the {@code JsonToken} the next read operation would produce
     * @throws IOException if the underlying input fails while peeking ahead
     */
    public JsonToken peek() throws IOException {
        int p = peeked;
        if (p == PEEKED_NONE) {
            // Nothing buffered yet: inspect the input to classify the next token.
            p = doPeek();
        }
        // Map the internal peeked-state constant to the public token type.
        switch (p) {
            case PEEKED_BEGIN_OBJECT:
                return JsonToken.BEGIN_OBJECT;
            case PEEKED_END_OBJECT:
                return JsonToken.END_OBJECT;
            case PEEKED_BEGIN_ARRAY:
                return JsonToken.BEGIN_ARRAY;
            case PEEKED_END_ARRAY:
                return JsonToken.END_ARRAY;
            case PEEKED_SINGLE_QUOTED_NAME:
            case PEEKED_DOUBLE_QUOTED_NAME:
            case PEEKED_UNQUOTED_NAME:
                return JsonToken.NAME;
            case PEEKED_TRUE:
            case PEEKED_FALSE:
                return JsonToken.BOOLEAN;
            case PEEKED_NULL:
                return JsonToken.NULL;
            case PEEKED_SINGLE_QUOTED:
            case PEEKED_DOUBLE_QUOTED:
            case PEEKED_UNQUOTED:
            case PEEKED_BUFFERED:
                return JsonToken.STRING;
            case PEEKED_LONG:
            case PEEKED_NUMBER:
                return JsonToken.NUMBER;
            case PEEKED_EOF:
                return JsonToken.END_DOCUMENT;
            default:
                // Every peeked state is enumerated above; anything else is a bug.
                throw new AssertionError();
        }
    }
}
public class EndPointImpl { /** * For the purposes of asterisk IAX and SIP are both considered SIP . */ @ Override public boolean isSIP ( ) { } }
return this . _tech == TechType . SIP || this . _tech == TechType . IAX || this . _tech == TechType . IAX2 ;
public class ComplexNumber { /** * Multiply scalar value to a complex number . * @ param z1 Complex Number . * @ param scalar Scalar value . * @ return Returns new ComplexNumber instance containing the multiply of specified complex number with the scalar value . */ public static ComplexNumber Multiply ( ComplexNumber z1 , double scalar ) { } }
return new ComplexNumber ( z1 . real * scalar , z1 . imaginary * scalar ) ;
public class RocksDbWrapper { /** * Open a { @ link RocksDB } with default options in read / write mode . * @ param directory * directory to store { @ link RocksDB } data * @ param columnFamilies * list of column families to store key / value ( the column family * " default " will be automatically added ) * @ return * @ throws RocksDbException * @ throws IOException */ public static RocksDbWrapper openReadWrite ( File directory , String ... columnFamilies ) throws RocksDbException , IOException { } }
RocksDbWrapper rocksDbWrapper = new RocksDbWrapper ( directory , false ) ; rocksDbWrapper . setColumnFamilies ( RocksDbUtils . buildColumnFamilyDescriptors ( columnFamilies ) ) ; rocksDbWrapper . init ( ) ; return rocksDbWrapper ;
public class ObjectFactory {

    /**
     * Create an instance of {@code MsRun.Spotting.Plate.Pattern.Orientation}.
     *
     * @return a new, empty Orientation instance
     */
    public MsRun.Spotting.Plate.Pattern.Orientation createMsRunSpottingPlatePatternOrientation() {
        return new MsRun.Spotting.Plate.Pattern.Orientation();
    }
}
public class Stream { /** * Fetch the next event from a given stream * @ return the next event * @ throws IOException any io exception that could occur */ public StitchEvent < T > nextEvent ( ) throws IOException { } }
final Event nextEvent = eventStream . nextEvent ( ) ; if ( nextEvent == null ) { return null ; } return StitchEvent . fromEvent ( nextEvent , this . decoder ) ;
public class OAuthRequirements {

    /**
     * <pre>
     * The list of publicly documented OAuth scopes that are allowed access. An
     * OAuth token containing any of these scopes will be accepted.
     * Example:
     * canonical_scopes: https://www.googleapis.com/auth/calendar,
     * https://www.googleapis.com/auth/calendar.read
     * </pre>
     *
     * <code>string canonical_scopes = 1;</code>
     */
    public com.google.protobuf.ByteString getCanonicalScopesBytes() {
        java.lang.Object ref = canonicalScopes_;
        if (ref instanceof java.lang.String) {
            // First byte-level access: encode the cached String once and
            // store the ByteString back into the field so later calls reuse it.
            com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            canonicalScopes_ = b;
            return b;
        } else {
            // Field already holds the encoded ByteString.
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class ReviewedPagesTitles { /** * picks the article name from a MediaWiki api response . * @ param s text for parsing */ @ Override protected ImmutableList < String > parseElements ( String s ) { } }
XmlElement root = XmlConverter . getRootElement ( s ) ; List < String > titleCollection = Lists . newArrayList ( ) ; findContent ( root , titleCollection ) ; return ImmutableList . copyOf ( titleCollection ) ;
public class WireFeedOutput {

    /**
     * Writes to a Writer the XML representation for the given WireFeed.
     * If the feed encoding is not NULL, it will be used in the XML prolog
     * encoding attribute. It is the responsibility of the developer to ensure
     * the Writer instance is using the same charset encoding.
     * NOTE: This method delegates to {@code output(WireFeed, Writer, boolean)}.
     *
     * @param feed Abstract feed to create XML representation from. The type of
     *        the WireFeed must match the type given to the FeedOutput constructor.
     * @param writer Writer to write the XML representation for the given WireFeed.
     * @throws IllegalArgumentException thrown if the feed type of the
     *         WireFeedOutput and WireFeed don't match.
     * @throws IOException thrown if there was some problem writing to the Writer.
     * @throws FeedException thrown if the XML representation for the feed could
     *         not be created.
     */
    public void output(final WireFeed feed, final Writer writer)
            throws IllegalArgumentException, IOException, FeedException {
        // Delegates with a hard-coded true flag — presumably "pretty print";
        // TODO confirm against the three-argument overload's javadoc.
        this.output(feed, writer, true);
    }
}
public class ScriptContainer {

    /**
     * Write out the body content and report any errors that occurred.
     * Closes the id scope, emits the closing scope div (if one was opened),
     * writes the framework script and trailing blocks, then releases state.
     *
     * @return {@code EVAL_PAGE} so the rest of the page is evaluated
     * @throws JspException if a JSP exception has occurred
     */
    public int doEndTag() throws JspException {
        popIdScope();
        // Write out the script.
        WriteRenderAppender writer = new WriteRenderAppender(pageContext);
        // If we wrote out the scopeId div earlier, close it now.
        if (_idScope != null) {
            writer.append("</div>");
        }
        writeFrameworkScript(writer);
        writeAfterBlocks(writer);
        // Release per-invocation state before returning control to the page.
        localRelease();
        return EVAL_PAGE;
    }
}
public class MonetaryAmountDecimalFormatBuilder { /** * Creates the { @ link MonetaryAmountFormat } * If @ { link Locale } didn ' t set the default value is { @ link Locale # getDefault ( ) } * If @ { link MonetaryAmountProducer } didn ' t set the default value is { @ link MoneyProducer } * If @ { link CurrencyUnit } didn ' t set the default value is a currency from { @ link Locale } * @ return { @ link MonetaryAmountFormat } */ public MonetaryAmountFormat build ( ) { } }
if ( Objects . isNull ( locale ) ) { locale = Locale . getDefault ( ) ; } if ( Objects . isNull ( decimalFormat ) ) { decimalFormat = ( DecimalFormat ) NumberFormat . getCurrencyInstance ( locale ) ; } if ( Objects . isNull ( currencyUnit ) ) { currencyUnit = Monetary . getCurrency ( locale ) ; } if ( Objects . isNull ( producer ) ) { producer = new MoneyProducer ( ) ; } decimalFormat . setCurrency ( Currency . getInstance ( currencyUnit . getCurrencyCode ( ) ) ) ; return new MonetaryAmountDecimalFormat ( decimalFormat , producer , currencyUnit ) ;
public class SparseVector { /** * Parses { @ link SparseVector } from the given CSV string . * @ param csv the CSV string representing a vector * @ return a parsed vector */ public static SparseVector fromCSV ( String csv ) { } }
return Vector . fromCSV ( csv ) . to ( Vectors . SPARSE ) ;
public class Duration { /** * Obtains an instance of { @ code Duration } from an amount . * This obtains a duration based on the specified amount . * A TemporalAmount represents an amount of time , which may be date - based * or time - based , which this factory extracts to a duration . * The conversion loops around the set of units from the amount and uses * the duration of the unit to calculate the total Duration . * Only a subset of units are accepted by this method . * The unit must either have an exact duration or be ChronoUnit . DAYS which * is treated as 24 hours . If any other units are found then an exception is thrown . * @ param amount the amount to convert , not null * @ return a { @ code Duration } , not null * @ throws DateTimeException if the amount cannot be converted * @ throws ArithmeticException if a numeric overflow occurs */ public static Duration from ( TemporalAmount amount ) { } }
Jdk8Methods . requireNonNull ( amount , "amount" ) ; Duration duration = ZERO ; for ( TemporalUnit unit : amount . getUnits ( ) ) { duration = duration . plus ( amount . get ( unit ) , unit ) ; } return duration ;
public class ResourceInjectionBinding {

    /**
     * F743-22218.3
     * Resolves the declared type of a simple environment entry.
     * Returns a {@code Class} when the name matches a known env-entry type or
     * a loadable enum; returns the raw type name String when no class loader
     * is available yet; returns {@code null} when no type was declared.
     *
     * @param envEntry the env-entry whose type is being resolved
     * @return the resolved type (Class), the deferred type name (String), or null
     * @throws InjectionConfigurationException if the declared type is neither
     *         a standard env-entry type nor an enum
     */
    Object getEnvEntryType(EnvEntry envEntry) throws InjectionConfigurationException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        Object type = null;
        String typeName = envEntry.getTypeName();
        if (typeName != null) {
            // First, try to match one of the standard env-entry types by name.
            for (Class<?> typeClass : ENV_ENTRY_TYPES) {
                if (typeName.equals(typeClass.getName())) {
                    type = typeClass;
                    break;
                }
            }
            if (type == null) {
                if (ivNameSpaceConfig.getClassLoader() == null) {
                    // F743-32443 - We don't have a class loader, so we can't
                    // validate the type. Store it as a string for now;
                    // EnvEntryEnumSerializable will validate it later when used.
                    type = typeName;
                } else {
                    // Non-standard type: it must resolve to an enum class.
                    Class<?> classType = loadClass(typeName);
                    if (classType == null || !classType.isEnum()) {
                        Tr.error(tc, "INVALID_ENV_ENTRY_TYPE_CWNEN0064E", envEntry.getName(), ivModule, ivApplication, typeName);
                        throw new InjectionConfigurationException("A type, which is not valid, has been specified for the " + envEntry.getName() + " simple environment entry in the " + ivModule + " module of the " + ivApplication + " application: '" + typeName + "'.");
                    }
                    type = classType;
                }
            }
        } // d654504
        else {
            // Default to type of Object, to avoid later NPE when checking to
            // see if the specified injection type is compatible with the
            // variable we are injecting into.
            if (isTraceOn && tc.isDebugEnabled()) {
                Tr.debug(tc, "EnvEntry XML type is not set.");
            }
        }
        if (isTraceOn && tc.isDebugEnabled())
            Tr.debug(tc, "env-entry-type = " + ((type == null) ? "null" : type.getClass().getName()));
        return type;
    }
}
public class ListDeliveryStreamsRequestMarshaller {

    /**
     * Marshall the given parameter object.
     * Binds each field of the request to its protocol location; any failure
     * is wrapped in an {@code SdkClientException} with the original cause.
     *
     * @param listDeliveryStreamsRequest the request to marshal; must not be null
     * @param protocolMarshaller the marshaller that receives each field binding
     */
    public void marshall(ListDeliveryStreamsRequest listDeliveryStreamsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listDeliveryStreamsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listDeliveryStreamsRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(listDeliveryStreamsRequest.getDeliveryStreamType(), DELIVERYSTREAMTYPE_BINDING);
            protocolMarshaller.marshall(listDeliveryStreamsRequest.getExclusiveStartDeliveryStreamName(), EXCLUSIVESTARTDELIVERYSTREAMNAME_BINDING);
        } catch (Exception e) {
            // Preserve the cause when translating into the SDK's client exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class InternalSimpleAntlrParser {

    /**
     * InternalSimpleAntlr.g:290:1: ruleIdOrInt returns [AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken()] : (this_ID_0= RULE_ID | this_INT_1= RULE_INT ) ;
     * ANTLR-generated rule: matches either an ID or an INT terminal and
     * merges the matched token into the returned datatype-rule token.
     */
    public final AntlrDatatypeRuleToken ruleIdOrInt() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token this_ID_0 = null;
        Token this_INT_1 = null;
        enterRule();
        try {
            // InternalSimpleAntlr.g:293:28: ( (this_ID_0= RULE_ID | this_INT_1= RULE_INT ) )
            // InternalSimpleAntlr.g:294:1: (this_ID_0= RULE_ID | this_INT_1= RULE_INT )
            {
                // InternalSimpleAntlr.g:294:1: (this_ID_0= RULE_ID | this_INT_1= RULE_INT )
                // Decide between the two alternatives on one token of lookahead.
                int alt4 = 2;
                int LA4_0 = input.LA(1);
                if ((LA4_0 == RULE_ID)) {
                    alt4 = 1;
                } else if ((LA4_0 == RULE_INT)) {
                    alt4 = 2;
                } else {
                    // During backtracking, record failure instead of throwing.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return current;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 4, 0, input);
                    throw nvae;
                }
                switch (alt4) {
                    case 1:
                        // InternalSimpleAntlr.g:294:6: this_ID_0= RULE_ID
                        {
                            this_ID_0 = (Token) match(input, RULE_ID, FOLLOW_2);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                current.merge(this_ID_0);
                            }
                            if (state.backtracking == 0) {
                                newLeafNode(this_ID_0, grammarAccess.getIdOrIntAccess().getIDTerminalRuleCall_0());
                            }
                        }
                        break;
                    case 2:
                        // InternalSimpleAntlr.g:302:10: this_INT_1= RULE_INT
                        {
                            this_INT_1 = (Token) match(input, RULE_INT, FOLLOW_2);
                            if (state.failed) return current;
                            if (state.backtracking == 0) {
                                current.merge(this_INT_1);
                            }
                            if (state.backtracking == 0) {
                                newLeafNode(this_INT_1, grammarAccess.getIdOrIntAccess().getINTTerminalRuleCall_1());
                            }
                        }
                        break;
                }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class NodeSetDTM { /** * Returns the next node in the set and advances the position of the * iterator in the set . After a DTMIterator is created , the first call * to nextNode ( ) returns the first node in the set . * @ return The next < code > Node < / code > in the set being iterated over , or * < code > DTM . NULL < / code > if there are no more members in that set . * @ throws DOMException * INVALID _ STATE _ ERR : Raised if this method is called after the * < code > detach < / code > method was invoked . */ public int nextNode ( ) { } }
if ( ( m_next ) < this . size ( ) ) { int next = this . elementAt ( m_next ) ; m_next ++ ; return next ; } else return DTM . NULL ;
public class FixedBucketsHistogram {

    /**
     * Merge another histogram into this one. Only the state of this histogram
     * is updated. If the two histograms have identical buckets, a simpler
     * (element-wise) algorithm is used.
     *
     * @param otherHistogram the histogram to merge in; a null argument is a no-op
     */
    public void combineHistogram(FixedBucketsHistogram otherHistogram) {
        if (otherHistogram == null) {
            return;
        }
        // NOTE(review): locks are acquired in a caller-dependent order
        // (this.writeLock then other.readLock); two threads combining a pair
        // of histograms in opposite directions could deadlock — TODO confirm
        // that callers serialize combine operations.
        readWriteLock.writeLock().lock();
        otherHistogram.getReadWriteLock().readLock().lock();
        try {
            missingValueCount += otherHistogram.getMissingValueCount();
            // Identical bucketing permits the cheaper same-bucket merge path.
            if (bucketSize == otherHistogram.getBucketSize() && lowerLimit == otherHistogram.getLowerLimit() && upperLimit == otherHistogram.getUpperLimit()) {
                combineHistogramSameBuckets(otherHistogram);
            } else {
                combineHistogramDifferentBuckets(otherHistogram);
            }
        } finally {
            readWriteLock.writeLock().unlock();
            otherHistogram.getReadWriteLock().readLock().unlock();
        }
    }
}
public class DescribeFleetResult { /** * A list of robots . * @ param robots * A list of robots . */ public void setRobots ( java . util . Collection < Robot > robots ) { } }
if ( robots == null ) { this . robots = null ; return ; } this . robots = new java . util . ArrayList < Robot > ( robots ) ;
public class WhileyFileParser { /** * Parse an " lval " expression , which is a subset of the possible expressions * forms permitted on the left - hand side of an assignment . LVals are of the * form : * < pre > * LVal : : = LValTerm ( ' , ' LValTerm ) * ' ) ' * < / pre > * @ param scope * The enclosing scope for this statement , which determines the * set of visible ( i . e . declared ) variables and also the current * indentation level . * @ return */ private Tuple < LVal > parseLVals ( EnclosingScope scope ) { } }
int start = index ; ArrayList < LVal > elements = new ArrayList < > ( ) ; elements . add ( parseLVal ( index , scope ) ) ; // Check whether we have a multiple lvals or not while ( tryAndMatch ( true , Comma ) != null ) { // Add all expressions separated by a comma elements . add ( parseLVal ( index , scope ) ) ; // Done } return new Tuple < > ( elements ) ;
public class FieldFactory { /** * Returns the element finder . If it is not yet initialzed a new one is generated together with * the document containing the element . Caution : This ElementFinder is only valid for the * lifecycle of this Factory instance ! */ private XmlElementFinder getElementFinder ( final Element element ) { } }
if ( elementFinder == null ) { elementFinder = new XmlElementFinder ( element . getDocument ( ) ) ; } return elementFinder ;
public class GenKeys {

    /**
     * Generates an RSA key pair into the configured private/public key files,
     * then validates the pair by round-tripping a test string through
     * encrypt/decrypt. All progress and errors are reported via the returned
     * message lines rather than thrown.
     *
     * @return a {@code Msg} containing progress, results and any error lines
     */
    @Override
    public Msg genKeys() {
        final Msg infoLines = new Msg();
        try {
            final PKITools pki = new PKITools();
            // Both key-file parameters are mandatory; bail out early with usage hints.
            if (getPrivKeyFileName() == null) {
                infoLines.add("Must provide a -privkey <file> parameter");
                return infoLines;
            }
            if (getPublicKeyFileName() == null) {
                infoLines.add("Must provide a -pubkey <file> parameter");
                return infoLines;
            }
            final PKITools.RSAKeys keys = pki.genRSAKeysIntoFiles(getPrivKeyFileName(), getPublicKeyFileName(), true);
            if (keys == null) {
                infoLines.add("Generation of keys failed");
                return infoLines;
            }
            // Now try the keys on the test text, using the last key in the file.
            final int numKeys = pki.countKeys(getPrivKeyFileName());
            // if (debug) {
            // infoLines.add("Number of keys: " + numKeys);
            infoLines.add("test with---->" + testText);
            final String etext = pki.encryptWithKeyFile(getPublicKeyFileName(), testText, numKeys - 1);
            infoLines.add("encrypts to-->" + etext);
            final String detext = pki.decryptWithKeyFile(getPrivKeyFileName(), etext, numKeys - 1);
            infoLines.add("decrypts to-->" + detext);
            // The round trip must reproduce the original text exactly.
            if (!testText.equals(detext)) {
                infoLines.add("Validity check failed: encrypt/decrypt failure");
            } else {
                infoLines.add("");
                infoLines.add("Validity check succeeded");
            }
        } catch (final Throwable t) {
            // Report the failure to the log and to the caller's message lines.
            error(t);
            infoLines.add("Exception - check logs: " + t.getMessage());
        }
        return infoLines;
    }
}
public class PluginBundleImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated accessor: reflectively reads the AVAILABLE_VERSIONS
     * feature, resolving proxies ({@code true}).
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<PluginBundleVersion> getAvailableVersions() {
        return (EList<PluginBundleVersion>) eGet(StorePackage.Literals.PLUGIN_BUNDLE__AVAILABLE_VERSIONS, true);
    }
}
public class Converters { /** * Looks for an aproppiate converter for the given operation id . * @ param operId The operation id * @ return The first converter that matches this operation . */ public Converter getConverterForOperation ( String operId ) { } }
if ( getConverters ( ) != null ) { for ( Converter converter : getConverters ( ) ) { if ( check ( converter , converter . getOperations ( ) , operId ) ) { return converter ; } } } if ( getExternalConverters ( ) != null ) { for ( ExternalConverter ecs : getExternalConverters ( ) ) { final Converter c = PresentationManager . getPm ( ) . findExternalConverter ( ecs . getId ( ) ) ; if ( c != null && check ( c , ( ecs . getOperations ( ) == null ) ? c . getOperations ( ) : ecs . getOperations ( ) , operId ) ) { // TODO Add override of properties . return c ; } } } return null ;
public class Application { /** * add a blacklist item to the collection of blacklist associated to the application . * @ param blacklist */ public void addToBlacklists ( final Blacklist blacklist ) { } }
if ( null == getBlacklists ( ) ) { setBlacklists ( new java . util . TreeSet < Blacklist > ( ) ) ; } getBlacklists ( ) . add ( blacklist ) ;
public class GeoIntents { /** * Intent that should allow opening a map showing the given location ( if it exists ) * @ param latitude The latitude of the center of the map * @ param longitude The longitude of the center of the map * @ return the intent */ public static Intent newNavigationIntent ( float latitude , float longitude ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "google.navigation:q=" ) ; sb . append ( latitude ) ; sb . append ( "," ) ; sb . append ( longitude ) ; return new Intent ( Intent . ACTION_VIEW , Uri . parse ( sb . toString ( ) ) ) ;
public class TypesWalker { /** * When both wildcards are lower bounded ( ? super ) then bounds must be compatible . * For example , ? super Integer and ? super BigInteger are not compatible ( Integer , BigInteger ) * and ? super Comparable and ? super Number are compatible ( Number assignable to Comparable ) . * Of course , even incompatible lower bounds share some commons ( at least object ) but these types * could not be casted to one another and so no compatibility . * @ param one first wildcard type * @ param two second wildcard type * @ return true if onw of wildcards is not lower bounded or lower bounds are compatible , false when * lower bounds are incompatible */ private static boolean isLowerBoundsCompatible ( final WildcardType one , final WildcardType two ) { } }
boolean res = true ; final Type [ ] oneLower = one . getLowerBounds ( ) ; final Type [ ] twoLower = two . getLowerBounds ( ) ; if ( oneLower . length > 0 && twoLower . length > 0 ) { res = isCompatible ( GenericsUtils . resolveClassIgnoringVariables ( oneLower [ 0 ] ) , GenericsUtils . resolveClassIgnoringVariables ( twoLower [ 0 ] ) ) ; } return res ;
public class DateHelper { /** * Calculates a human readable date / time difference . * @ param d1 * a starting date * @ param d2 * an end date * @ return a string which reads like x days , y hours , z minutes . . . */ public static String formatDateDifference ( Date d1 , Date d2 ) { } }
long [ ] td = DateHelper . getTimeDifference ( d1 , d2 ) ; if ( td [ 0 ] > 0 ) { return td [ 0 ] + " day(s), " + td [ 1 ] + " hour(s)" ; } if ( td [ 1 ] > 0 ) { return td [ 1 ] + " hour(s), " + td [ 2 ] + " minute(s)" ; } if ( td [ 2 ] > 0 ) { return td [ 2 ] + " minute(s), " + td [ 3 ] + " second(s)" ; } return td [ 3 ] + " second(s)" ;
public class LazyList { /** * This method sets an offset of a resultset . For instance , if the offset is 101 , then the resultset will skip the * first 100 records . * It can be used in combination wit the limit like this : * < code > List < Event > events = Event . find ( " mnemonic = ? " , " GLUC " ) . offset ( 101 ) . limit ( 20 ) . orderBy ( " history _ event _ id " ) ; < / code > * This will produce 20 records , starting from record 101 . This is an efficient method , it will only retrieve records * that are necessary . * @ param offset * @ return instance of this < code > LazyList < / code > */ public < E extends Model > LazyList < E > offset ( long offset ) { } }
if ( fullQuery != null && ! forPaginator ) throw new IllegalArgumentException ( "Cannot use .offset() if using free form SQL" ) ; if ( offset < 0 ) throw new IllegalArgumentException ( "offset cannot be negative" ) ; this . offset = offset ; return ( LazyList < E > ) this ;
public class OctTreeNode {

    /**
     * Invoked when this object must be deserialized.
     * After default deserialization, restores the parent back-reference of
     * each of the eight children, since those links are not reconstructed
     * automatically.
     *
     * @param in is the input stream.
     * @throws IOException in case of input stream access error.
     * @throws ClassNotFoundException if some class was not found.
     */
    @SuppressWarnings("checkstyle:npathcomplexity")
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        final N me = toN();
        // Re-link every non-null child to this node. The second argument is
        // false on every call — presumably suppressing change notification
        // during deserialization; TODO confirm setParentNodeReference semantics.
        if (this.child1 != null) {
            this.child1.setParentNodeReference(me, false);
        }
        if (this.child2 != null) {
            this.child2.setParentNodeReference(me, false);
        }
        if (this.child3 != null) {
            this.child3.setParentNodeReference(me, false);
        }
        if (this.child4 != null) {
            this.child4.setParentNodeReference(me, false);
        }
        if (this.child5 != null) {
            this.child5.setParentNodeReference(me, false);
        }
        if (this.child6 != null) {
            this.child6.setParentNodeReference(me, false);
        }
        if (this.child7 != null) {
            this.child7.setParentNodeReference(me, false);
        }
        if (this.child8 != null) {
            this.child8.setParentNodeReference(me, false);
        }
    }
}
public class Say {

    /**
     * ----- ERROR -----
     * Logs the given message at ERROR level with no throwable and no
     * format arguments.
     *
     * @param message the message object to log
     */
    public static void error(Object message) {
        // The (Object[]) null cast disambiguates the varargs overload.
        log(Level.ERROR, message, null, (Object[]) null);
    }
}
public class MsgMgr { /** * Warn the user of a problem . * @ param styleNode Stylesheet node * @ param sourceNode Source tree node * @ param msg Message text to issue * @ param args Arguments to pass to the message * @ throws XSLProcessorException thrown if the active ProblemListener and XPathContext decide * the error condition is severe enough to halt processing . * @ throws TransformerException * @ xsl . usage internal */ public void warn ( SourceLocator srcLctr , Node styleNode , Node sourceNode , String msg , Object args [ ] ) throws TransformerException { } }
String formattedMsg = XSLMessages . createWarning ( msg , args ) ; ErrorListener errHandler = m_transformer . getErrorListener ( ) ; if ( null != errHandler ) errHandler . warning ( new TransformerException ( formattedMsg , srcLctr ) ) ; else System . out . println ( formattedMsg ) ;