signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DefaultGroovyMethods { /** * Collates this iterable into sub - lists of length < code > size < / code > stepping through the code < code > step < / code > * elements for each sub - list . Any remaining elements in the iterable after the subdivision will be dropped if * < code > keepRemainder < / code > is false . * Example : * < pre class = " groovyTestCase " > * def list = [ 1 , 2 , 3 , 4 ] * assert list . collate ( 2 , 2 , true ) = = [ [ 1 , 2 ] , [ 3 , 4 ] ] * assert list . collate ( 3 , 1 , true ) = = [ [ 1 , 2 , 3 ] , [ 2 , 3 , 4 ] , [ 3 , 4 ] , [ 4 ] ] * assert list . collate ( 3 , 1 , false ) = = [ [ 1 , 2 , 3 ] , [ 2 , 3 , 4 ] ] * < / pre > * @ param self an Iterable * @ param size the length of each sub - list in the returned list * @ param step the number of elements to step through for each sub - list * @ param keepRemainder if true , any remaining elements are returned as sub - lists . Otherwise they are discarded * @ return a List containing the data collated into sub - lists * @ throws IllegalArgumentException if the step is zero . * @ since 2.4.0 */ public static < T > List < List < T > > collate ( Iterable < T > self , int size , int step , boolean keepRemainder ) { } }
List < T > selfList = asList ( self ) ; List < List < T > > answer = new ArrayList < List < T > > ( ) ; if ( size <= 0 ) { answer . add ( selfList ) ; } else { if ( step == 0 ) throw new IllegalArgumentException ( "step cannot be zero" ) ; for ( int pos = 0 ; pos < selfList . size ( ) && pos > - 1 ; pos += step ) { if ( ! keepRemainder && pos > selfList . size ( ) - size ) { break ; } List < T > element = new ArrayList < T > ( ) ; for ( int offs = pos ; offs < pos + size && offs < selfList . size ( ) ; offs ++ ) { element . add ( selfList . get ( offs ) ) ; } answer . add ( element ) ; } } return answer ;
public class BlockPlacementPolicyConfigurable {
    /**
     * Verifies if testing nodes are within right windows of first node.
     *
     * @param first    first node being considered
     * @param testing1 node we are testing to check if it is within window or not
     * @param testing2 node we are testing to check if it is within window or not
     * @return We return true if it is successful, and not otherwise
     */
    private boolean inWindow(DatanodeDescriptor first, DatanodeDescriptor testing1, DatanodeDescriptor testing2) {
        readLock();
        try {
            // Both testing nodes must sit on the same rack; otherwise they cannot
            // share a machine window.
            if (!testing1.getNetworkLocation().equals(testing2.getNetworkLocation())) {
                return false;
            }
            RackRingInfo rackInfo = racksMap.get(first.getNetworkLocation());
            assert (rackInfo != null);
            Integer machineId = rackInfo.findNode(first);
            assert (machineId != null);
            final int rackWindowStart = rackInfo.index;
            final RackRingInfo rackTest = racksMap.get(testing1.getNetworkLocation());
            assert (rackTest != null);
            // Ring distance (mod #racks) from first's rack to the rack under test.
            final int rackDist = (rackTest.index - rackWindowStart + racks.size()) % racks.size();
            if (rackDist < rackWindow + 1 && rackTest.index != rackInfo.index) { // inside rack window
                final int rackSize = rackTest.rackNodes.size();
                Integer idN2 = rackTest.findNode(testing1);
                assert (idN2 != null);
                Integer idN3 = rackTest.findNode(testing2);
                assert (idN3 != null);
                final Integer idFirst = rackInfo.findNode(first);
                assert (idFirst != null);
                final int sizeFirstRack = rackInfo.rackNodes.size();
                // Map first's index into the test rack's coordinate space, since the
                // two racks may hold different numbers of machines.
                final int end = idFirst * rackSize / sizeFirstRack;
                // proportional to previous of idFirst
                final int prevIdFirst = (idFirst + sizeFirstRack - 1) % sizeFirstRack;
                int start = (prevIdFirst * rackSize / sizeFirstRack);
                int distPropWindow = (end - start + rackSize) % rackSize;
                // Exclude the start position itself when the window is non-empty.
                if (distPropWindow > 0) {
                    start = (start + 1) % rackSize;
                    distPropWindow--;
                }
                // All distances are taken on the ring (mod rackSize).
                int distIdN2 = (idN2 - start + rackSize) % rackSize;
                int distIdN3 = (idN3 - start + rackSize) % rackSize;
                int distN3N2 = (idN3 - idN2 + rackSize) % rackSize;
                int distN2N3 = (idN2 - idN3 + rackSize) % rackSize;
                // Either testing node may act as the anchor inside the proportional
                // window, provided the other is within machineWindow of it.
                if (distIdN2 <= distPropWindow && distN3N2 < machineWindow)
                    return true;
                if (distIdN3 <= distPropWindow && distN2N3 < machineWindow)
                    return true;
            }
            return false;
        } finally {
            readUnlock();
        }
    }
}
public class Modbus {
    /**
     * Shutdown/stop any shared resources that may be in use, blocking until finished or interrupted.
     *
     * @param timeout the duration to wait.
     * @param unit    the {@link TimeUnit} of the {@code timeout} duration.
     * @throws InterruptedException if the current thread is interrupted while waiting.
     */
    public static void releaseSharedResources(long timeout, TimeUnit unit) throws InterruptedException {
        // NOTE(review): awaitTermination() only waits — it does not initiate shutdown of the
        // executor. Confirm that shutdown() is triggered elsewhere; otherwise this call simply
        // blocks for the full timeout without releasing anything.
        sharedExecutor().awaitTermination(timeout, unit);
        // Gracefully shut down the shared Netty event loop and wait up to the same timeout.
        sharedEventLoop().shutdownGracefully().await(timeout, unit);
        // Timer stop is synchronous; no waiting needed.
        sharedWheelTimer().stop();
    }
}
public class HexUtil { /** * 将指定int值转换为Unicode字符串形式 , 常用于特殊字符 ( 例如汉字 ) 转Unicode形式 < br > * 转换的字符串如果u后不足4位 , 则前面用0填充 , 例如 : * < pre > * ' 我 ' = 》 \ u4f60 * < / pre > * @ param value int值 , 也可以是char * @ return Unicode表现形式 */ public static String toUnicodeHex ( int value ) { } }
final StringBuilder builder = new StringBuilder ( 6 ) ; builder . append ( "\\u" ) ; String hex = toHex ( value ) ; int len = hex . length ( ) ; if ( len < 4 ) { builder . append ( "0000" , 0 , 4 - len ) ; // 不足4位补0 } builder . append ( hex ) ; return builder . toString ( ) ;
public class TransmissionData {
    /**
     * Sets the layout to use for this transmission to be a primary header only,
     * copying the supplied values into the primary header fields and resetting the
     * remaining-byte counter to the full segment length.
     *
     * @param segmentLength total length of the segment being transmitted
     * @param priority      transmission priority
     * @param isPooled      whether the backing buffer is pooled
     * @param isExchange    whether this is an exchange (request/response) send
     * @param packetNumber  packet sequence number
     * @param segmentType   JFap segment type code
     * @param sendListener  listener to notify on send completion
     */
    protected void setLayoutToPrimary(int segmentLength, int priority, boolean isPooled, boolean isExchange, int packetNumber, int segmentType, SendListener sendListener) {
        if (tc.isEntryEnabled())
            SibTr.entry(this, tc, "setLayoutToPrimary",
                    new Object[] { "" + segmentLength, "" + priority, "" + isPooled, "" + isExchange, "" + packetNumber, "" + segmentType, sendListener });
        // Populate the primary header verbatim from the arguments.
        primaryHeaderFields.segmentLength = segmentLength;
        primaryHeaderFields.priority = priority;
        primaryHeaderFields.isPooled = isPooled;
        primaryHeaderFields.isExchange = isExchange;
        primaryHeaderFields.packetNumber = packetNumber;
        primaryHeaderFields.segmentType = segmentType;
        this.sendListener = sendListener;
        // Nothing transmitted yet: the whole segment remains.
        transmissionRemaining = segmentLength;
        layout = JFapChannelConstants.XMIT_PRIMARY_ONLY;
        if (tc.isEntryEnabled())
            SibTr.exit(this, tc, "setLayoutToPrimary");
    }
}
public class PrimitiveParameter { /** * Returns this parameter ' s timestamp ( or other kind of position value ) . A * < code > null < / code > value means the timestamp is unknown . * @ return a { @ link Long } . */ public Long getPosition ( ) { } }
Interval interval = getInterval ( ) ; if ( interval != null ) { return interval . getMinStart ( ) ; } else { return null ; }
public class QOrderBySection { /** * { @ inheritDoc } */ @ Override public QOrderBySection prepare ( final AbstractObjectQuery < ? > _query ) throws EFapsException { } }
for ( final AbstractQPart part : this . parts ) { part . prepare ( _query , null ) ; } return this ;
public class Job {
    /**
     * Check and update the state of this job. The state changes
     * depending on its current state and the states of the depending jobs.
     *
     * @return the (possibly updated) state of this job
     */
    synchronized int checkState() {
        // A running job may have finished/failed since the last check.
        if (this.state == Job.RUNNING) {
            checkRunningState();
        }
        // Only WAITING jobs can transition below; anything else is returned as-is.
        if (this.state != Job.WAITING) {
            return this.state;
        }
        // No dependencies: the job can run immediately.
        if (this.dependingJobs == null || this.dependingJobs.size() == 0) {
            this.state = Job.READY;
            return this.state;
        }
        Job pred = null;
        int n = this.dependingJobs.size();
        for (int i = 0; i < n; i++) {
            pred = this.dependingJobs.get(i);
            // Recursively resolve each predecessor's state first.
            int s = pred.checkState();
            if (s == Job.WAITING || s == Job.READY || s == Job.RUNNING) {
                break; // a pred is still not completed, continue in WAITING
                       // state
            }
            if (s == Job.FAILED || s == Job.DEPENDENT_FAILED) {
                this.state = Job.DEPENDENT_FAILED;
                this.message = "depending job " + i + " with jobID " + pred.getJobID() + " failed. " + pred.getMessage();
                break;
            }
            // pred must be in success state
            if (i == n - 1) {
                // All predecessors succeeded: this job becomes READY.
                this.state = Job.READY;
            }
        }
        return this.state;
    }
}
public class Strings { /** * 将两个用delimiter串起来的字符串 , 合并成新的串 , 重复的 " 单词 " 只出现一次 . * 如果第一个字符串以delimiter开头 , 第二个字符串以delimiter结尾 , < br > * 合并后的字符串仍以delimiter开头和结尾 . < br > * < blockquote > * < pre > * mergeSeq ( & quot ; , 1,2 , & quot ; , & quot ; & quot ; ) = & quot ; , 1,2 , & quot ; ; * mergeSeq ( & quot ; , 1,2 , & quot ; , null ) = & quot ; , 1,2 , & quot ; ; * mergeSeq ( & quot ; 1,2 & quot ; , & quot ; 3 & quot ; ) = & quot ; 1,2,3 & quot ; ; * mergeSeq ( & quot ; 1,2 & quot ; , & quot ; 3 , & quot ; ) = & quot ; 1,2,3 , & quot ; ; * mergeSeq ( & quot ; , 1,2 & quot ; , & quot ; 3 , & quot ; ) = & quot ; , 1,2,3 , & quot ; ; * mergeSeq ( & quot ; , 1,2 , & quot ; , & quot ; , 3 , & quot ; ) = & quot ; , 1,2,3 , & quot ; ; * < / pre > * < / blockquote > * @ param first * a { @ link java . lang . String } object . * @ param second * a { @ link java . lang . String } object . * @ param delimiter * a { @ link java . lang . String } object . * @ return a { @ link java . lang . String } object . */ public static String mergeSeq ( final String first , final String second , final String delimiter ) { } }
if ( isNotEmpty ( second ) && isNotEmpty ( first ) ) { List < String > firstSeq = Arrays . asList ( split ( first , delimiter ) ) ; List < String > secondSeq = Arrays . asList ( split ( second , delimiter ) ) ; Collection < String > rs = CollectUtils . union ( firstSeq , secondSeq ) ; StringBuilder buf = new StringBuilder ( ) ; for ( final String ele : rs ) buf . append ( delimiter ) . append ( ele ) ; if ( buf . length ( ) > 0 ) buf . append ( delimiter ) ; return buf . toString ( ) ; } else { return ( ( first == null ) ? "" : first ) + ( ( second == null ) ? "" : second ) ; }
public class PresentsSession {
    /**
     * Called by the client manager when a new connection arrives that authenticates as this
     * already established client. This must only be called from the congmr thread.
     *
     * @param req  the authentication request for the resuming connection; retained so the
     *             proper bootstrap services can be delivered
     * @param conn the freshly authenticated connection to adopt for this session
     */
    protected void resumeSession(AuthRequest req, PresentsConnection conn) {
        // check to see if we've already got a connection object, in which case it's probably stale
        Connection oldconn = getConnection();
        if (oldconn != null && !oldconn.isClosed()) {
            log.info("Closing stale connection", "old", oldconn, "new", conn);
            // close the old connection (which results in everything being properly unregistered)
            oldconn.close();
        }
        // note our new auth request (so that we can deliver the proper bootstrap services)
        _areq = req;
        // start using the new connection
        setConnection(conn);
        // if a client connects, drops the connection and reconnects within the span of a very
        // short period of time, we'll find ourselves in resumeSession() before their client object
        // was resolved from the initial connection; in such a case, we can simply bail out here
        // and let the original session establishment code take care of initializing this resumed
        // session
        if (_clobj == null) {
            log.info("Rapid-fire reconnect caused us to arrive in resumeSession() before the " +
                     "original session resolved its client object " + this + ".");
            return;
        }
        // we need to get onto the dobj thread so that we can finalize resumption of the session
        _omgr.postRunnable(new Runnable() {
            public void run() {
                // now that we're on the dobjmgr thread we can resume our session resumption
                finishResumeSession();
            }
        });
    }
}
public class JCuda {
    /**
     * [C++ API] Binds an array to a texture.
     * <pre>
     * template &lt;class T, int dim, enum cudaTextureReadMode readMode&gt;
     * cudaError_t cudaBindTextureToArray(
     *     const texture&lt;T, dim, readMode&gt;&amp; tex,
     *     cudaArray_const_t array,
     *     const cudaChannelFormatDesc&amp; desc) [inline]
     * </pre>
     * <div>
     * <p>Binds the CUDA array <tt>array</tt> to the texture reference <tt>tex</tt>.
     * <tt>desc</tt> describes how the memory is interpreted when fetching values
     * from the texture. Any CUDA array previously bound to <tt>tex</tt> is unbound.
     * <div>
     * <span>Note:</span>
     * <p>Note that this function may also return error codes from previous,
     * asynchronous launches.
     * </div>
     * </div>
     *
     * @param texref Texture to bind
     * @param array  Memory array on device
     * @param desc   Channel format
     * @return cudaSuccess, cudaErrorInvalidValue, cudaErrorInvalidDevicePointer,
     *         cudaErrorInvalidTexture
     * @see JCuda#cudaCreateChannelDesc
     * @see JCuda#cudaGetChannelDesc
     * @see JCuda#cudaGetTextureReference
     * @see JCuda#cudaBindTexture
     * @see JCuda#cudaBindTexture2D
     * @see JCuda#cudaUnbindTexture
     * @see JCuda#cudaGetTextureAlignmentOffset
     */
    public static int cudaBindTextureToArray(textureReference texref, cudaArray array, cudaChannelFormatDesc desc) {
        // Delegate to the native binding; checkResult applies the library's
        // error-handling policy to the returned status code.
        return checkResult(cudaBindTextureToArrayNative(texref, array, desc));
    }
}
public class DeepLearning {
    /**
     * Compute the actual train_samples_per_iteration size from the user-given parameter.
     * Special values: 0 or (-1 without replication) = one epoch, -1 = #nodes x #rows,
     * -2 = auto-tune from CPU speed, network speed and model size.
     *
     * @param mp      Model parameter (DeepLearning object)
     * @param numRows number of training rows
     * @param model   DL Model
     * @return The total number of training rows to be processed per iteration (summed over on all nodes)
     */
    private static long computeTrainSamplesPerIteration(final DeepLearning mp, final long numRows, DeepLearningModel model) {
        long tspi = mp.train_samples_per_iteration;
        assert (tspi == 0 || tspi == -1 || tspi == -2 || tspi >= 1);
        if (tspi == 0 || (!mp.replicate_training_data && tspi == -1)) {
            // One epoch per iteration.
            tspi = numRows;
            if (!mp.quiet_mode)
                Log.info("Setting train_samples_per_iteration (" + mp.train_samples_per_iteration + ") to one epoch: #rows (" + tspi + ").");
        } else if (tspi == -1) {
            // One epoch per node per iteration (replicated training data).
            tspi = (mp.single_node_mode ? 1 : H2O.CLOUD.size()) * numRows;
            if (!mp.quiet_mode)
                Log.info("Setting train_samples_per_iteration (" + mp.train_samples_per_iteration + ") to #nodes x #rows (" + tspi + ").");
        } else if (tspi == -2) {
            // automatic tuning based on CPU speed, network speed and model size
            // measure cpu speed
            double total_gflops = 0;
            for (H2ONode h2o : H2O.CLOUD._memary) {
                HeartBeat hb = h2o._heartbeat;
                total_gflops += hb._gflops;
            }
            if (mp.single_node_mode) total_gflops /= H2O.CLOUD.size();
            if (total_gflops == 0) {
                // Heartbeats had no GFlops info: benchmark locally instead.
                total_gflops = Linpack.run(H2O.SELF._heartbeat._cpus_allowed) * (mp.single_node_mode ? 1 : H2O.CLOUD.size());
            }
            final long model_size = model.model_info().size();
            // Message size is 4 bytes per model coefficient, clamped to Integer.MAX_VALUE on overflow.
            int[] msg_sizes = new int[]{ (int) (model_size * 4) == (model_size * 4) ? (int) (model_size * 4) : Integer.MAX_VALUE };
            double[] microseconds_collective = new double[msg_sizes.length];
            NetworkTest.NetworkTester nt = new NetworkTest.NetworkTester(msg_sizes, null, microseconds_collective, model_size > 1e6 ? 1 : 5 /* repeats */, false, true /* only collectives */);
            nt.compute2();
            // length of the network traffic queue based on log-tree rollup (2 log(nodes))
            int network_queue_length = mp.single_node_mode || H2O.CLOUD.size() == 1 ? 1 : 2 * (int) Math.floor(Math.log(H2O.CLOUD.size()) / Math.log(2));
            // heuristics
            double flops_overhead_per_row = 30;
            if (mp.activation == Activation.Maxout || mp.activation == Activation.MaxoutWithDropout) {
                flops_overhead_per_row *= 8;
            } else if (mp.activation == Activation.Tanh || mp.activation == Activation.TanhWithDropout) {
                flops_overhead_per_row *= 5;
            }
            // target fraction of comm vs cpu time: 5%
            double fraction = mp.single_node_mode || H2O.CLOUD.size() == 1 ? 1e-3 : 0.05; // one single node mode, there's no model averaging effect, so less need to shorten the M/R iteration
            // estimate the time for communication (network) and training (compute)
            model.time_for_communication_us = (H2O.CLOUD.size() == 1 ? 1e4 /* add 10ms for single-node */ : 0) + network_queue_length * microseconds_collective[0];
            double time_per_row_us = flops_overhead_per_row * model_size / (total_gflops * 1e9) / H2O.SELF._heartbeat._cpus_allowed * 1e6;
            // compute the optimal number of training rows per iteration
            // fraction := time_comm_us / (time_comm_us + tspi * time_per_row_us)  ==>  tspi = (time_comm_us / fraction - time_comm_us) / time_per_row_us
            tspi = (long) ((model.time_for_communication_us / fraction - model.time_for_communication_us) / time_per_row_us);
            tspi = Math.min(tspi, (mp.single_node_mode ? 1 : H2O.CLOUD.size()) * numRows * 10); // not more than 10x of what train_samples_per_iteration=-1 would do
            // If the number is close to a multiple of epochs, use that -> prettier scoring
            if (tspi > numRows && Math.abs(tspi % numRows) / (double) numRows < 0.2)
                tspi = tspi - tspi % numRows;
            tspi = Math.min(tspi, (long) (mp.epochs * numRows / 10)); // limit to number of epochs desired, but at least 10 iterations total
            tspi = Math.max(1, tspi); // at least 1 point
            if (!mp.quiet_mode) {
                Log.info("Auto-tuning parameter 'train_samples_per_iteration':");
                Log.info("Estimated compute power : " + (int) total_gflops + " GFlops");
                Log.info("Estimated time for comm : " + PrettyPrint.usecs((long) model.time_for_communication_us));
                Log.info("Estimated time per row : " + ((long) time_per_row_us > 0 ? PrettyPrint.usecs((long) time_per_row_us) : time_per_row_us + " usecs"));
                Log.info("Estimated training speed: " + (int) (1e6 / time_per_row_us) + " rows/sec");
                Log.info("Setting train_samples_per_iteration (" + mp.train_samples_per_iteration + ") to auto-tuned value: " + tspi);
            }
        } else {
            // limit user-given value to number of epochs desired
            tspi = Math.min(tspi, (long) (mp.epochs * numRows));
        }
        // After resolution, tspi must be a concrete positive row count.
        assert (tspi != 0 && tspi != -1 && tspi != -2 && tspi >= 1);
        return tspi;
    }
}
public class Quaternion {
    /**
     * Sets this quaternion to one that rotates onto the given unit axes.
     *
     * @param nx the target x axis (assumed unit length — TODO confirm caller contract)
     * @param ny the target y axis
     * @param nz the target z axis
     * @return a reference to this quaternion, for chaining.
     */
    public Quaternion fromAxes(IVector3 nx, IVector3 ny, IVector3 nz) {
        double nxx = nx.x(), nyy = ny.y(), nzz = nz.z();
        // Squared quaternion components recovered from the diagonal of the rotation
        // matrix whose columns are (nx, ny, nz) — standard matrix-to-quaternion math.
        double x2 = (1f + nxx - nyy - nzz) / 4f;
        double y2 = (1f - nxx + nyy - nzz) / 4f;
        double z2 = (1f - nxx - nyy + nzz) / 4f;
        double w2 = (1f - x2 - y2 - z2);
        // Component signs come from the off-diagonal comparisons so the result is
        // consistent with a non-negative w.
        return set(Math.sqrt(x2) * (ny.z() >= nz.y() ? +1f : -1f),
                   Math.sqrt(y2) * (nz.x() >= nx.z() ? +1f : -1f),
                   Math.sqrt(z2) * (nx.y() >= ny.x() ? +1f : -1f),
                   Math.sqrt(w2));
    }
}
public class MerkleTree { /** * Find the { @ link Hashable } node that matches the given { @ code range } . * @ param range Range to find * @ return { @ link Hashable } found . If nothing found , return { @ link Leaf } with null hash . */ private Hashable find ( Range < Token > range ) { } }
try { return findHelper ( root , new Range < Token > ( fullRange . left , fullRange . right ) , range ) ; } catch ( StopRecursion e ) { return new Leaf ( ) ; }
public class S3ReferenceDataSourceMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param s3ReferenceDataSource the object to marshall; must not be null
     * @param protocolMarshaller    the marshaller receiving each bound field
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(S3ReferenceDataSource s3ReferenceDataSource, ProtocolMarshaller protocolMarshaller) {
        if (s3ReferenceDataSource == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field under its protocol binding constant.
            protocolMarshaller.marshall(s3ReferenceDataSource.getBucketARN(), BUCKETARN_BINDING);
            protocolMarshaller.marshall(s3ReferenceDataSource.getFileKey(), FILEKEY_BINDING);
            protocolMarshaller.marshall(s3ReferenceDataSource.getReferenceRoleARN(), REFERENCEROLEARN_BINDING);
        } catch (Exception e) {
            // Wrap and rethrow, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FailClosureTarget { /** * Throws { @ link MojoExecutionException } . */ public Object call ( final @ Nullable Object [ ] args ) throws Exception { } }
if ( args == null || args . length == 0 ) { throw new MojoExecutionException ( FAILED ) ; } else if ( args . length == 1 ) { if ( args [ 0 ] instanceof Throwable ) { Throwable cause = ( Throwable ) args [ 0 ] ; throw new MojoExecutionException ( cause . getMessage ( ) , cause ) ; } else { throw new MojoExecutionException ( String . valueOf ( args [ 0 ] ) ) ; } } else if ( args . length == 2 ) { if ( args [ 1 ] instanceof Throwable ) { throw new MojoExecutionException ( String . valueOf ( args [ 0 ] ) , ( Throwable ) args [ 1 ] ) ; } else { throw new Error ( "Invalid arguments to fail(Object, Throwable), second argument must be a Throwable" ) ; } } else { throw new Error ( "Too many arguments; expected one of: fail(), fail(Object) or fail(Object, Throwable)" ) ; }
public class AssociationValue { /** * Get the primitive attributes for the associated object . * @ return attributes for associated objects * @ deprecated replaced by { @ link # getAllAttributes ( ) } after introduction of nested associations */ @ Deprecated @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public Map < String , PrimitiveAttribute < ? > > getAttributes ( ) { if ( ! isPrimitiveOnly ( ) ) { throw new UnsupportedOperationException ( "Primitive API not supported for nested association values" ) ; } return ( Map ) attributes ;
public class DefaultContainerDescription { /** * Queries the running container and attempts to lookup the information from the running container . * @ param client the client used to execute the management operation * @ return the container description * @ throws IOException if an error occurs while executing the management operation * @ throws OperationExecutionException if the operation used to query the container fails */ static DefaultContainerDescription lookup ( final ModelControllerClient client ) throws IOException , OperationExecutionException { } }
final ModelNode op = Operations . createReadResourceOperation ( new ModelNode ( ) . setEmptyList ( ) ) ; op . get ( ClientConstants . INCLUDE_RUNTIME ) . set ( true ) ; final ModelNode result = client . execute ( op ) ; if ( Operations . isSuccessfulOutcome ( result ) ) { final ModelNode model = Operations . readResult ( result ) ; final String productName = getValue ( model , "product-name" , "WildFly" ) ; final String productVersion = getValue ( model , "product-version" ) ; final String releaseVersion = getValue ( model , "release-version" ) ; final String launchType = getValue ( model , "launch-type" ) ; return new DefaultContainerDescription ( productName , productVersion , releaseVersion , launchType , "DOMAIN" . equalsIgnoreCase ( launchType ) ) ; } throw new OperationExecutionException ( op , result ) ;
public class JobInProgress {
    /**
     * The job is done since all it's component tasks are either
     * successful or have failed. Transitions the job to SUCCEEDED, finalizes
     * progress counters, logs the summary and history, and releases resources.
     */
    private void jobComplete() {
        final JobTrackerInstrumentation metrics = jobtracker.getInstrumentation();
        // All tasks are complete, then the job is done!
        if (this.status.getRunState() == JobStatus.RUNNING || this.status.getRunState() == JobStatus.PREP) {
            changeStateTo(JobStatus.SUCCEEDED);
            this.status.setCleanupProgress(1.0f);
            // Jobs with no map/reduce tasks never report progress; force 100%.
            if (maps.length == 0) {
                this.status.setMapProgress(1.0f);
            }
            if (reduces.length == 0) {
                this.status.setReduceProgress(1.0f);
            }
            this.finishTime = JobTracker.getClock().getTime();
            LOG.info("Job " + this.status.getJobID() + " has completed successfully.");
            // Log the job summary (this should be done prior to logging to
            // job-history to ensure job-counters are in-sync
            JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
            Counters counters = getCounters();
            // Log job-history
            JobHistory.JobInfo.logFinished(this.status.getJobID(), finishTime,
                    this.finishedMapTasks, this.finishedReduceTasks,
                    failedMapTasks, failedReduceTasks,
                    killedMapTasks, killedReduceTasks,
                    getMapCounters(), getReduceCounters(), counters);
            // Note that finalize will close the job history handles which garbage collect
            // might try to finalize
            garbageCollect();
            metrics.completeJob(this.conf, this.status.getJobID());
        }
    }
}
public class KeyStoreManager { /** * Open the provided filename as an outputstream . * @ param fileName * @ return OutputStream * @ throws MalformedURLException * @ throws IOException */ public OutputStream getOutputStream ( String fileName ) throws MalformedURLException , IOException { } }
try { GetKeyStoreOutputStreamAction action = new GetKeyStoreOutputStreamAction ( fileName ) ; return AccessController . doPrivileged ( action ) ; } catch ( PrivilegedActionException e ) { Exception ex = e . getException ( ) ; FFDCFilter . processException ( e , getClass ( ) . getName ( ) , "getOutputStream" , new Object [ ] { fileName , this } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Exception opening keystore; " + ex ) ; if ( ex instanceof MalformedURLException ) throw ( MalformedURLException ) ex ; else if ( ex instanceof IOException ) throw ( IOException ) ex ; throw new IOException ( ex . getMessage ( ) ) ; }
public class ImageManipulation {
    /**
     * Method automatically called by browser to handle image manipulations.
     * Supported ops: watermark, resize, zoom, brightness, grayscale, crop, convert.
     *
     * @param req Browser request to servlet
     * @param res Response sent back to browser after image manipulation
     * @throws IOException      If an input or output exception occurred
     * @throws ServletException If a servlet exception occurred
     */
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        // Run AWT headless — no display is available in a servlet container.
        System.setProperty("java.awt.headless", "true");
        // collect all possible parameters for servlet
        String url = req.getParameter("url");
        String op = req.getParameter("op");
        String newWidth = req.getParameter("newWidth");
        String brightAmt = req.getParameter("brightAmt");
        String zoomAmt = req.getParameter("zoomAmt");
        String wmText = req.getParameter("wmText");
        String cropX = req.getParameter("cropX");
        String cropY = req.getParameter("cropY");
        String cropWidth = req.getParameter("cropWidth");
        String cropHeight = req.getParameter("cropHeight");
        String convertTo = req.getParameter("convertTo");
        if (convertTo != null) {
            convertTo = convertTo.toLowerCase();
        }
        try {
            if (op == null) {
                throw new ServletException("op parameter not specified.");
            }
            String outputMimeType;
            // get the image via url and put it into the ImagePlus processor.
            BufferedImage img = getImage(url);
            // do watermarking stuff — done on the BufferedImage before it becomes an ImageProcessor
            if (op.equals("watermark")) {
                if (wmText == null) {
                    throw new ServletException("Must specify wmText.");
                }
                Graphics g = img.getGraphics();
                // Font scales with image width, with a 10pt floor.
                int fontSize = img.getWidth() * 3 / 100;
                if (fontSize < 10) {
                    fontSize = 10;
                }
                g.setFont(new Font("Lucida Sans", Font.BOLD, fontSize));
                FontMetrics fm = g.getFontMetrics();
                int stringWidth = (int) fm.getStringBounds(wmText, g).getWidth();
                // Centered horizontally, near the bottom edge.
                int x = img.getWidth() / 2 - stringWidth / 2;
                int y = img.getHeight() - fm.getHeight();
                g.setColor(new Color(180, 180, 180));
                g.fill3DRect(x - 10, y - fm.getHeight() - 4, stringWidth + 20, fm.getHeight() + 12, true);
                // Dark text offset by (2,2) under light text gives an embossed look.
                g.setColor(new Color(100, 100, 100));
                g.drawString(wmText, x + 2, y + 2);
                g.setColor(new Color(240, 240, 240));
                g.drawString(wmText, x, y);
            }
            ImageProcessor ip = new ImagePlus("temp", img).getProcessor();
            // if the inputMimeType is image/gif, need to convert to RGB in any case
            // NOTE(review): inputMimeType and alreadyConvertedToRGB are fields set
            // elsewhere (presumably by getImage) — confirm they are populated before this point.
            if (inputMimeType.equals("image/gif")) {
                ip = ip.convertToRGB();
                alreadyConvertedToRGB = true;
            }
            // causes scale() and resize() to do bilinear interpolation
            ip.setInterpolate(true);
            if (!op.equals("convert")) {
                // Manipulation ops keep the input MIME type.
                if (op.equals("resize")) {
                    ip = resize(ip, newWidth);
                } else if (op.equals("zoom")) {
                    ip = zoom(ip, zoomAmt);
                } else if (op.equals("brightness")) {
                    ip = brightness(ip, brightAmt);
                } else if (op.equals("watermark")) {
                    // this is now taken care of beforehand (see above)
                } else if (op.equals("grayscale")) {
                    ip = grayscale(ip);
                } else if (op.equals("crop")) {
                    ip = crop(ip, cropX, cropY, cropWidth, cropHeight);
                } else {
                    throw new ServletException("Invalid operation: " + op);
                }
                outputMimeType = inputMimeType;
            } else {
                // Format conversion: map the target extension to its MIME type.
                if (convertTo == null) {
                    throw new ServletException("Neither op nor convertTo was specified.");
                }
                if (convertTo.equals("jpg") || convertTo.equals("jpeg")) {
                    outputMimeType = "image/jpeg";
                } else if (convertTo.equals("gif")) {
                    outputMimeType = "image/gif";
                } else if (convertTo.equals("tiff")) {
                    outputMimeType = "image/tiff";
                } else if (convertTo.equals("bmp")) {
                    outputMimeType = "image/bmp";
                } else if (convertTo.equals("png")) {
                    outputMimeType = "image/png";
                } else {
                    throw new ServletException("Invalid format: " + convertTo);
                }
            }
            res.setContentType(outputMimeType);
            BufferedOutputStream out = new BufferedOutputStream(res.getOutputStream());
            outputImage(ip, out, outputMimeType);
            out.flush();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
            res.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getClass().getName() + ": " + e.getMessage());
        }
    }
}
public class WeakSet { /** * Clear the set * @ param shrink if true , shrink the set to initial size */ public void clear ( final boolean shrink ) { } }
while ( queue . poll ( ) != null ) { ; } if ( elementCount > 0 ) { elementCount = 0 ; } if ( shrink && ( elementData . length > 1024 ) && ( elementData . length > defaultSize ) ) { elementData = newElementArray ( defaultSize ) ; } else { Arrays . fill ( elementData , null ) ; } computeMaxSize ( ) ; while ( queue . poll ( ) != null ) { ; }
public class RequestUtil { /** * 如果客户端通过 request 传递数据 , 那么就可以使用该方法获取数据 * 这种通常是通过 Post方式 * @ param request HttpServletRequest * @ return 客户端上传的数据 * @ throws IOException 因为是通过IO流读取数据 , * 因此很可能读取失败 , 或者NULL , 导致抛出IO异常 , */ public static String getPostData ( HttpServletRequest request ) throws IOException { } }
BufferedReader br = new BufferedReader ( new InputStreamReader ( request . getInputStream ( ) ) ) ; String line ; StringBuilder sb = new StringBuilder ( ) ; while ( ( line = br . readLine ( ) ) != null ) { sb . append ( line ) ; } // return URLDecoder . decode ( sb . toString ( ) , Constant . DEFAULT _ CHATSET ) ; return sb . toString ( ) ;
public class CacheNotifierImpl { /** * Configure event data . Currently used for ' expired ' events . */ private void configureEvent ( CacheEntryListenerInvocation listenerInvocation , EventImpl < K , V > e , K key , V value , Metadata metadata ) { } }
e . setKey ( convertKey ( listenerInvocation , key ) ) ; e . setValue ( convertValue ( listenerInvocation , value ) ) ; e . setMetadata ( metadata ) ; e . setOriginLocal ( true ) ; e . setPre ( false ) ;
public class Nfs3 {
    /*
     * (non-Javadoc)
     * @see com.emc.ecs.nfsclient.nfs.Nfs#sendWrite(com.emc.ecs.nfsclient.nfs.NfsWriteRequest)
     */
    public Nfs3WriteResponse sendWrite(NfsWriteRequest request) throws IOException {
        // Issue the WRITE RPC synchronously; the response object is populated in place.
        Nfs3WriteResponse response = new Nfs3WriteResponse();
        _rpcWrapper.callRpcNaked(request, response);
        return response;
    }
}
public class Rank {
    /**
     * Set this Entity ahead of the passed in Entity in rank order.
     *
     * @param assetToRankAheadOf The Entity that will come next in order after
     *        this Entity.
     */
    public void setAbove(T assetToRankAheadOf) { }
}
// Delegate the reordering to the shared rank helper, then persist the
// asset so the new ordering is saved immediately.
instance.rankAbove(asset, assetToRankAheadOf, rankAttribute);
asset.save();
public class RecommendationsInner {
    /**
     * Get a recommendation rule for an app.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Name of the app.
     * @param name Name of the recommendation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RecommendationRuleInner object
     */
    public Observable<ServiceResponse<RecommendationRuleInner>> getRuleDetailsByWebAppWithServiceResponseAsync(String resourceGroupName, String siteName, String name) { }
}
// Fail fast on every required argument/client setting before issuing the call.
if (resourceGroupName == null) {
    throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (siteName == null) {
    throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
}
if (name == null) {
    throw new IllegalArgumentException("Parameter name is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
    throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
    throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
// Optional query parameters are not exposed by this overload; send them as null.
final Boolean updateSeen = null;
final String recommendationId = null;
return service.getRuleDetailsByWebApp(resourceGroupName, siteName, name, this.client.subscriptionId(), updateSeen, recommendationId, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
    .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RecommendationRuleInner>>>() {
        @Override
        public Observable<ServiceResponse<RecommendationRuleInner>> call(Response<ResponseBody> response) {
            try {
                // Deserialize the raw HTTP response; delegate handles status mapping.
                ServiceResponse<RecommendationRuleInner> clientResponse = getRuleDetailsByWebAppDelegate(response);
                return Observable.just(clientResponse);
            } catch (Throwable t) {
                // Surface parse/status failures as an error on the observable.
                return Observable.error(t);
            }
        }
    });
public class HiveRegistrationPolicyBase { /** * Obtain Hive table names . * The returned { @ link Iterable } contains : * 1 . Table name returned by { @ link # getTableName ( Path ) } * 2 . Table names specified by < code > additional . hive . table . names < / code > * In table names above , the { @ value PRIMARY _ TABLE _ TOKEN } if present is also replaced by the * table name obtained via { @ link # getTableName ( Path ) } . * @ param path Path for the table on filesystem . * @ return Table names to register . */ protected Iterable < String > getTableNames ( Path path ) { } }
List < String > tableNames = getTableNames ( Optional . < String > absent ( ) , path ) ; Preconditions . checkState ( ! tableNames . isEmpty ( ) , "Hive table name not specified" ) ; return tableNames ;
public class MultipartByterangesEntity {
    /**
     * {@inheritDoc}
     *
     * Streams a multipart/byteranges body: for each requested range it emits a
     * boundary line, Content-Type and Content-Range headers, a blank line and
     * the ranged slice of the resource content, then the closing boundary.
     */
    public void write(OutputStream ostream) throws IOException {
        try {
            for (Range range : ranges_) {
                // Open a fresh content stream per range; the resource may be a
                // version node or a plain file node.
                InputStream istream = null;
                if (resource_ instanceof VersionResource)
                    istream = ((VersionResource) resource_).getContentAsStream();
                else
                    istream = ((FileResource) resource_).getContentAsStream();
                println(ostream);
                // boundary
                print("--" + WebDavConst.BOUNDARY, ostream);
                println(ostream);
                // content-type
                print(ExtHttpHeaders.CONTENT_TYPE + ": " + contentType_, ostream);
                println(ostream);
                // current range
                print(ExtHttpHeaders.CONTENTRANGE + ": bytes " + range.getStart() + "-" + range.getEnd() + "/" + contentLength_, ostream);
                println(ostream);
                println(ostream);
                // range data: RangedInputStream limits the copy to [start, end].
                RangedInputStream rangedInputStream = new RangedInputStream(istream, range.getStart(), range.getEnd());
                byte buff[] = new byte[0x1000];
                int rd = -1;
                while ((rd = rangedInputStream.read(buff)) != -1)
                    ostream.write(buff, 0, rd);
                // NOTE(review): if read() throws, neither stream is closed here —
                // presumably acceptable upstream, but worth confirming.
                rangedInputStream.close();
            }
            // Closing boundary terminates the multipart body.
            println(ostream);
            print("--" + WebDavConst.BOUNDARY + "--", ostream);
            println(ostream);
        } catch (IOException exc) {
            // Log and rethrow with context, preserving the cause.
            LOG.error(exc.getMessage(), exc);
            throw new IOException("Can't write to stream, caused " + exc, exc);
        } catch (RepositoryException exc) {
            // Repository failures are surfaced to the caller as I/O errors.
            LOG.error(exc.getMessage(), exc);
            throw new IOException("Can't write to stream, caused " + exc, exc);
        }
    }
}
public class DefaultTraceCollector {
    /**
     * This method pops an existing node from the trace fragment.
     *
     * @param location The instrumentation location
     * @param builder  The fragment builder for the current thread
     * @param cls      The class to pop
     * @param uri      The optional URI to match
     * @return the popped node cast to {@code cls}, or null if there is no
     *         builder/current node or no matching node on the stack
     */
    protected <T extends Node> T pop(String location, FragmentBuilder builder, Class<T> cls, String uri) {
        // No builder means this thread has no active trace fragment.
        if (builder == null) {
            if (log.isLoggable(Level.WARNING)) {
                log.warning("No fragment builder for this thread (" + Thread.currentThread() + ") - trying to pop node of type: " + cls);
            }
            return null;
        }
        // A builder without a current node has nothing to pop either.
        if (builder.getCurrentNode() == null) {
            if (log.isLoggable(Level.FINEST)) {
                log.finest("WARNING: No 'current node' for this thread (" + Thread.currentThread() + ") - trying to pop node of type: " + cls);
            }
            return null;
        }
        // Check if any in or out content should be processed for the current node
        // before the node is removed from the stack.
        processInContent(location, builder, -1);
        processOutContent(location, builder, -1);
        Node node = builder.popNode(cls, uri);
        if (node != null) {
            // Mark the node complete before handing it back.
            builder.finishNode(node);
            return cls.cast(node);
        }
        // Nothing matched cls/uri at the top of the fragment; report and bail.
        if (log.isLoggable(Level.FINEST)) {
            log.finest("Current node (type=" + builder.getCurrentNode().getClass() + ") does not match required cls=" + cls + " and uri=" + uri + " at location=" + location);
        }
        return null;
    }
}
public class FileItem { /** * Defines the file represented by this item . * @ param file A file . */ public void setFile ( File file ) { } }
if ( file != null ) { this . file = file ; // Replace the label by the file ' s name . this . setLabel ( file . getName ( ) ) ; // Change the icon , depending if the file is a folder or not . this . updateIcon ( ) ; }
public class CyclomaticComplexity {
    /**
     * overrides the visitor to store the class context
     *
     * @param context the context object for the currently parsed class
     */
    @Override
    public void visitClassContext(final ClassContext context) {
        try {
            // Hold the context only for the duration of the visit.
            classContext = context;
            classContext.getJavaClass().accept(this);
        } finally {
            // Always release the reference, even if the visit throws.
            classContext = null;
        }
    }
}
public class ThrowUnchecked {
    /**
     * Throws the given exception if it is unchecked or an instance of any of
     * the given declared types. Otherwise, it is thrown as an
     * UndeclaredThrowableException. This method only returns normally if the
     * exception is null.
     *
     * @param t exception to throw
     * @param declaredTypes if exception is checked and is not an instance of
     *        any of these types, then it is thrown as an
     *        UndeclaredThrowableException.
     */
    public static void fireDeclared(Throwable t, Class... declaredTypes) { }
}
// Pure delegation: the Cojen utility implements the actual rethrow logic.
org.cojen.util.ThrowUnchecked.fireDeclared(t, declaredTypes);
public class RangeTombstoneList { /** * Removes all range tombstones whose local deletion time is older than gcBefore . */ public void purge ( int gcBefore ) { } }
int j = 0 ; for ( int i = 0 ; i < size ; i ++ ) { if ( delTimes [ i ] >= gcBefore ) setInternal ( j ++ , starts [ i ] , ends [ i ] , markedAts [ i ] , delTimes [ i ] ) ; } size = j ;
public class Base32Utils { /** * mask : F8 07 C0 3E 01 F0 0F 80 7C 03 E0 1F F8 07 */ static private void decodeCharacter ( byte chunk , int characterIndex , byte [ ] bytes , int index ) { } }
int byteIndex = index + ( characterIndex * 5 ) / 8 ; int offset = characterIndex % 8 ; switch ( offset ) { case 0 : bytes [ byteIndex ] |= chunk << 3 ; break ; case 1 : bytes [ byteIndex ] |= chunk >>> 2 ; bytes [ byteIndex + 1 ] |= chunk << 6 ; break ; case 2 : bytes [ byteIndex ] |= chunk << 1 ; break ; case 3 : bytes [ byteIndex ] |= chunk >>> 4 ; bytes [ byteIndex + 1 ] |= chunk << 4 ; break ; case 4 : bytes [ byteIndex ] |= chunk >>> 1 ; bytes [ byteIndex + 1 ] |= chunk << 7 ; break ; case 5 : bytes [ byteIndex ] |= chunk << 2 ; break ; case 6 : bytes [ byteIndex ] |= chunk >>> 3 ; bytes [ byteIndex + 1 ] |= chunk << 5 ; break ; case 7 : bytes [ byteIndex ] |= chunk ; break ; }
public class ImageClient { /** * Creates an image in the specified project using the data included in the request . * < p > Sample code : * < pre > < code > * try ( ImageClient imageClient = ImageClient . create ( ) ) { * Boolean forceCreate = false ; * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * Image imageResource = Image . newBuilder ( ) . build ( ) ; * Operation response = imageClient . insertImage ( forceCreate , project , imageResource ) ; * < / code > < / pre > * @ param forceCreate Force image creation if true . * @ param project Project ID for this request . * @ param imageResource An Image resource . ( = = resource _ for beta . images = = ) ( = = resource _ for * v1 . images = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation insertImage ( Boolean forceCreate , ProjectName project , Image imageResource ) { } }
InsertImageHttpRequest request = InsertImageHttpRequest . newBuilder ( ) . setForceCreate ( forceCreate ) . setProject ( project == null ? null : project . toString ( ) ) . setImageResource ( imageResource ) . build ( ) ; return insertImage ( request ) ;
public class FileSystemSafetyNet { /** * Closes the safety net for a thread . This closes all remaining unclosed streams that were opened * by safety - net - guarded file systems . After this method was called , no streams can be opened any more * from any FileSystem instance that was obtained while the thread was guarded by the safety net . * < p > This method should be called at the very end of a guarded thread . */ @ Internal public static void closeSafetyNetAndGuardedResourcesForThread ( ) { } }
SafetyNetCloseableRegistry registry = REGISTRIES . get ( ) ; if ( null != registry ) { REGISTRIES . remove ( ) ; IOUtils . closeQuietly ( registry ) ; }
public class TimeBaseMarshaller { /** * from interface TimeBaseService */ public void getTimeOid ( String arg1 , TimeBaseService . GotTimeBaseListener arg2 ) { } }
TimeBaseMarshaller . GotTimeBaseMarshaller listener2 = new TimeBaseMarshaller . GotTimeBaseMarshaller ( ) ; listener2 . listener = arg2 ; sendRequest ( GET_TIME_OID , new Object [ ] { arg1 , listener2 } ) ;
public class Stream { /** * Returns a metadata property about the stream . Note that { @ code Stream } wrappers obtained * through intermediate operations don ' t have their own properties , but instead access the * metadata properties of the source { @ code Stream } . * @ param name * the name of the property * @ param type * the type of the property value ( conversion will be attempted if available value * has a different type ) * @ param < V > * the type of value * @ return the value of the property , or null if the property is undefined */ public final < V > V getProperty ( final String name , final Class < V > type ) { } }
Preconditions . checkNotNull ( name ) ; try { Object value = null ; synchronized ( this . state ) { if ( this . state . properties != null ) { value = this . state . properties . get ( name ) ; } } return Data . convert ( value , type ) ; } catch ( final Throwable ex ) { throw Throwables . propagate ( ex ) ; }
public class FileHdr { /** * Set up the key areas . */ public void setupKeys ( ) { } }
KeyAreaInfo keyArea = null ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , ID_KEY ) ; keyArea . addKeyField ( ID , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , FILE_NAME_KEY ) ; keyArea . addKeyField ( FILE_NAME , Constants . ASCENDING ) ;
public class DatabasesInner {
    /**
     * Imports a bacpac into a new database.
     *
     * @param resourceGroupName The name of the resource group that contains the
     *        resource. You can obtain this value from the Azure Resource
     *        Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters The required parameters for importing a Bacpac into a database.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImportExportResponseInner object
     */
    public Observable<ServiceResponse<ImportExportResponseInner>> beginImportMethodWithServiceResponseAsync(String resourceGroupName, String serverName, ImportRequest parameters) { }
}
// Validate every required argument/client setting before issuing the call.
if (this.client.subscriptionId() == null) {
    throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
    throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (serverName == null) {
    throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
    throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
if (parameters == null) {
    throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
// Deep-validate the request payload itself.
Validator.validate(parameters);
return service.beginImportMethod(this.client.subscriptionId(), resourceGroupName, serverName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent())
    .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ImportExportResponseInner>>>() {
        @Override
        public Observable<ServiceResponse<ImportExportResponseInner>> call(Response<ResponseBody> response) {
            try {
                // Deserialize the raw HTTP response; delegate handles status mapping.
                ServiceResponse<ImportExportResponseInner> clientResponse = beginImportMethodDelegate(response);
                return Observable.just(clientResponse);
            } catch (Throwable t) {
                // Surface parse/status failures as an error on the observable.
                return Observable.error(t);
            }
        }
    });
public class ConnectionImpl {
    /**
     * (non-Javadoc)
     *
     * Zero-argument overload: delegates to the boolean variant with
     * {@code true} (see that overload for the flag's meaning), wrapped in
     * entry/exit trace calls.
     *
     * @see com.ibm.wsspi.sib.core.SICoreConnection#createUncoordinatedTransaction()
     */
    @Override
    public SIUncoordinatedTransaction createUncoordinatedTransaction() throws SIConnectionUnavailableException, SIConnectionDroppedException, SIErrorException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createUncoordinatedTransaction");
        SIUncoordinatedTransaction tran = createUncoordinatedTransaction(true);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createUncoordinatedTransaction", tran);
        return tran;
    }
}
public class FactoryPointTracker {
    /**
     * Creates a tracker which detects Shi-Tomasi corner features, describes
     * them with SURF, and nominally tracks them using KLT.
     *
     * @see ShiTomasiCornerIntensity
     * @see DescribePointSurf
     * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
     * @param configExtract Configuration for extracting features
     * @param kltConfig Configuration for KLT
     * @param reactivateThreshold Tracks are reactivated after this many have
     *        been dropped. Try 10% of maxMatches
     * @param configDescribe Configuration for SURF descriptor
     * @param configOrientation Configuration for region orientation. If null
     *        then orientation isn't estimated
     * @param imageType Type of image the input is.
     * @param derivType Image derivative type.
     * @return SURF based tracker.
     */
    public static <I extends ImageGray<I>, D extends ImageGray<D>> PointTracker<I> combined_ST_SURF_KLT(ConfigGeneralDetector configExtract, PkltConfig kltConfig, int reactivateThreshold, ConfigSurfDescribe.Stability configDescribe, ConfigSlidingIntegral configOrientation, Class<I> imageType, @Nullable Class<D> derivType) {
        // Infer the derivative image type from the input type when not given.
        if (derivType == null)
            derivType = GImageDerivativeOps.getDerivativeType(imageType);
        // Shi-Tomasi corner detector wrapped as a generic interest-point detector.
        GeneralFeatureDetector<I, D> corner = createShiTomasi(configExtract, derivType);
        InterestPointDetector<I> detector = FactoryInterestPoint.wrapPoint(corner, 1, imageType, derivType);
        // SURF descriptor plus greedy Euclidean association of descriptors.
        DescribeRegionPoint<I, BrightFeature> regionDesc = FactoryDescribeRegionPoint.surfStable(configDescribe, imageType);
        ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
        AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 100000, true));
        AssociateDescription<BrightFeature> generalAssoc = new WrapAssociateSurfBasic(assoc);
        // Optional orientation estimation over the integral image
        // (raw types here mirror the underlying factory APIs).
        OrientationImage<I> orientation = null;
        if (configOrientation != null) {
            Class integralType = GIntegralImageOps.getIntegralType(imageType);
            OrientationIntegral orientationII = FactoryOrientationAlgs.sliding_ii(configOrientation, integralType);
            orientation = FactoryOrientation.convertImage(orientationII, imageType);
        }
        return combined(detector, orientation, regionDesc, generalAssoc, kltConfig, reactivateThreshold, imageType);
    }
}
public class SelectiveAccessHandler { /** * Returns information which infomations are selected by the actual configuration */ public String getSelectionInfo ( ) { } }
StringBuilder result = new StringBuilder ( ) ; result . append ( "SelectionInfo: " + this . getClass ( ) . toString ( ) + "\n" ) ; result . append ( "Page:" + CITInfo ( pageHandling ) + "\n" ) ; result . append ( "FirstParagraph:" + CITInfo ( firstParagraphHandling ) + "\n" ) ; for ( String key : sectionHandling . keySet ( ) ) { final String uss = SectionType . USER_SECTION . toString ( ) ; if ( key . startsWith ( uss ) ) result . append ( uss + "[" + key . substring ( uss . length ( ) ) + "]:\n" ) ; else result . append ( key + ":\n" ) ; result . append ( SITInfo ( sectionHandling . get ( key ) ) + "\n" ) ; } return result . toString ( ) ;
public class RoaringBitmap {
    /**
     * Complements the bits in the given range, from rangeStart (inclusive)
     * rangeEnd (exclusive). The given bitmap is unchanged.
     *
     * @param bm bitmap being negated
     * @param rangeStart inclusive beginning of range, in [0, 0xffffffff]
     * @param rangeEnd exclusive ending of range, in [0, 0xffffffff + 1]
     * @return a new Bitmap
     */
    public static RoaringBitmap flip(RoaringBitmap bm, final long rangeStart, final long rangeEnd) {
        rangeSanityCheck(rangeStart, rangeEnd);
        // An empty range flips nothing: return an unchanged copy.
        if (rangeStart >= rangeEnd) {
            return bm.clone();
        }
        RoaringBitmap answer = new RoaringBitmap();
        // Split the range into high-bit container keys and low-bit offsets.
        final int hbStart = Util.toIntUnsigned(Util.highbits(rangeStart));
        final int lbStart = Util.toIntUnsigned(Util.lowbits(rangeStart));
        final int hbLast = Util.toIntUnsigned(Util.highbits(rangeEnd - 1));
        final int lbLast = Util.toIntUnsigned(Util.lowbits(rangeEnd - 1));
        // copy the containers before the active area
        answer.highLowContainer.appendCopiesUntil(bm.highLowContainer, (short) hbStart);
        for (int hb = hbStart; hb <= hbLast; ++hb) {
            // First/last containers may be partially covered by the range.
            final int containerStart = (hb == hbStart) ? lbStart : 0;
            final int containerLast = (hb == hbLast) ? lbLast : Util.maxLowBitAsInteger();
            final int i = bm.highLowContainer.getIndex((short) hb);
            final int j = answer.highLowContainer.getIndex((short) hb);
            // The answer cannot already hold this key: keys are appended in order.
            assert j < 0;
            if (i >= 0) {
                // Existing container: complement only the covered sub-range.
                Container c = bm.highLowContainer.getContainerAtIndex(i).not(containerStart, containerLast + 1);
                if (!c.isEmpty()) {
                    answer.highLowContainer.insertNewKeyValueAt(-j - 1, (short) hb, c);
                }
            } else {
                // No source container: flipping absent bits yields a run of ones,
                // which (I *think*) must never be empty here.
                answer.highLowContainer.insertNewKeyValueAt(-j - 1, (short) hb, Container.rangeOfOnes(containerStart, containerLast + 1));
            }
        }
        // copy the containers after the active area.
        answer.highLowContainer.appendCopiesAfter(bm.highLowContainer, (short) hbLast);
        return answer;
    }
}
public class CmsSerialDateBeanFactory { /** * Factory method for creating a serial date bean . * @ param widgetValue the value for the series as stored by the { @ link org . opencms . widgets . CmsSerialDateWidget } * @ return the serial date bean . */ public static I_CmsSerialDateBean createSerialDateBean ( String widgetValue ) { } }
I_CmsSerialDateValue value ; value = new CmsSerialDateValue ( widgetValue ) ; return createSerialDateBean ( value ) ;
public class PlaybackView {
    /**
     * SeekBar callback: refreshes the current-time label to match the new
     * progress position.
     */
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        // Convert the raw progress into the two time components used by the
        // label template (presumably {seconds, minutes} given the name —
        // TODO confirm against getSecondMinutes).
        int[] secondMinute = getSecondMinutes(progress);
        // NOTE(review): String.format uses the default locale here — confirm
        // that is intended for this time display.
        mCurrentTime.setText(String.format(getResources().getString(R.string.playback_view_time), secondMinute[0], secondMinute[1]));
    }
}
public class IOSMobileCommandHelper { /** * This method forms a { @ link Map } of parameters for the touchId simulator . * @ param match If true , simulates a successful fingerprint scan . If false , simulates a failed fingerprint scan . * @ return a key - value pair . The key is the command name . The value is a { @ link Map } command arguments . */ public static Map . Entry < String , Map < String , ? > > touchIdCommand ( boolean match ) { } }
return new AbstractMap . SimpleEntry < > ( TOUCH_ID , prepareArguments ( "match" , match ) ) ;
public class ClientConnecter {
    /**
     * Thread body: accepts client connections in a loop and hands each socket
     * to the server. A SocketException (e.g. the server socket being closed)
     * stops the loop; other I/O errors are logged and the loop continues.
     */
    @Override
    public void run() {
        isRunning = true;
        while (isRunning) {
            try {
                // Blocks until a client connects, then notifies the server.
                server.notifyNewClientConnected(serverSocket.accept());
            } catch (final SocketException exception) {
                // Socket-level failure is treated as a shutdown signal.
                Verbose.exception(exception);
                isRunning = false;
            } catch (final IOException exception) {
                // Transient accept failure: log and keep accepting.
                Verbose.exception(exception);
            }
        }
    }
}
public class TimeLine {
    /**
     * Return the next index into the TIMELINE array.
     *
     * Lock-free: the current index lives in tl[0]; competing threads retry
     * until their CAS from the observed value to the incremented value wins.
     * The index wraps at MAX_EVENTS (which this masking requires to be a
     * power of two — assumed, TODO confirm at its declaration).
     */
    private static int next_idx(long[] tl) {
        // Spin until we can CAS-acquire a fresh index
        while (true) {
            int oldidx = (int) tl[0];
            int newidx = (oldidx + 1) & (MAX_EVENTS - 1);
            if (CAS(tl, 0, oldidx, newidx))
                return oldidx;
        }
    }
}
public class SparseTensor {
    /**
     * Same as {@link #fromUnorderedKeyValues}, except that neither input array
     * is copied. These arrays must not be modified by the caller after
     * invoking this method.
     *
     * @param dimensionNumbers tensor dimension numbers
     * @param dimensionSizes size of each dimension
     * @param keyNums key numbers; sorted in place by this call
     * @param values values paired with keyNums; reordered in place alongside them
     * @return the sparse tensor backed directly by the given arrays
     */
    public static SparseTensor fromUnorderedKeyValuesNoCopy(int[] dimensionNumbers, int[] dimensionSizes, long[] keyNums, double[] values) {
        // Sort key/value pairs in place — this mutates the caller's arrays,
        // which is the documented trade-off of the no-copy variant.
        ArrayUtils.sortKeyValuePairs(keyNums, values, 0, keyNums.length);
        return new SparseTensor(dimensionNumbers, dimensionSizes, keyNums, values);
    }
}
public class AFPChainer {
    /**
     * Derive the compatible AFP lists for AFP-chaining; this is important for
     * speeding up the process. For a given AFP (i1,j1) there are three regions
     * that could be the starting point of its compatible AFPs:
     *
     *        a1       a2  a3
     *       i1-G  i1-f-c  i1-f+1    i1
     * b1 j1-G    -|-----|       |
     *             |  C  |   3   |   |
     * b2 j1-f-c  -|     |-----  |   |
     *             |  2  |   1   |   |
     * b3 j1-f+1  -|         ----|
     *    j1      -|-------------\
     *             |              \ (AFP(i1,j1))
     *
     * f: the length of AFPs (segments of the same length)
     * G: g + f, where g is the maximum allowed gap
     * c: the maximum allowed cross-over in AFP-connection (here c = f)
     * Incompatible region A: AFPs overlap the given AFP; B: gap larger than g;
     * C: connection crosses the given threshold.
     * Compatible regions combined: [a1,a3] x [b2,b3] (regions 1+2) and
     * [a2,a3] x [b1,b2] (region 3).
     * c -> misCut, f -> fragLen, G -> fragLen + maxGap -> maxGapFrag
     *
     * @param afp index of the AFP whose compatible predecessors are collected
     * @param list output array; filled with compatible AFP indices
     * @return the number of entries written into list
     */
    private static int getCompatibleAfps(int afp, int[] list, FatCatParameters params, AFPChain afpChain) {
        int i, j, i1, j1, f, G, c, a1, a2, a3, b1, b2, b3, s1, s2;
        int fragLen = params.getFragLen();
        int maxGapFrag = params.getMaxGapFrag();
        int misCut = params.getMisCut();
        int maxTra = params.getMaxTra();
        List<AFP> afpSet = afpChain.getAfpSet();
        f = fragLen;
        G = maxGapFrag;
        c = misCut;
        // Anchor point of the given AFP in both sequences.
        i1 = afpSet.get(afp).getP1();
        j1 = afpSet.get(afp).getP2();
        // Region boundaries on the first sequence (clamped at 0).
        a3 = i1 - f;
        a2 = a3 - c;
        a1 = i1 - G;
        a2 = a2 > 0 ? a2 : 0;
        a1 = a1 > 0 ? a1 : 0;
        // Region boundaries on the second sequence (clamped at 0).
        b3 = j1 - f;
        b2 = b3 - c;
        b1 = j1 - G;
        b2 = (b2 > 0) ? b2 : 0;
        b1 = (b1 > 0) ? b1 : 0;
        int[][] afpAftIndex = afpChain.getAfpAftIndex();
        int[][] afpBefIndex = afpChain.getAfpBefIndex();
        int[] twi = afpChain.getTwi();
        int n = 0;
        // compatible region 1-2, [a1,a3] [b2,b3]
        for (i = a1; i <= a3; i++) { // i <= a3 instead of i < a3
            s1 = afpAftIndex[i][b2]; // note afpAftIndex, not afpIndex
            if (s1 < 0)
                continue; // no AFP for the given i with j > b2
            s2 = afpBefIndex[i][b3]; // afps is sorted by j given an i; it's a sparse matrix
            if (s2 < 0)
                continue; // no AFP for the given i with j < b3
            for (j = s1; j <= s2; j++) { // j <= s2 instead of j < s2
                // Only accept AFPs whose twist count stays within the limit.
                if (twi[j] <= maxTra) {
                    list[n++] = j;
                }
            }
        }
        // compatible region 3 [a2,a3] [b1,b2]
        for (i = a2; i <= a3; i++) {
            s1 = afpAftIndex[i][b1];
            if (s1 < 0)
                continue;
            s2 = afpBefIndex[i][b2]; // afps is sorted by j given an i
            if (s2 < 0)
                continue;
            // note j < s2, as the case j == s2 is already considered in the previous region
            for (j = s1; j < s2; j++) {
                if (twi[j] <= maxTra) {
                    list[n++] = j;
                }
            }
        }
        return n;
    }
}
public class InternalXbaseParser { /** * InternalXbase . g : 5605:1 : ruleJvmParameterizedTypeReference returns [ EObject current = null ] : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) * NOTE ( review ) : this method is ANTLR - generated from the InternalXbase . g grammar ; do not hand - edit it - regenerate from the grammar instead . */ public final EObject ruleJvmParameterizedTypeReference ( ) throws RecognitionException { } }
// NOTE(review): ANTLR-generated recursive-descent body for the JvmParameterizedTypeReference rule. It parses a qualified type name, an optional '<' argument-list '>' section, and any number of trailing '.' inner-type segments (each with its own optional type-argument list), building the EMF model via createModelElement*/add calls. Regenerate from InternalXbase.g rather than editing by hand.
EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token otherlv_5 = null ; Token otherlv_7 = null ; Token otherlv_9 = null ; Token otherlv_11 = null ; Token otherlv_13 = null ; EObject lv_arguments_2_0 = null ; EObject lv_arguments_4_0 = null ; EObject lv_arguments_10_0 = null ; EObject lv_arguments_12_0 = null ; enterRule ( ) ; try { // InternalXbase . g : 5611:2 : ( ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) ) // InternalXbase . g : 5612:2 : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) { // InternalXbase . g : 5612:2 : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . 
' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) // InternalXbase . g : 5613:3 : ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? { // InternalXbase . g : 5613:3 : ( ( ruleQualifiedName ) ) // InternalXbase . g : 5614:4 : ( ruleQualifiedName ) { // InternalXbase . g : 5614:4 : ( ruleQualifiedName ) // InternalXbase . g : 5615:5 : ruleQualifiedName { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getTypeJvmTypeCrossReference_0_0 ( ) ) ; } pushFollow ( FOLLOW_74 ) ; ruleQualifiedName ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXbase . g : 5629:3 : ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . 
' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? int alt101 = 2 ; alt101 = dfa101 . predict ( input ) ; switch ( alt101 ) { case 1 : // InternalXbase . g : 5630:4 : ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * { // InternalXbase . g : 5630:4 : ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) // InternalXbase . g : 5631:5 : ( ' < ' ) = > otherlv _ 1 = ' < ' { otherlv_1 = ( Token ) match ( input , 19 , FOLLOW_25 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getLessThanSignKeyword_1_0 ( ) ) ; } } // InternalXbase . g : 5637:4 : ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) // InternalXbase . g : 5638:5 : ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) { // InternalXbase . g : 5638:5 : ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) // InternalXbase . g : 5639:6 : lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; lv_arguments_2_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . 
backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_2_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbase . g : 5656:4 : ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * loop97 : do { int alt97 = 2 ; int LA97_0 = input . LA ( 1 ) ; if ( ( LA97_0 == 48 ) ) { alt97 = 1 ; } switch ( alt97 ) { case 1 : // InternalXbase . g : 5657:5 : otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) { otherlv_3 = ( Token ) match ( input , 48 , FOLLOW_25 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getCommaKeyword_1_2_0 ( ) ) ; } // InternalXbase . g : 5661:5 : ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) // InternalXbase . g : 5662:6 : ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) { // InternalXbase . g : 5662:6 : ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) // InternalXbase . g : 5663:7 : lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; lv_arguments_4_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . 
getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_4_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop97 ; } } while ( true ) ; otherlv_5 = ( Token ) match ( input , 20 , FOLLOW_70 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_5 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getGreaterThanSignKeyword_1_3 ( ) ) ; } // InternalXbase . g : 5685:4 : ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * loop100 : do { int alt100 = 2 ; int LA100_0 = input . LA ( 1 ) ; if ( ( LA100_0 == 45 ) ) { int LA100_2 = input . LA ( 2 ) ; if ( ( LA100_2 == RULE_ID ) ) { int LA100_3 = input . LA ( 3 ) ; if ( ( synpred45_InternalXbase ( ) ) ) { alt100 = 1 ; } } } switch ( alt100 ) { case 1 : // InternalXbase . g : 5686:5 : ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? { // InternalXbase . g : 5686:5 : ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) // InternalXbase . g : 5687:6 : ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) { // InternalXbase . g : 5693:6 : ( ( ) otherlv _ 7 = ' . ' ) // InternalXbase . g : 5694:7 : ( ) otherlv _ 7 = ' . ' { // InternalXbase . g : 5694:7 : ( ) // InternalXbase . g : 5695:8: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . 
getJvmInnerTypeReferenceOuterAction_1_4_0_0_0 ( ) , current ) ; } } otherlv_7 = ( Token ) match ( input , 45 , FOLLOW_61 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_7 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getFullStopKeyword_1_4_0_0_1 ( ) ) ; } } } // InternalXbase . g : 5707:5 : ( ( ruleValidID ) ) // InternalXbase . g : 5708:6 : ( ruleValidID ) { // InternalXbase . g : 5708:6 : ( ruleValidID ) // InternalXbase . g : 5709:7 : ruleValidID { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getTypeJvmTypeCrossReference_1_4_1_0 ( ) ) ; } pushFollow ( FOLLOW_75 ) ; ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXbase . g : 5723:5 : ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? int alt99 = 2 ; alt99 = dfa99 . predict ( input ) ; switch ( alt99 ) { case 1 : // InternalXbase . g : 5724:6 : ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' { // InternalXbase . g : 5724:6 : ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) // InternalXbase . g : 5725:7 : ( ' < ' ) = > otherlv _ 9 = ' < ' { otherlv_9 = ( Token ) match ( input , 19 , FOLLOW_25 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_9 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . 
getLessThanSignKeyword_1_4_2_0 ( ) ) ; } } // InternalXbase . g : 5731:6 : ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) // InternalXbase . g : 5732:7 : ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) { // InternalXbase . g : 5732:7 : ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) // InternalXbase . g : 5733:8 : lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; lv_arguments_10_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_10_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbase . g : 5750:6 : ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * loop98 : do { int alt98 = 2 ; int LA98_0 = input . LA ( 1 ) ; if ( ( LA98_0 == 48 ) ) { alt98 = 1 ; } switch ( alt98 ) { case 1 : // InternalXbase . g : 5751:7 : otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) { otherlv_11 = ( Token ) match ( input , 48 , FOLLOW_25 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_11 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getCommaKeyword_1_4_2_2_0 ( ) ) ; } // InternalXbase . g : 5755:7 : ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) // InternalXbase . g : 5756:8 : ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) { // InternalXbase . g : 5756:8 : ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) // InternalXbase . 
g : 5757:9 : lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; lv_arguments_12_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_12_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop98 ; } } while ( true ) ; otherlv_13 = ( Token ) match ( input , 20 , FOLLOW_70 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_13 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getGreaterThanSignKeyword_1_4_2_3 ( ) ) ; } } break ; } } break ; default : break loop100 ; } } while ( true ) ; } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class TransactionManager { /** * Replay all logged edits from the given transaction logs . */ private void replayLogs ( Collection < TransactionLog > logs ) { } }
for ( TransactionLog log : logs ) { LOG . info ( "Replaying edits from transaction log " + log . getName ( ) ) ; int editCnt = 0 ; try { TransactionLogReader reader = log . getReader ( ) ; // reader may be null in the case of an empty file if ( reader == null ) { continue ; } TransactionEdit edit = null ; while ( ( edit = reader . next ( ) ) != null ) { editCnt ++ ; switch ( edit . getState ( ) ) { case INPROGRESS : long expiration = edit . getExpiration ( ) ; TransactionType type = edit . getType ( ) ; // Check if transaction needs to be migrated to have expiration and type . Previous version of // long running transactions were represented with expiration time as - 1. // This can be removed when we stop supporting TransactionEditCodecV2. if ( expiration < 0 ) { expiration = getTxExpirationFromWritePointer ( edit . getWritePointer ( ) , defaultLongTimeout ) ; type = TransactionType . LONG ; } else if ( type == null ) { type = TransactionType . SHORT ; } addInProgressAndAdvance ( edit . getWritePointer ( ) , edit . getVisibilityUpperBound ( ) , expiration , type ) ; break ; case COMMITTING : addCommittingChangeSet ( edit . getWritePointer ( ) , edit . getChanges ( ) ) ; break ; case COMMITTED : // TODO : need to reconcile usage of transaction id v / s write pointer TEPHRA - 140 long transactionId = edit . getWritePointer ( ) ; long [ ] checkpointPointers = edit . getCheckpointPointers ( ) ; long writePointer = checkpointPointers == null || checkpointPointers . length == 0 ? transactionId : checkpointPointers [ checkpointPointers . length - 1 ] ; doCommit ( transactionId , writePointer , edit . getChanges ( ) , edit . getCommitPointer ( ) , edit . getCanCommit ( ) ) ; break ; case INVALID : doInvalidate ( edit . getWritePointer ( ) ) ; break ; case ABORTED : type = edit . getType ( ) ; // Check if transaction edit needs to be migrated to have type . Previous versions of // ABORTED edits did not contain type . 
// This can be removed when we stop supporting TransactionEditCodecV2. if ( type == null ) { InProgressTx inProgressTx = inProgress . get ( edit . getWritePointer ( ) ) ; if ( inProgressTx != null ) { type = inProgressTx . getType ( ) ; } else { // If transaction is not in - progress , then it has either been already aborted or invalidated . // We cannot determine the transaction ' s state based on current information , to be safe invalidate it . LOG . warn ( "Invalidating transaction {} as it's type cannot be determined during replay" , edit . getWritePointer ( ) ) ; doInvalidate ( edit . getWritePointer ( ) ) ; break ; } } doAbort ( edit . getWritePointer ( ) , edit . getCheckpointPointers ( ) , type ) ; break ; case TRUNCATE_INVALID_TX : if ( edit . getTruncateInvalidTxTime ( ) != 0 ) { doTruncateInvalidTxBefore ( edit . getTruncateInvalidTxTime ( ) ) ; } else { doTruncateInvalidTx ( edit . getTruncateInvalidTx ( ) ) ; } break ; case CHECKPOINT : doCheckpoint ( edit . getWritePointer ( ) , edit . getParentWritePointer ( ) ) ; break ; default : // unknown type ! throw new IllegalArgumentException ( "Invalid state for WAL entry: " + edit . getState ( ) ) ; } } } catch ( IOException ioe ) { throw Throwables . propagate ( ioe ) ; } catch ( InvalidTruncateTimeException e ) { throw Throwables . propagate ( e ) ; } LOG . info ( "Read " + editCnt + " edits from log " + log . getName ( ) ) ; }
public class KafkaClient { /** * Removes a topic message listener . * @ param consumerGroupId * @ param topic * @ param messageListener * @ return { @ code true } if successful , { @ code false } otherwise ( the topic * may have no such listener added before ) */ public boolean removeMessageListener ( String consumerGroupId , String topic , IKafkaMessageListener messageListener ) { } }
KafkaMsgConsumer kafkaConsumer = cacheConsumers . get ( consumerGroupId ) ; return kafkaConsumer != null ? kafkaConsumer . removeMessageListener ( topic , messageListener ) : false ;
public class CommerceNotificationTemplateLocalServiceWrapper {
    /**
     * Returns the commerce notification template matching the UUID and group.
     *
     * @param uuid the commerce notification template's UUID
     * @param groupId the primary key of the group
     * @return the matching commerce notification template
     * @throws PortalException if a matching commerce notification template could not be found
     */
    @Override
    public com.liferay.commerce.notification.model.CommerceNotificationTemplate getCommerceNotificationTemplateByUuidAndGroupId(String uuid, long groupId) throws com.liferay.portal.kernel.exception.PortalException {
        // Pure delegation: forward unchanged to the wrapped local service.
        return _commerceNotificationTemplateLocalService.getCommerceNotificationTemplateByUuidAndGroupId(uuid, groupId);
    }
}
public class DataModelFactory { /** * Generates a comment regarding the parameters . * @ param entityId - id of the commented entity * @ param entityType - type of the entity * @ param action - the action performed by the user * @ param commentedText - comment text * @ param user - comment left by * @ param date - date comment was created * @ return - comment entity */ public static Comment createComment ( final String entityId , final String entityType , final String action , final String commentedText , final String user , final Date date ) { } }
final Comment comment = new Comment ( ) ; comment . setEntityId ( entityId ) ; comment . setEntityType ( entityType ) ; comment . setAction ( action ) ; comment . setCommentText ( commentedText ) ; comment . setCommentedBy ( user ) ; comment . setCreatedDateTime ( date ) ; return comment ;
public class GenericDao { /** * 根据查询条件获取结果集列表 * @ param sql * @ param params 无参数时可以为null * @ return */ public List < ENTITY > findBySQL ( String sql , List < ? > params ) { } }
RowMapper < ENTITY > mapper = new GenericMapper < ENTITY , KEY > ( orMapping ) ; return findBySQL ( sql , params , mapper ) ;
public class SchemaTypeChecker { /** * A special check for the magic @ deprecated directive * @ param errors the list of errors * @ param directive the directive to check * @ param errorSupplier the error supplier function */ static void checkDeprecatedDirective ( List < GraphQLError > errors , Directive directive , Supplier < InvalidDeprecationDirectiveError > errorSupplier ) { } }
if ( "deprecated" . equals ( directive . getName ( ) ) ) { // it can have zero args List < Argument > arguments = directive . getArguments ( ) ; if ( arguments . size ( ) == 0 ) { return ; } // but if has more than it must have 1 called " reason " of type StringValue if ( arguments . size ( ) == 1 ) { Argument arg = arguments . get ( 0 ) ; if ( "reason" . equals ( arg . getName ( ) ) && arg . getValue ( ) instanceof StringValue ) { return ; } } // not valid errors . add ( errorSupplier . get ( ) ) ; }
public class SU { /** * Starts a new privilege - escalated environment , execute a closure , and shut it down . */ public static < V , T extends Throwable > V execute ( TaskListener listener , String rootUsername , String rootPassword , final Callable < V , T > closure ) throws T , IOException , InterruptedException { } }
VirtualChannel ch = start ( listener , rootUsername , rootPassword ) ; try { return ch . call ( closure ) ; } finally { ch . close ( ) ; ch . join ( 3000 ) ; // give some time for orderly shutdown , but don ' t block forever . }
public class SARLLabelProvider {
    /**
     * Replies the text for the given element.
     *
     * @param element the element.
     * @return the text.
     */
    @SuppressWarnings("static-method")
    protected StyledString text(SarlCapacityUses element) {
        // Every capacity-uses element is rendered with the same fixed label
        // (Messages.SARLLabelProvider_0), styled with the qualifier style; the
        // element itself contributes no text.
        return new StyledString(Messages.SARLLabelProvider_0, StyledString.QUALIFIER_STYLER);
    }
}
public class PPatternAssistantTC { /** * Get a complete list of all definitions , including duplicates . This method should only be used only by PP */ private List < PDefinition > getAllDefinitions ( PPattern pattern , PType ptype , NameScope scope ) { } }
try { return pattern . apply ( af . getAllDefinitionLocator ( ) , new AllDefinitionLocator . NewQuestion ( ptype , scope ) ) ; } catch ( AnalysisException e ) { return null ; }
public class LocalFileSystemStateManager { /** * Make utils class protected for easy unit testing */ protected ListenableFuture < Boolean > setData ( String path , byte [ ] data , boolean overwrite ) { } }
final SettableFuture < Boolean > future = SettableFuture . create ( ) ; boolean ret = FileUtils . writeToFile ( path , data , overwrite ) ; safeSetFuture ( future , ret ) ; return future ;
public class SmartBinder {
    /**
     * Terminate this binder by looking up the named static method on the
     * given target type. Perform the actual method lookup using the given
     * Lookup object.
     *
     * @param lookup the Lookup to use for handle lookups
     * @param target the type on which to find the static method
     * @param name the name of the target static method
     * @return a SmartHandle with this binder's starting signature, bound
     *         to the target method
     * @throws NoSuchMethodException if the named method with current signature's types does not exist
     * @throws IllegalAccessException if the named method is not accessible to the given Lookup
     */
    public SmartHandle invokeStatic(Lookup lookup, Class<?> target, String name) throws NoSuchMethodException, IllegalAccessException {
        // Delegate the lookup to the underlying Binder and re-wrap the resulting
        // handle together with this binder's starting signature.
        return new SmartHandle(start, binder.invokeStatic(lookup, target, name));
    }
}
public class RestBuilder {
    /**
     * Registers a context provider class that will be instantiated and invoked on
     * demand to produce instances of {@code clazz}.
     *
     * @param clazz    type of object the provider produces, must not be {@code null}
     * @param provider provider class to be instantiated and executed when needed,
     *                 must not be {@code null}
     * @param <T>      type of the provided object
     * @return this builder, for chaining
     */
    public <T> RestBuilder addProvider(Class<T> clazz, Class<? extends ContextProvider<T>> provider) {
        Assert.notNull(clazz, "Missing provided class type!");
        Assert.notNull(provider, "Missing context provider!");
        registeredProviders.put(clazz, provider);
        return this;
    }
}
public class Attach { /** * { @ inheritDoc } */ public final void validate ( ) throws ValidationException { } }
/* * ; the following is optional , ; but MUST NOT occur more than once ( " ; " fmttypeparam ) / */ ParameterValidator . getInstance ( ) . assertOneOrLess ( Parameter . FMTTYPE , getParameters ( ) ) ; /* * ; the following is optional , ; and MAY occur more than once ( " ; " xparam ) */ /* * If the value type parameter is " ; VALUE = BINARY " , then the inline encoding parameter MUST be specified with the * value " ; ENCODING = BASE64 " . */ if ( Value . BINARY . equals ( getParameter ( Parameter . VALUE ) ) ) { ParameterValidator . getInstance ( ) . assertOne ( Parameter . ENCODING , getParameters ( ) ) ; if ( ! Encoding . BASE64 . equals ( getParameter ( Parameter . ENCODING ) ) ) { throw new ValidationException ( "If the value type parameter is [BINARY], the inline" + "encoding parameter MUST be specified with the value [BASE64]" ) ; } }
public class ZkLeaderElection {
    /**
     * Withdraws this node from the leader election, if it has joined one.
     *
     * @deprecated Curator's election algorithm is problematic: when the last
     * remaining node — which is also the leader — exits the election, it throws
     * {@code java.lang.InterruptedException}. Exit the election by disconnecting
     * from ZooKeeper directly instead of calling this method.
     */
    public static void stop() {
        synchronized (lock) {
            // Nothing to do if the election singleton was never created or
            // already torn down.
            if (singleton == null)
                return;
            LeaderSelector leaderSelector = singleton.leaderSelector;
            if (leaderSelector == null) {
                return;
            }
            LOG.info("节点退出zk选举"); // log: "node exits the zk election"
            leaderSelector.close();
            singleton = null; // allow a later restart to create a fresh election
            LOG.info("退出选举 完毕"); // log: "exit from election complete"
        }
    }
}
public class LibUtils { /** * Loads the specified library . < br > * < br > * The method will attempt to load the library using the usual * < code > System . loadLibrary < / code > call . In this case , the specified * dependent libraries are ignored , because they are assumed to be * loaded automatically in the same way as the main library . < br > * < br > * If the library can < b > not < / b > be loaded with the * < code > System . loadLibrary < / code > call , then this method will attempt * to load the file as a resource ( usually one that is contained in * a JAR file ) . In this case , the library is assumed to be located * in subdirectory called < code > " / lib " < / code > inside the JAR file . * The method will try to load a resource that has the platform - specific * { @ link # createLibraryFileName ( String ) library file name } from * this directory , extract it into the default directory for temporary * files , and load the library from there . < br > * < br > * In this case , the specified dependent libraries may also be loaded * as resources . They are assumed to be located in subdirectories * that are named according to the { @ link # osString ( ) } and * { @ link # archString ( ) } of the executing platform . For example , such * a library may be located in a directory inside the JAR that is * called < code > " / lib / windows / x86_64 " < / code > . These dependent libraries * will be extracted and loaded before the main library is loaded . * @ param libraryName The name of the library ( without a platform specific * prefix or file extension ) * @ param dependentLibraryNames The names of libraries that the library * to load depends on . If the library is loaded as a resource , then * it will be attempted to also load these libraries as resources , as * described above * @ throws UnsatisfiedLinkError if the native library * could not be loaded . */ public static void loadLibrary ( String libraryName , String ... dependentLibraryNames ) { } }
logger . log ( level , "Loading library: " + libraryName ) ; // First , try to load the specified library as a file // that is visible in the default search path Throwable throwableFromFile ; try { logger . log ( level , "Loading library as a file" ) ; System . loadLibrary ( libraryName ) ; logger . log ( level , "Loading library as a file DONE" ) ; return ; } catch ( Throwable t ) { logger . log ( level , "Loading library as a file FAILED" ) ; throwableFromFile = t ; } // Now try to load the library by extracting the // corresponding resource from the JAR file try { logger . log ( level , "Loading library as a resource" ) ; loadLibraryResource ( LIBRARY_PATH_IN_JAR , libraryName , "" , dependentLibraryNames ) ; logger . log ( level , "Loading library as a resource DONE" ) ; return ; } catch ( Throwable throwableFromResource ) { logger . log ( level , "Loading library as a resource FAILED" , throwableFromResource ) ; StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; pw . println ( "Error while loading native library \"" + libraryName + "\"" ) ; pw . println ( "Operating system name: " + System . getProperty ( "os.name" ) ) ; pw . println ( "Architecture : " + System . getProperty ( "os.arch" ) ) ; pw . println ( "Architecture bit size: " + System . getProperty ( "sun.arch.data.model" ) ) ; pw . println ( "---(start of nested stack traces)---" ) ; pw . println ( "Stack trace from the attempt to " + "load the library as a file:" ) ; throwableFromFile . printStackTrace ( pw ) ; pw . println ( "Stack trace from the attempt to " + "load the library as a resource:" ) ; throwableFromResource . printStackTrace ( pw ) ; pw . println ( "---(end of nested stack traces)---" ) ; pw . close ( ) ; throw new UnsatisfiedLinkError ( sw . toString ( ) ) ; }
public class AnnotatedTypes { /** * Returns the declaring { @ link AnnotatedType } of a given annotated . * For an { @ link AnnotatedMember } , { @ link AnnotatedMember # getDeclaringType ( ) } is returned . * For an { @ link AnnotatedParameter } , the declaring annotated type of { @ link AnnotatedParameter # getDeclaringCallable ( ) } is returned . * If the parameter is an { @ link AnnotatedType } , it is returned . * @ throws IllegalArgumentException if the annotated parameter is an unknown non - standard { @ link Annotated } subclass . */ public static AnnotatedType < ? > getDeclaringAnnotatedType ( Annotated annotated ) { } }
if ( annotated == null ) { throw new IllegalArgumentException ( "Annotated cannot be null" ) ; } if ( annotated instanceof AnnotatedType < ? > ) { return cast ( annotated ) ; } if ( annotated instanceof AnnotatedMember < ? > ) { return Reflections . < AnnotatedMember < ? > > cast ( annotated ) . getDeclaringType ( ) ; } if ( annotated instanceof AnnotatedParameter < ? > ) { return getDeclaringAnnotatedType ( Reflections . < AnnotatedParameter < ? > > cast ( annotated ) . getDeclaringCallable ( ) ) ; } throw new IllegalArgumentException ( "Unrecognized annotated " + annotated ) ;
public class NameSpaceBinderImpl { /** * Creates the binding name used in the java : global lookup . The format is * < app > / < module > / < ejbname > [ ! < fully qualified interface name ] for modules in an application and * < module > / < ejbname > [ ! < fully qualified interface name ] for stand alone modules . * @ param name The EJB name * @ return the key for the lookup . */ private String buildJavaGlobalName ( String name ) { } }
StringBuffer bindingName = new StringBuffer ( ) ; if ( ! moduleMetaData . getEJBApplicationMetaData ( ) . isStandaloneModule ( ) ) { bindingName . append ( moduleMetaData . getEJBApplicationMetaData ( ) . getLogicalName ( ) ) ; bindingName . append ( "/" ) ; } bindingName . append ( moduleMetaData . ivLogicalName ) ; bindingName . append ( "/" ) ; bindingName . append ( name ) ; return bindingName . toString ( ) ;
public class BaseBundleActivator {
    /**
     * Copies every entry from one dictionary into another.
     *
     * @param sourceDictionary the dictionary to read from; may be null (treated as empty)
     * @param destDictionary the dictionary to write into; if null a new {@link Hashtable}
     *        is created and used instead
     * @return the destination dictionary (never null) containing all source entries
     */
    public static Dictionary<String, String> putAll(Dictionary<String, String> sourceDictionary, Dictionary<String, String> destDictionary) {
        if (destDictionary == null) {
            destDictionary = new Hashtable<String, String>();
        }
        if (sourceDictionary == null) {
            // Nothing to copy.
            return destDictionary;
        }
        for (Enumeration<String> keys = sourceDictionary.keys(); keys.hasMoreElements();) {
            String key = keys.nextElement();
            destDictionary.put(key, sourceDictionary.get(key));
        }
        return destDictionary;
    }
}
public class EventHubConnectionsInner { /** * Returns an Event Hub connection . * @ param resourceGroupName The name of the resource group containing the Kusto cluster . * @ param clusterName The name of the Kusto cluster . * @ param databaseName The name of the database in the Kusto cluster . * @ param eventHubConnectionName The name of the event hub connection . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the EventHubConnectionInner object */ public Observable < EventHubConnectionInner > getAsync ( String resourceGroupName , String clusterName , String databaseName , String eventHubConnectionName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , clusterName , databaseName , eventHubConnectionName ) . map ( new Func1 < ServiceResponse < EventHubConnectionInner > , EventHubConnectionInner > ( ) { @ Override public EventHubConnectionInner call ( ServiceResponse < EventHubConnectionInner > response ) { return response . body ( ) ; } } ) ;
public class SSTableExport { /** * Enumerate row keys from an SSTableReader and write the result to a PrintStream . * @ param desc the descriptor of the file to export the rows from * @ param outs PrintStream to write the output to * @ param metadata Metadata to print keys in a proper format * @ throws IOException on failure to read / write input / output */ public static void enumeratekeys ( Descriptor desc , PrintStream outs , CFMetaData metadata ) throws IOException { } }
KeyIterator iter = new KeyIterator ( desc ) ; try { DecoratedKey lastKey = null ; while ( iter . hasNext ( ) ) { DecoratedKey key = iter . next ( ) ; // validate order of the keys in the sstable if ( lastKey != null && lastKey . compareTo ( key ) > 0 ) throw new IOException ( "Key out of order! " + lastKey + " > " + key ) ; lastKey = key ; outs . println ( metadata . getKeyValidator ( ) . getString ( key . getKey ( ) ) ) ; checkStream ( outs ) ; // flushes } } finally { iter . close ( ) ; }
public class EncryptionProviderFactory { /** * Method which will return a new instance of { @ link EncryptionProvider } * based on the settings of the key passed in . * @ param key * @ return * @ return * @ throws UnsupportedAlgorithmException * @ throws UnsupportedKeySizeException */ public static EncryptionProvider getProvider ( final Key key ) throws UnsupportedAlgorithmException , UnsupportedKeySizeException { } }
String algorithm = key . getAlgorithm ( ) ; SupportedKeyGenAlgorithms keyAlgorithm = getAlgorithm ( algorithm ) ; switch ( keyAlgorithm ) { case AES : return new AESEncryptionProvider ( key ) ; case DES : return new DESEdeEncryptionProvider ( key ) ; default : throw new UnsupportedAlgorithmException ( "Algorithm [" + keyAlgorithm + "] is not supported." ) ; }
public class ModConfigPanel { /** * Never ever call this Method from an event handler for those * two buttons - endless loop ! * @ param newLoopValue * @ since 11.01.2012 */ public void setLoopValue ( int newLoopValue ) { } }
if ( newLoopValue == Helpers . PLAYER_LOOP_DEACTIVATED ) { getPlayerSetUp_fadeOutLoops ( ) . setSelected ( false ) ; getPlayerSetUp_ignoreLoops ( ) . setSelected ( false ) ; } else if ( newLoopValue == Helpers . PLAYER_LOOP_FADEOUT ) { getPlayerSetUp_fadeOutLoops ( ) . setSelected ( true ) ; getPlayerSetUp_ignoreLoops ( ) . setSelected ( false ) ; } else if ( newLoopValue == Helpers . PLAYER_LOOP_IGNORE ) { getPlayerSetUp_fadeOutLoops ( ) . setSelected ( false ) ; getPlayerSetUp_ignoreLoops ( ) . setSelected ( true ) ; }
public class AbstractExecutableMemberWriter {
    /**
     * Add the summary link for the member.
     *
     * @param context the id of the context where the link will be printed
     * @param cd the classDoc that we should link to
     * @param member the member being linked to
     * @param tdSummary the content tree to which the link will be added
     */
    protected void addSummaryLink(LinkInfoImpl.Kind context, ClassDoc cd, ProgramElementDoc member, Content tdSummary) {
        ExecutableMemberDoc emd = (ExecutableMemberDoc) member;
        String name = emd.name();
        // Wrap the member link in a span carrying the summary-table name style.
        Content memberLink = HtmlTree.SPAN(HtmlStyle.memberNameLink, writer.getDocLink(context, cd, (MemberDoc) emd, name, false));
        Content code = HtmlTree.CODE(memberLink);
        // NOTE(review): the last argument is an indent hint of name.length() - 1;
        // presumably used to align wrapped parameters under the name - confirm
        // against addParameters' contract.
        addParameters(emd, false, code, name.length() - 1);
        tdSummary.addContent(code);
    }
}
public class Http2Ping {
    /**
     * Registers a callback that is invoked when the ping operation completes. If this ping operation
     * is already completed, the callback is invoked immediately.
     *
     * @param callback the callback to invoke
     * @param executor the executor to use
     */
    public void addCallback(final ClientTransport.PingCallback callback, Executor executor) {
        Runnable runnable;
        synchronized (this) {
            if (!completed) {
                // Ping still outstanding: queue the callback for later delivery.
                callbacks.put(callback, executor);
                return;
            }
            // otherwise, invoke callback immediately (but not while holding lock)
            runnable = this.failureCause != null ? asRunnable(callback, failureCause) : asRunnable(callback, roundTripTimeNanos);
        }
        // Deliberately outside the synchronized block: never run alien callback
        // code while holding this object's monitor.
        doExecute(executor, runnable);
    }
}
public class PolicyDefinitionsInner {
    /**
     * Creates or updates a policy definition at management group level.
     *
     * @param policyDefinitionName The name of the policy definition to create.
     * @param managementGroupId The ID of the management group.
     * @param parameters The policy definition properties.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PolicyDefinitionInner object
     */
    public Observable<ServiceResponse<PolicyDefinitionInner>> createOrUpdateAtManagementGroupWithServiceResponseAsync(String policyDefinitionName, String managementGroupId, PolicyDefinitionInner parameters) {
        // Fail fast on missing required arguments before issuing the request.
        if (policyDefinitionName == null) {
            throw new IllegalArgumentException("Parameter policyDefinitionName is required and cannot be null.");
        }
        if (managementGroupId == null) {
            throw new IllegalArgumentException("Parameter managementGroupId is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Structural validation of the request payload.
        Validator.validate(parameters);
        return service.createOrUpdateAtManagementGroup(policyDefinitionName, managementGroupId, parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PolicyDefinitionInner>>>() {
                @Override
                public Observable<ServiceResponse<PolicyDefinitionInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed envelope.
                        ServiceResponse<PolicyDefinitionInner> clientResponse = createOrUpdateAtManagementGroupDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/validation failures through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class NtlmUtil {
    /**
     * Generate the ANSI DES hash for the password associated with these credentials.
     *
     * @param tc the CIFS context supplying the configuration (OEM encoding)
     * @param password the account password
     * @param challenge the 8-byte server challenge
     * @return the calculated 24-byte response
     * @throws GeneralSecurityException on cryptographic failure
     */
    static public byte[] getPreNTLMResponse(CIFSContext tc, String password, byte[] challenge) throws GeneralSecurityException {
        byte[] p14 = new byte[14];   // zero-padded, truncated OEM password
        byte[] p21 = new byte[21];   // intermediate hash (zero-padded to 21 bytes)
        byte[] p24 = new byte[24];   // final response
        byte[] passwordBytes = Strings.getOEMBytes(password, tc.getConfig());
        int passwordLength = passwordBytes.length;
        // Only encrypt the first 14 bytes of the password for Pre 0.12 NT LM
        if (passwordLength > 14) {
            passwordLength = 14;
        }
        System.arraycopy(passwordBytes, 0, p14, 0, passwordLength);
        // NOTE(review): E() is presumably the LM-style DES encryption step -
        // first pass hashes the padded password with the fixed magic S8,
        // second pass encrypts that hash with the server challenge. Confirm
        // against E()'s implementation.
        NtlmUtil.E(p14, NtlmUtil.S8, p21);
        NtlmUtil.E(p21, challenge, p24);
        return p24;
    }
}
public class PluginSqlMapDao { /** * used in tests */ @ Override public void deleteAllPlugins ( ) { } }
transactionTemplate . execute ( new TransactionCallbackWithoutResult ( ) { @ Override protected void doInTransactionWithoutResult ( TransactionStatus status ) { sessionFactory . getCurrentSession ( ) . createQuery ( "DELETE FROM " + Plugin . class . getSimpleName ( ) ) . executeUpdate ( ) ; } } ) ;
public class DataSetLookupEditor { /** * View notifications */ void onDataSetSelected ( ) { } }
String selectedUUID = view . getSelectedDataSetId ( ) ; for ( DataSetDef dataSetDef : _dataSetDefList ) { if ( dataSetDef . getUUID ( ) . equals ( selectedUUID ) ) { fetchMetadata ( selectedUUID , new RemoteCallback < DataSetMetadata > ( ) { public void callback ( DataSetMetadata metadata ) { dataSetLookup = lookupConstraints . newDataSetLookup ( metadata ) ; updateDataSetLookup ( ) ; changeEvent . fire ( new DataSetLookupChangedEvent ( dataSetLookup ) ) ; } } ) ; } }
public class PartitionLevelWatermarker { /** * Return the previous high watermark if found in previous state . Else returns 0 * { @ inheritDoc } * @ see org . apache . gobblin . data . management . conversion . hive . watermarker . HiveSourceWatermarker # getPreviousHighWatermark ( org . apache . hadoop . hive . ql . metadata . Partition ) */ @ Override public LongWatermark getPreviousHighWatermark ( Partition partition ) { } }
if ( this . previousWatermarks . hasPartitionWatermarks ( tableKey ( partition . getTable ( ) ) ) ) { // If partition has a watermark return . if ( this . previousWatermarks . get ( tableKey ( partition . getTable ( ) ) ) . containsKey ( partitionKey ( partition ) ) ) { return new LongWatermark ( this . previousWatermarks . getPartitionWatermark ( tableKey ( partition . getTable ( ) ) , partitionKey ( partition ) ) ) ; } } return new LongWatermark ( 0 ) ;
public class Matrix3d {
    /**
     * Component-wise (Hadamard) product of this matrix and {@code other},
     * stored into {@code dest}: {@code dest[r][c] = this[r][c] * other[r][c]}.
     *
     * @param other the matrix to multiply element-wise with this one
     * @param dest the matrix receiving the result
     * @return dest
     * @see org.joml.Matrix3dc#mulComponentWise(org.joml.Matrix3dc, org.joml.Matrix3d)
     */
    public Matrix3d mulComponentWise(Matrix3dc other, Matrix3d dest) {
        dest.m00 = m00 * other.m00();
        dest.m01 = m01 * other.m01();
        dest.m02 = m02 * other.m02();
        dest.m10 = m10 * other.m10();
        dest.m11 = m11 * other.m11();
        dest.m12 = m12 * other.m12();
        dest.m20 = m20 * other.m20();
        dest.m21 = m21 * other.m21();
        dest.m22 = m22 * other.m22();
        return dest;
    }
}
public class Zipper {
    /**
     * Write a JSON Array in JSONzip's compressed bit format.
     *
     * @param jsonarray The JSONArray to write.
     * @throws JSONException If the write fails.
     */
    private void write(JSONArray jsonarray) throws JSONException {
        // JSONzip has three encodings for arrays:
        // The array is empty (zipEmptyArray).
        // First value in the array is a string (zipArrayString).
        // First value in the array is not a string (zipArrayValue).
        boolean stringy = false;
        int length = jsonarray.length();
        if (length == 0) {
            write(zipEmptyArray, 3);
        } else {
            Object value = jsonarray.get(0);
            if (value == null) {
                // Normalize Java null to the JSON null sentinel.
                value = JSONObject.NULL;
            }
            if (value instanceof String) {
                stringy = true;
                write(zipArrayString, 3);
                writeString((String) value);
            } else {
                write(zipArrayValue, 3);
                writeValue(value);
            }
            for (int i = 1; i < length; i += 1) {
                if (probe) {
                    log();
                }
                value = jsonarray.get(i);
                if (value == null) {
                    value = JSONObject.NULL;
                }
                // When this element's kind (string vs non-string) differs from
                // the array's leading kind, an extra zero bit flags the switch.
                if (value instanceof String != stringy) {
                    zero();
                }
                // A one bit announces another element follows.
                one();
                if (value instanceof String) {
                    writeString((String) value);
                } else {
                    writeValue(value);
                }
            }
            // Terminator bits marking the end of the element stream.
            zero();
            zero();
        }
    }
}
public class WebservicesType {
    /**
     * Describes this element for diagnostics: delegates to the superclass,
     * then reports the version and (if set) the webservice descriptions.
     * {@inheritDoc}
     */
    @Override
    public void describe(Diagnostics diag) {
        super.describe(diag);
        diag.describe("version", version);
        diag.describeIfSet("webservice-description", this.webservice_descriptions);
    }
}
public class SimpleMatrix { /** * Creates a new vector which is drawn from a multivariate normal distribution with zero mean * and the provided covariance . * @ see CovarianceRandomDraw _ DDRM * @ param covariance Covariance of the multivariate normal distribution * @ return Vector randomly drawn from the distribution */ public static SimpleMatrix randomNormal ( SimpleMatrix covariance , Random random ) { } }
SimpleMatrix found = new SimpleMatrix ( covariance . numRows ( ) , 1 , covariance . getType ( ) ) ; switch ( found . getType ( ) ) { case DDRM : { CovarianceRandomDraw_DDRM draw = new CovarianceRandomDraw_DDRM ( random , ( DMatrixRMaj ) covariance . getMatrix ( ) ) ; draw . next ( ( DMatrixRMaj ) found . getMatrix ( ) ) ; } break ; case FDRM : { CovarianceRandomDraw_FDRM draw = new CovarianceRandomDraw_FDRM ( random , ( FMatrixRMaj ) covariance . getMatrix ( ) ) ; draw . next ( ( FMatrixRMaj ) found . getMatrix ( ) ) ; } break ; default : throw new IllegalArgumentException ( "Matrix type is currently not supported" ) ; } return found ;
public class BufferUtils { public static void writeTo ( ByteBuffer buffer , OutputStream out ) throws IOException { } }
if ( buffer . hasArray ( ) ) { out . write ( buffer . array ( ) , buffer . arrayOffset ( ) + buffer . position ( ) , buffer . remaining ( ) ) ; // update buffer position , in way similar to non - array version of writeTo buffer . position ( buffer . position ( ) + buffer . remaining ( ) ) ; } else { byte [ ] bytes = new byte [ TEMP_BUFFER_SIZE ] ; while ( buffer . hasRemaining ( ) ) { int byteCountToWrite = Math . min ( buffer . remaining ( ) , TEMP_BUFFER_SIZE ) ; buffer . get ( bytes , 0 , byteCountToWrite ) ; out . write ( bytes , 0 , byteCountToWrite ) ; } }
public class AlluxioJobMasterProcess {
    /**
     * Starts the gRPC server. The AlluxioMaster registers the Services of registered
     * {@link Master}s and meta services. Blocks the calling thread until the
     * server terminates.
     */
    protected void startServingRPCServer() {
        try {
            // Tear down the placeholder server that rejects requests during startup.
            stopRejectingRpcServer();
            LOG.info("Starting gRPC server on address {}", mRpcBindAddress);
            GrpcServerBuilder serverBuilder = GrpcServerBuilder.forAddress(mRpcConnectAddress.getHostName(), mRpcBindAddress, ServerConfiguration.global());
            registerServices(serverBuilder, mJobMaster.getServices());
            mGrpcServer = serverBuilder.build().start();
            LOG.info("Started gRPC server on address {}", mRpcConnectAddress);
            // Wait until the server is shut down.
            mGrpcServer.awaitTermination();
        } catch (IOException e) {
            // Startup failure is fatal for the process; surface it unchecked.
            throw new RuntimeException(e);
        }
    }
}
public class InboundTransmissionParser {
    /**
     * Invoked to parse a conversation header structure from the supplied buffer.
     * May be invoked multiple times to incrementally parse the structure.
     * Once the structure has been fully parsed, transitions the state machine
     * into the appropriate next state based on the layout of the transmission.
     *
     * @param contextBuffer buffer holding (part of) the conversation header
     */
    private void parseConversationHeader(WsByteBuffer contextBuffer) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "parseConversationHeader", contextBuffer);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            JFapUtils.debugTraceWsByteBufferInfo(this, tc, contextBuffer, "contextBuffer");
        // Accumulate bytes across calls; readData returns null until a whole
        // conversation header has been buffered.
        WsByteBuffer parseConversationBuffer = readData(contextBuffer, unparsedConversationHeader);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            JFapUtils.debugTraceWsByteBufferInfo(this, tc, parseConversationBuffer, "parseConversationBuffer");
        if (parseConversationBuffer != null) {
            // The conversation header is two 16-bit fields read in order.
            conversationHeaderFields.conversationId = parseConversationBuffer.getShort();
            conversationHeaderFields.requestNumber = parseConversationBuffer.getShort();
            transmissionPayloadRemaining -= JFapChannelConstants.SIZEOF_CONVERSATION_HEADER;
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "conversationId:" + conversationHeaderFields.conversationId + " requestNumber:" + conversationHeaderFields.requestNumber);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "transmissionLayout:" + transmissionLayout);
            if (transmissionLayout == JFapChannelConstants.XMIT_CONVERSATION) {
                // Payload length = segment length minus the two headers already consumed.
                transmissionPayloadDataLength = primaryHeaderFields.segmentLength - (JFapChannelConstants.SIZEOF_PRIMARY_HEADER + JFapChannelConstants.SIZEOF_CONVERSATION_HEADER);
                state = STATE_PARSE_CONVERSATION_PAYLOAD;
            } else if (transmissionLayout == JFapChannelConstants.XMIT_SEGMENT_MIDDLE) {
                transmissionPayloadDataLength = primaryHeaderFields.segmentLength - (JFapChannelConstants.SIZEOF_PRIMARY_HEADER + JFapChannelConstants.SIZEOF_CONVERSATION_HEADER);
                state = STATE_PARSE_SEGMENT_MIDDLE_PAYLOAD;
            } else if (transmissionLayout == JFapChannelConstants.XMIT_SEGMENT_END) {
                transmissionPayloadDataLength = primaryHeaderFields.segmentLength - (JFapChannelConstants.SIZEOF_PRIMARY_HEADER + JFapChannelConstants.SIZEOF_CONVERSATION_HEADER);
                state = STATE_PARSE_SEGMENT_END_PAYLOAD;
            } else if (transmissionLayout == JFapChannelConstants.XMIT_SEGMENT_START) {
                // Segment-start transmissions carry a further header to parse next.
                state = STATE_PARSING_SEGMENT_START_HEADER;
            } else if (transmissionLayout == JFapChannelConstants.XMIT_PRIMARY_ONLY) {
                throwable = new SIErrorException(nls.getFormattedMessage("TRANSPARSER_INTERNAL_SICJ0054", null, "TRANSPARSER_INTERNAL_SICJ0054")); // D226223
                // This FFDC was generated because we entered the method to parse conversation
                // headers when the transmission appears to only have a primary header.
                FFDCFilter.processException(throwable, "com.ibm.ws.sib.jfapchannel.impl.InboundTransmissionParser", JFapChannelConstants.INBOUNDXMITPARSER_PARSECONVHDR_01, getFormattedBytes(contextBuffer)); // D267629
                state = STATE_ERROR;
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Invalid state detected - Entered parseConversationHeader method when transmission only contained a primary header");
            } else if (transmissionLayout == JFapChannelConstants.XMIT_LAYOUT_UNKNOWN) {
                throwable = new SIErrorException(nls.getFormattedMessage("TRANSPARSER_INTERNAL_SICJ0054", null, "TRANSPARSER_INTERNAL_SICJ0054")); // D226223
                // This FFDC was generated because we encountered a segment type for which
                // we were unable to determine the layout of the segment.
                FFDCFilter.processException(throwable, "com.ibm.ws.sib.jfapchannel.impl.InboundTransmissionParser", JFapChannelConstants.INBOUNDXMITPARSER_PARSECONVHDR_02, getFormattedBytes(contextBuffer)); // D267629
                state = STATE_ERROR;
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Invalid state detected - Entered parseConversationHeader method when transmission layout could not be determined");
            } else {
                throwable = new SIErrorException(nls.getFormattedMessage("TRANSPARSER_INTERNAL_SICJ0054", null, "TRANSPARSER_INTERNAL_SICJ0054")); // D226223
                // This FFDC was generated because the method used to determine transmission
                // layout returned a value that we were not expecting.
                FFDCFilter.processException(throwable, "com.ibm.ws.sib.jfapchannel.impl.InboundTransmissionParser", JFapChannelConstants.INBOUNDXMITPARSER_PARSECONVHDR_03, getFormattedBytes(contextBuffer)); // D267629
                state = STATE_ERROR;
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Unknown transmission layout detected in parseConversationHeader");
            }
        } else {
            // Not enough bytes buffered yet; signal the caller to supply more.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "need more data");
            needMoreData = true;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "parseConversationHeader");
    }
}
public class PippoSettings {
    /**
     * Override the setting at runtime with the specified value.
     * This change does not persist.
     *
     * @param name the setting name
     * @param value the float value, stored as its decimal string form
     */
    public void overrideSetting(String name, float value) {
        overrides.put(name, Float.toString(value));
    }
}
public class TemplateFilter {
    /**
     * Method allow to find templates that contains some substring in name.
     * Filtering is case insensitive.
     *
     * @param names is not null list of name substrings
     * @return {@link TemplateFilter} this filter, for chaining
     * @throws java.lang.NullPointerException if {@code names} is null
     */
    public TemplateFilter nameContains(String... names) {
        allItemsNotNull(names, "Name substrings");
        // AND a new condition onto the accumulated predicate: the template name
        // must contain (ignoring case) at least one of the given substrings.
        predicate = predicate.and(combine(TemplateMetadata::getName, in(asList(names), Predicates::containsIgnoreCase)));
        return this;
    }
}
public class BaseNeo4jEntityQueries { /** * Example : * MATCH ( owner : ENTITY : Car { ` carId . maker ` : { 0 } , ` carId . model ` : { 1 } } ) < - [ r : tires ] - ( target ) * OPTIONAL MATCH ( target ) - [ x * 1 . . ] - > ( e : EMBEDDED ) * RETURN id ( target ) , extract ( n IN x | type ( n ) ) , x , e ORDER BY id ( target ) * or for embedded associations : * MATCH ( owner : ENTITY : StoryGame { id : { 0 } } ) - [ : evilBranch ] - > ( : EMBEDDED ) - [ r : additionalEndings ] - > ( target : EMBEDDED ) * OPTIONAL MATCH ( target ) - [ x * 1 . . ] - > ( e : EMBEDDED ) * RETURN id ( target ) , extract ( n IN x | type ( n ) ) , x , e ORDER BY id ( target ) */ protected String getFindAssociationTargetEmbeddedValues ( String relationshipType , AssociationKeyMetadata associationKeyMetadata ) { } }
StringBuilder queryBuilder = findAssociationPartialQuery ( relationshipType , associationKeyMetadata ) ; queryBuilder . append ( "OPTIONAL MATCH (target) -[x*1..]->(e:EMBEDDED) " ) ; // Should we split this in two Queries ? queryBuilder . append ( "RETURN id(target), extract(n IN x| type(n)), x, e ORDER BY id(target)" ) ; return queryBuilder . toString ( ) ;
public class FileColumn { /** * Returns the data type for the given value . * @ param value The data type value * @ return The code for the data type */ private short getDataType ( String value ) { } }
short ret = STRING_TYPE ; if ( value . equals ( "number" ) ) ret = NUMBER_TYPE ; else if ( value . equals ( "integer" ) ) ret = INTEGER_TYPE ; else if ( value . equals ( "decimal" ) ) ret = DECIMAL_TYPE ; else if ( value . equals ( "seconds" ) ) ret = SECONDS_TYPE ; else if ( value . equals ( "datetime" ) ) ret = DATETIME_TYPE ; else if ( value . equals ( "boolean" ) ) ret = BOOLEAN_TYPE ; return ret ;
public class SysViewWorkspaceInitializer {
    /**
     * Initializes the workspace, restoring it from backup storage if it has
     * not been initialized yet.
     * {@inheritDoc}
     *
     * @return the workspace root node data
     * @throws RepositoryException if the restore fails for any reason
     */
    public NodeData initWorkspace() throws RepositoryException {
        // Already initialized: nothing to restore, just return the root.
        if (isWorkspaceInitialized()) {
            return (NodeData) dataManager.getItemData(Constants.ROOT_UUID);
        }
        long start = System.currentTimeMillis();
        // Flag guards concurrent observers while the restore runs.
        isRestoreInProgress = true;
        try {
            doRestore();
        } catch (Throwable e) // NOSONAR
        {
            // Wrap any failure (including Errors) as a RepositoryException.
            throw new RepositoryException(e);
        } finally {
            isRestoreInProgress = false;
        }
        final NodeData root = (NodeData) dataManager.getItemData(Constants.ROOT_UUID);
        LOG.info("Workspace [" + workspaceName + "] restored from storage " + restorePath + " in " + (System.currentTimeMillis() - start) * 1d / 1000 + "sec");
        return root;
    }
}
public class SimpleCycleBasis {
    /**
     * Minimizes the cycle basis starting at {@code startIndex}: each cycle is
     * replaced by the shortest cycle found via the auxiliary-graph search, and
     * the incidence matrix row is re-inserted with Gaussian elimination.
     * No-op if the basis is already minimized.
     *
     * @param startIndex index of the first cycle to minimize
     */
    private void minimize(int startIndex) {
        if (isMinimized)
            return;
        // Implementation of "Algorithm 1" from [BGdV04]
        boolean[][] a = getCycleEdgeIncidenceMatrix();
        for (int i = startIndex; i < cycles.size(); i++) {
            // "Subroutine 2"
            // Construct kernel vector u
            boolean[] u = constructKernelVector(edgeList.size(), a, i);
            // Construct auxiliary graph gu
            AuxiliaryGraph gu = new AuxiliaryGraph(graph, u);
            SimpleCycle shortestCycle = cycles.get(i);
            Iterator vertexIterator = graph.vertexSet().iterator();
            while (vertexIterator.hasNext()) {
                Object vertex = vertexIterator.next();
                // check if the vertex is incident to an edge with u[edge] == 1
                boolean shouldSearchCycle = false;
                Collection incidentEdges = graph.edgesOf(vertex);
                Iterator edgeIterator = incidentEdges.iterator();
                while (edgeIterator.hasNext()) {
                    Edge edge = (Edge) edgeIterator.next();
                    int index = getEdgeIndex(edge);
                    if (u[index]) {
                        shouldSearchCycle = true;
                        break;
                    }
                }
                if (shouldSearchCycle) {
                    // The two copies of this vertex in the auxiliary graph.
                    Object auxVertex0 = gu.auxVertex0(vertex);
                    Object auxVertex1 = gu.auxVertex1(vertex);
                    // Search for shortest path
                    List auxPath = BFSShortestPath.findPathBetween(gu, auxVertex0, auxVertex1);
                    List edgesOfNewCycle = new Vector();
                    Object v = vertex;
                    edgeIterator = auxPath.iterator();
                    while (edgeIterator.hasNext()) {
                        Edge auxEdge = (Edge) edgeIterator.next();
                        // Get the edge corresponding to the aux. edge
                        Edge e = (Edge) gu.edge(auxEdge);
                        // Toggle membership: an edge visited twice cancels out
                        // (symmetric-difference semantics).
                        if (edgesOfNewCycle.contains(e)) {
                            edgesOfNewCycle.remove(e);
                        } else {
                            edgesOfNewCycle.add(e);
                        }
                        // Get next vertex on path
                        v = e.oppositeVertex(v);
                    }
                    SimpleCycle newCycle = new SimpleCycle(graph, edgesOfNewCycle);
                    // Keep the lighter of the current best and the new candidate.
                    if (newCycle.weight() < shortestCycle.weight()) {
                        shortestCycle = newCycle;
                    }
                }
            }
            cycles.set(i, shortestCycle);
            // insert the new cycle into the matrix
            for (int j = 0; j < edgeList.size(); j++) {
                a[i][j] = shortestCycle.containsEdge((Edge) edgeList.get(j));
            }
            // perform gaussian elimination on the inserted row
            for (int j = 0; j < i; j++) {
                if (a[i][j]) {
                    for (int k = 0; k < edgeList.size(); k++) {
                        // Boolean XOR: != over booleans is addition mod 2.
                        a[i][k] = (a[i][k] != a[j][k]);
                    }
                }
            }
        }
        isMinimized = true;
        // System.out.println("after minimization:");
        // printIncidenceMatrix();
    }
}
public class JSONAssert {
    /**
     * Asserts that the JSON provided does NOT match the expected string. If it does,
     * an {@link AssertionError} is thrown.
     *
     * @param message Error message to be displayed in case of assertion failure
     * @param expectedStr Expected JSON string
     * @param actualStr String to compare
     * @param compareMode Specifies which comparison mode to use
     * @throws JSONException JSON parsing error
     */
    public static void assertNotEquals(String message, String expectedStr, String actualStr, JSONCompareMode compareMode) throws JSONException {
        JSONCompareResult result = JSONCompare.compareJSON(expectedStr, actualStr, compareMode);
        // A *passing* comparison means the documents DO match - which is exactly
        // the failure condition for assertNotEquals.
        if (result.passed()) {
            throw new AssertionError(getCombinedMessage(message, result.getMessage()));
        }
    }
}
public class CoreDocumentSynchronizationConfig {
    /**
     * Sets that there are some pending writes that occurred at a time for an associated
     * locally emitted change event. This variant maintains the last version set.
     *
     * @param atTime the time at which the write occurred.
     * @param changeEvent the description of the write/change.
     */
    public void setSomePendingWritesAndSave(final long atTime, final ChangeEvent<BsonDocument> changeEvent) {
        // All mutations happen under the document's write lock so the in-memory
        // state and the persisted copy stay consistent.
        docLock.writeLock().lock();
        try {
            // if we were frozen
            if (isPaused) {
                // unfreeze the document due to the local write
                setPaused(false);
                // and now the unfrozen document is now stale
                setStale(true);
            }
            // Merge this change with any still-uncommitted prior change.
            this.lastUncommittedChangeEvent = coalesceChangeEvents(this.lastUncommittedChangeEvent, changeEvent);
            this.lastResolution = atTime;
            // Persist the updated config document.
            docsColl.replaceOne(getDocFilter(namespace, documentId), this);
        } finally {
            docLock.writeLock().unlock();
        }
    }
}
public class Group { /** * The keys that are included in this group . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setKeys ( java . util . Collection ) } or { @ link # withKeys ( java . util . Collection ) } if you want to override the * existing values . * @ param keys * The keys that are included in this group . * @ return Returns a reference to this object so that method calls can be chained together . */ public Group withKeys ( String ... keys ) { } }
if ( this . keys == null ) { setKeys ( new java . util . ArrayList < String > ( keys . length ) ) ; } for ( String ele : keys ) { this . keys . add ( ele ) ; } return this ;
public class Expressions {
    /**
     * Create a new NumberExpression wrapping the given literal as a constant.
     *
     * @param value Number
     * @return new NumberExpression
     */
    public static <T extends Number & Comparable<?>> NumberExpression<T> asNumber(T value) {
        // Delegate to the Expression-based overload via a constant expression.
        return asNumber(constant(value));
    }
}
public class PTBConstituent {
    /**
     * Setter for ref - sets the reference from the null constituent to the
     * corresponding lexicalized constituent.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setRef(Constituent v) {
        // Generated JCas code: verify the "ref" feature exists in the type system
        // before writing (featOkTst is the generated feature-check switch).
        if (PTBConstituent_Type.featOkTst && ((PTBConstituent_Type) jcasType).casFeat_ref == null)
            jcasType.jcas.throwFeatMissing("ref", "de.julielab.jules.types.PTBConstituent");
        // Store v as an FS reference in the low-level CAS.
        jcasType.ll_cas.ll_setRefValue(addr, ((PTBConstituent_Type) jcasType).casFeatCode_ref, jcasType.ll_cas.ll_getFSRef(v));
    }
}