signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SepaUtil {

    /**
     * Inserts an index into a property key. If no index is given, the key is
     * returned unchanged.
     *
     * @param key the key to augment with an index
     * @param index the index, or {@code null} if no index should be set
     * @return the key with the index inserted before the first dot, or appended
     *         when the key contains no dot
     */
    public static String insertIndex(String key, Integer index) {
        if (index == null) {
            return key;
        }
        final String suffix = "[" + index + "]";
        final int dot = key.indexOf('.');
        if (dot < 0) {
            return key + suffix;
        }
        return key.substring(0, dot) + suffix + key.substring(dot);
    }
}
public class ConfigClient { /** * Creates a sink that exports specified log entries to a destination . The export of * newly - ingested log entries begins immediately , unless the sink ' s ` writer _ identity ` is not * permitted to write to the destination . A sink can export log entries only from the resource * owning the sink . * < p > Sample code : * < pre > < code > * try ( ConfigClient configClient = ConfigClient . create ( ) ) { * ParentName parent = ProjectName . of ( " [ PROJECT ] " ) ; * LogSink sink = LogSink . newBuilder ( ) . build ( ) ; * LogSink response = configClient . createSink ( parent . toString ( ) , sink ) ; * < / code > < / pre > * @ param parent Required . The resource in which to create the sink : * < p > " projects / [ PROJECT _ ID ] " " organizations / [ ORGANIZATION _ ID ] " * " billingAccounts / [ BILLING _ ACCOUNT _ ID ] " " folders / [ FOLDER _ ID ] " * < p > Examples : ` " projects / my - logging - project " ` , ` " organizations / 123456789 " ` . * @ param sink Required . The new sink , whose ` name ` parameter is a sink identifier that is not * already in use . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final LogSink createSink ( String parent , LogSink sink ) { } }
CreateSinkRequest request = CreateSinkRequest . newBuilder ( ) . setParent ( parent ) . setSink ( sink ) . build ( ) ; return createSink ( request ) ;
public class BsSearchLogCA { public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsSearchLogCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsSearchLogCA > aggsLambda ) { } }
SearchLogCQ cq = new SearchLogCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; } FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; } if ( aggsLambda != null ) { SearchLogCA ca = new SearchLogCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; }
public class ServicesInner {

    /**
     * Get DMS Service Instance.
     * The services resource is the top-level resource that represents the Data Migration
     * Service. The GET method retrieves information about a service instance.
     *
     * @param groupName Name of the resource group
     * @param serviceName Name of the service
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ApiErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DataMigrationServiceInner object if successful.
     */
    public DataMigrationServiceInner getByResourceGroup(String groupName, String serviceName) {
        // Synchronous facade: blocks on the async variant and unwraps the service
        // response body. NOTE(review): blocks the calling thread until the HTTP
        // call completes.
        return getByResourceGroupWithServiceResponseAsync(groupName, serviceName).toBlocking().single().body();
    }
}
public class LdapAdapter { @ Override protected Scalar transform ( ActivityContext context , Scalar scalar ) throws ActivityException { } }
if ( ! ( scalar . get ( ) instanceof LdapQuery ) ) { throw new ActivityException ( "Input to LdapAdapter must be an LDAP query" ) ; } LdapQuery query = ( LdapQuery ) scalar . get ( ) ; logger . trace ( "running LDAP query: '{}'" , query . toString ( ) ) ; return null ;
public class Matrix3D {

    /**
     * Calculates the determinant of a 3x3 matrix given its nine entries in
     * row-major order (t&lt;row&gt;&lt;col&gt;).
     *
     * @return the determinant via cofactor expansion along the first row
     */
    private double determinant3x3(double t00, double t01, double t02,
                                  double t10, double t11, double t12,
                                  double t20, double t21, double t22) {
        // 2x2 minors for the first-row cofactor expansion.
        final double minor0 = t11 * t22 - t12 * t21;
        final double minor1 = t12 * t20 - t10 * t22;
        final double minor2 = t10 * t21 - t11 * t20;
        return t00 * minor0 + t01 * minor1 + t02 * minor2;
    }
}
public class Weeks { /** * Obtains an instance of < code > Weeks < / code > that may be cached . * < code > Weeks < / code > is immutable , so instances can be cached and shared . * This factory method provides access to shared instances . * @ param weeks the number of weeks to obtain an instance for * @ return the instance of Weeks */ public static Weeks weeks ( int weeks ) { } }
switch ( weeks ) { case 0 : return ZERO ; case 1 : return ONE ; case 2 : return TWO ; case 3 : return THREE ; case Integer . MAX_VALUE : return MAX_VALUE ; case Integer . MIN_VALUE : return MIN_VALUE ; default : return new Weeks ( weeks ) ; }
public class WhitespaceLexer {

    /**
     * Refills the input buffer.
     * NOTE(review): generated JFlex scanner code — the zz* buffer/position fields
     * and zzReader are declared elsewhere in this class.
     *
     * @return <code>false</code>, iff there was new input.
     * @exception java.io.IOException if any I/O-Error occurs
     */
    private boolean zzRefill() throws java.io.IOException {
        /* first: make room (if you can) */
        if (zzStartRead > 0) {
            System.arraycopy(zzBuffer, zzStartRead, zzBuffer, 0, zzEndRead - zzStartRead);

            /* translate stored positions */
            zzEndRead -= zzStartRead;
            zzCurrentPos -= zzStartRead;
            zzMarkedPos -= zzStartRead;
            zzStartRead = 0;
        }

        /* is the buffer big enough? */
        if (zzCurrentPos >= zzBuffer.length) {
            /* if not: blow it up */
            char newBuffer[] = new char[zzCurrentPos * 2];
            System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
            zzBuffer = newBuffer;
        }

        /* finally: fill the buffer with new input */
        int numRead = zzReader.read(zzBuffer, zzEndRead, zzBuffer.length - zzEndRead);

        if (numRead > 0) {
            zzEndRead += numRead;
            return false;
        }
        // unlikely but not impossible: read 0 characters, but not at end of stream
        if (numRead == 0) {
            // Probe with a single-char read to distinguish EOF from a transient
            // zero-length read.
            int c = zzReader.read();
            if (c == -1) {
                return true;
            } else {
                zzBuffer[zzEndRead++] = (char) c;
                return false;
            }
        }

        // numRead < 0 : end of stream reached
        return true;
    }
}
public class Graphics { /** * Returns the array normalized from 0-255 to 0-1.0. */ private static float [ ] normalize ( float [ ] in ) { } }
float [ ] out = new float [ in . length ] ; for ( int i = 0 ; i < in . length ; i ++ ) { out [ i ] = ( in [ i ] / 255.0f ) ; } return out ;
public class JobTracker {

    /**
     * Adds a job to the jobtracker. Make sure that the checks are in place before
     * adding a job. This is the core job submission logic.
     *
     * @param jobId The id for the job submitted which needs to be added
     * @param job the in-progress job to register with the scheduler and listeners
     * @return the status of the newly added job
     */
    protected synchronized JobStatus addJob(JobID jobId, JobInProgress job) {
        totalSubmissions++;

        // Lock ordering: jobs before taskScheduler — NOTE(review): must match the
        // ordering used elsewhere in this class to avoid deadlock; confirm.
        synchronized (jobs) {
            synchronized (taskScheduler) {
                jobs.put(job.getProfile().getJobID(), job);
                for (JobInProgressListener listener : jobInProgressListeners) {
                    try {
                        listener.jobAdded(job);
                    } catch (IOException ioe) {
                        // A failing listener does not abort submission; the job
                        // is merely skipped by that listener.
                        LOG.warn("Failed to add and so skipping the job : " + job.getJobID()
                                + ". Exception : " + ioe);
                    }
                }
            }
        }
        myInstrumentation.submitJob(job.getJobConf(), jobId);

        String jobName = job.getJobConf().getJobName();
        int jobNameLen = 64;
        if (jobName.length() > jobNameLen) {
            jobName = jobName.substring(0, jobNameLen); // Truncate for logging.
        }
        LOG.info("Job " + jobId + "(" + jobName + ") added successfully for user '"
                + job.getJobConf().getUser() + "' to queue '" + job.getJobConf().getQueueName()
                + "'" + ", source " + job.getJobConf().getJobSource());
        return job.getStatus();
    }
}
public class EncodingHandler {

    /**
     * Combines the per-thread threshold algorithms into a single reduced instance.
     * This should ONLY be called once all training threads have completed.
     *
     * @return the reduced algorithm, the sole instance if only one thread ran,
     *         or {@code null} if no thread registered an algorithm
     */
    public ThresholdAlgorithm getAverageThresholdAlgorithm() {
        Collection<ThresholdAlgorithm> c = this.allThreadThresholdAlgorithms.values();
        if (c.isEmpty()) {
            return null;
        }
        if (c.size() == 1) {
            // Single thread: no reduction needed.
            return c.iterator().next();
        }
        Iterator<ThresholdAlgorithm> iter = c.iterator();
        ThresholdAlgorithmReducer r = null;
        while (iter.hasNext()) {
            ThresholdAlgorithm ta = iter.next();
            if (r == null) {
                // The first algorithm supplies the reducer implementation.
                r = ta.newReducer();
            }
            r.add(ta);
        }
        ThresholdAlgorithm ta = r.getFinalResult();

        // Remove the old instances in preparation for use in next epoch, if required
        thresholdAlgorithm = new ThreadLocal<>();
        allThreadThresholdAlgorithms.clear();

        return ta;
    }
}
public class DescribeFleetCapacityRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param describeFleetCapacityRequest the request to marshal; must not be {@code null}
     * @param protocolMarshaller the marshaller that receives each bound field
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(DescribeFleetCapacityRequest describeFleetCapacityRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeFleetCapacityRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is bound to its protocol location by the *_BINDING constants
            // declared elsewhere in this class.
            protocolMarshaller.marshall(describeFleetCapacityRequest.getFleetIds(), FLEETIDS_BINDING);
            protocolMarshaller.marshall(describeFleetCapacityRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(describeFleetCapacityRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap with context, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PlatformSummary { /** * The tiers in which the platform runs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSupportedTierList ( java . util . Collection ) } or { @ link # withSupportedTierList ( java . util . Collection ) } if * you want to override the existing values . * @ param supportedTierList * The tiers in which the platform runs . * @ return Returns a reference to this object so that method calls can be chained together . */ public PlatformSummary withSupportedTierList ( String ... supportedTierList ) { } }
if ( this . supportedTierList == null ) { setSupportedTierList ( new com . amazonaws . internal . SdkInternalList < String > ( supportedTierList . length ) ) ; } for ( String ele : supportedTierList ) { this . supportedTierList . add ( ele ) ; } return this ;
public class ZapToggleButton { /** * Helper method that takes care to instantiate and add ( as an ItemListener ) the instance variable * { @ code toolTipTextUpdaterOnSelectionStateChange } when a tool tip text that depends on the selection state ( either * selectedToolTipText or disabledSelectedToolTipText ) is not { @ code null } or set to null and remove ( as an ItemListener ) if * it is . * @ see # toolTipTextUpdaterOnSelectionStateChange * @ see # selectedToolTipText * @ see # disabledSelectedToolTipText * @ see # addItemListener ( ItemListener ) * @ see # removeItemListener ( ItemListener ) */ private void addRemoveToolTipTextUpdaterOnSelectionStateChangeAsNeeded ( ) { } }
if ( selectedToolTipText == null && disabledSelectedToolTipText == null ) { if ( toolTipTextUpdaterOnSelectionStateChange != null ) { removeItemListener ( toolTipTextUpdaterOnSelectionStateChange ) ; toolTipTextUpdaterOnSelectionStateChange = null ; } } else if ( toolTipTextUpdaterOnSelectionStateChange == null ) { toolTipTextUpdaterOnSelectionStateChange = new ToolTipTextUpdaterOnSelectionStateChange ( ) ; addItemListener ( toolTipTextUpdaterOnSelectionStateChange ) ; }
public class RestoreManagerImpl {

    /**
     * Cancels a running restore and removes its persisted record.
     *
     * @param restoreId identifier of the restoration to cancel
     * @throws SnapshotException if cancelling the underlying job fails
     * @see org.duracloud.snapshot.service.RestoreManager#cancelRestore(java.lang.String)
     */
    @Override
    @Transactional
    public void cancelRestore(String restoreId) throws SnapshotException {
        // Stop the job first; only then delete the DB record (both run in the
        // surrounding transaction).
        this.jobManager.cancelRestore(restoreId);
        this.restoreRepo.deleteByRestorationId(restoreId);
    }
}
public class HDFSDistributor { /** * Convert a set of SearchProblem objects to Strings of JSON text , writing the array to * the HDFS location given by the HDFS file root . The written file serves as input to the * Mapper tasks ( one Mapper per line in the file , which is also one SearchProblem ) * @ param problems A List of Search Problems to write * @ throws IOException if the file cannot be written to HDFS */ public void writeProblemsToHDFS ( List < Frontier > problems ) throws IOException { } }
FileSystem fs = FileSystem . get ( configuration ) ; log . info ( "hdfsFileRoot = " + hdfsFileRoot ) ; StringBuilder sb = new StringBuilder ( ) ; for ( Frontier problem : problems ) { SCXMLGapper gapper = new SCXMLGapper ( ) ; Map < String , String > decomposition = gapper . decompose ( problem , stateMachineText ) ; String problemString = decomposition . get ( "target" ) + "|" + decomposition . get ( "variables" ) + "|" ; sb . append ( problemString . replace ( "\n" , "" ) . replace ( "\t" , "" ) . replace ( "\r" , "" ) ) ; sb . append ( "\n" ) ; } try ( FSDataOutputStream out = fs . create ( mapperInputFilePath ) ) { out . write ( sb . toString ( ) . getBytes ( ) ) ; } catch ( IOException e ) { log . error ( "Problem writing " + mapperInputFilePath + " prior to MR job execution" ) ; }
public class CmsImportResultList { /** * Adds a single line of the import result to the widget . < p > * @ param leftText the text to display on the left * @ param rightText the text to display on the right * @ param styleName the style which should be applied to the right text */ public void addRow ( String leftText , String rightText , String styleName ) { } }
ensureTable ( ) ; ensureNoEmptyLabel ( ) ; int row = m_table . getRowCount ( ) ; m_table . setWidget ( row , 0 , new Label ( leftText ) ) ; Label rightLabel = new Label ( rightText ) ; rightLabel . addStyleName ( styleName ) ; rightLabel . addStyleName ( RESOURCES . css ( ) . rightLabel ( ) ) ; m_table . setWidget ( row , 1 , rightLabel ) ;
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createFOT ( ) */ @ Override public ForwardTransferMessage createFOT ( ) { } }
ForwardTransferMessage msg = new ForwardTransferMessageImpl ( _FOT_HOLDER . mandatoryCodes , _FOT_HOLDER . mandatoryVariableCodes , _FOT_HOLDER . optionalCodes , _FOT_HOLDER . mandatoryCodeToIndex , _FOT_HOLDER . mandatoryVariableCodeToIndex , _FOT_HOLDER . optionalCodeToIndex ) ; return msg ;
public class CucumberITGeneratorByScenario {

    /**
     * Generates a Cucumber runner for each scenario, or example in a scenario outline.
     *
     * @param outputDirectory the output directory to place generated files
     * @param featureFiles The feature files to create runners for
     * @throws MojoExecutionException if something goes wrong
     */
    public void generateCucumberITFiles(final File outputDirectory, final Collection<File> featureFiles)
            throws MojoExecutionException {
        Parser<GherkinDocument> parser = new Parser<GherkinDocument>(new AstBuilder());
        TagPredicate tagPredicate = new TagPredicate(overriddenParameters.getTags());
        TokenMatcher matcher = new TokenMatcher();

        for (final File file : featureFiles) {
            GherkinDocument gherkinDocument = null;
            final List<Pickle> acceptedPickles = new ArrayList<Pickle>();
            try {
                // Parse the feature file and compile it to pickles, keeping only
                // those that match the configured tag filter.
                String source = FileUtils.readFileToString(file);
                gherkinDocument = parser.parse(source, matcher);
                Compiler compiler = new Compiler();
                List<Pickle> pickles = compiler.compile(gherkinDocument);
                for (Pickle pickle : pickles) {
                    if (tagPredicate.apply(pickle.getTags())) {
                        acceptedPickles.add(pickle);
                    }
                }
            } catch (final IOException e) {
                // should never happen
                // TODO - proper logging
                System.out.println(format("WARNING: Failed to parse '%s'...IGNORING", file.getName()));
            }

            for (Pickle pickle : acceptedPickles) {
                int locationIndex = pickle.getLocations().size();
                final Location location = findLocationByIndex(pickle, 0);
                // Scenario Outline has a first location the position on the table
                // and second one is the position of scenario self.
                final Location locationToCompare = findLocationByIndex(pickle, locationIndex - 1);
                // One runner file per accepted pickle; writeFile consumes the
                // state set by the setters below.
                outputFileName = classNamingScheme.generate(file.getName());
                setFeatureFileLocation(file, location);
                setParsedFeature(gherkinDocument.getFeature());
                setParsedScenario(findScenarioDefinitionViaLocation(locationToCompare, gherkinDocument));
                writeFile(outputDirectory);
            }
        }
    }
}
public class NettyUtils { /** * Returns the correct ( client ) SocketChannel class based on IOMode . */ public static Class < ? extends Channel > getClientChannelClass ( IOMode mode ) { } }
switch ( mode ) { case NIO : return NioSocketChannel . class ; case EPOLL : return EpollSocketChannel . class ; default : throw new IllegalArgumentException ( "Unknown io mode: " + mode ) ; }
public class PairtreeFactory {

    /**
     * Gets a file system based Pairtree using the supplied directory as the Pairtree root and
     * the supplied prefix as the Pairtree prefix.
     *
     * @param aPrefix A Pairtree prefix
     * @param aDirectory A directory to use for the Pairtree root
     * @return A Pairtree root
     * @throws PairtreeException If there is trouble creating the Pairtree
     */
    public Pairtree getPrefixedPairtree(final String aPrefix, final File aDirectory) throws PairtreeException {
        // Delegates to the file-system implementation, sharing this factory's Vert.x
        // instance; getDirPath resolves/validates the root directory path.
        return new FsPairtree(aPrefix, myVertx, getDirPath(aDirectory));
    }
}
public class Viewer {

    /**
     * Entry point: merges javamelody data from one or more instances into a temp
     * directory, serves the reports from an embedded HTTP server, and opens them
     * in the default browser.
     *
     * @param args String[] (unused; configuration comes from system properties)
     * @throws Exception e
     */
    public static void main(String[] args) throws Exception {
        final String storageDirectory = Parameter.STORAGE_DIRECTORY.getValue();
        if (storageDirectory == null) {
            throw new IllegalArgumentException("Please give the javamelody storage directory with -Djavamelody.storage-directory=... containing directories with the data of one or more instances of an application");
        }

        // merge and copy the data of one or more instances into a temporary directory
        final String tmpApplication = "tmpjavamelody" + new Random().nextInt();
        final String mergedDirectory = System.getProperty("java.io.tmpdir");
        // Parameters.getStorageDirectory(tmpApplication).getPath();
        DataMerge.main(new String[] { storageDirectory, mergedDirectory + '/' + tmpApplication });
        // clean up the merged temp data on JVM exit
        addShutdownHook(new File(mergedDirectory + '/' + tmpApplication));

        final Map<Parameter, String> parameters = new HashMap<>();
        // set the path of the reports:
        parameters.put(Parameter.MONITORING_PATH, "/");
        // set the storage directory and temp application name:
        Parameter.STORAGE_DIRECTORY.setValue(mergedDirectory);
        parameters.put(Parameter.APPLICATION_NAME, tmpApplication);

        // start the embedded http server with javamelody
        final String port = System.getProperty("javamelody.viewer.port", "8080");
        String url = "http://localhost:" + port + '/';
        System.out.println("Starting on " + url);
        EmbeddedServer.start(Integer.parseInt(port), parameters);

        // open the reports in a browser, pre-selecting the most recent day of data
        final String lastDay = new SimpleDateFormat("yyyy-MM-dd").format(new Date(getLatest(new File(storageDirectory))));
        url += "?period=" + lastDay + "%7C" + lastDay + "&pattern=yyyy-MM-dd";
        System.out.println("Opening the reports in a browser on " + url);
        Desktop.getDesktop().browse(URI.create(url));
        System.out.println("Done");
    }
}
public class TemplateImportVisitor {

    /**
     * Looks up an existing shared-component template by name.
     *
     * @param name the template name to search for
     * @return the matching template, or {@code null} if none exists or the lookup failed
     */
    private DOMNode getExistingTemplate(final String name) {
        // Lookup runs inside its own transaction; a failure is treated as
        // "not found" (logged and null returned) rather than propagated.
        try (final Tx tx = app.tx()) {
            return Importer.findSharedComponentByName(name);
        } catch (FrameworkException fex) {
            logger.warn("Unable to determine if template {} already exists, ignoring.", name);
        }
        return null;
    }
}
public class SeleniumActionBuilder { /** * Page action . */ public PageActionBuilder page ( WebPage page ) { } }
PageAction action = new PageAction ( ) ; action . setPage ( page ) ; action ( action ) ; return new PageActionBuilder ( action ) ;
public class HttpMessageConverter { /** * Determines whether the given message type supports a message body * @ param method The HttpMethod to evaluate * @ return Whether a message body is supported */ private boolean httpMethodSupportsBody ( HttpMethod method ) { } }
return HttpMethod . POST . equals ( method ) || HttpMethod . PUT . equals ( method ) || HttpMethod . DELETE . equals ( method ) || HttpMethod . PATCH . equals ( method ) ;
public class Applications {

    /**
     * Creates an iterator yielding the result of the transformation applied by
     * the function on the elements of the source iterator. This transformation
     * is evaluated lazily when the resulting iterator is consumed. E.g:
     * <code>transform([1,2,3], toStringTransformer) -> ["1", "2", "3"]</code>
     *
     * @param <R> the result iterator element type parameter
     * @param <E> the input iterator element type parameter
     * @param iterator the iterator where elements are fetched from
     * @param function a function used to transform each element
     * @return the transformed iterator
     */
    public static <R, E> Iterator<R> transform(Iterator<E> iterator, Function<E, R> function) {
        // Lazy: elements are transformed one-by-one as the caller consumes them.
        return new TransformingIterator<>(iterator, function);
    }
}
public class AWSsignerWaiters {

    /**
     * Builds a SuccessfulSigningJob waiter by using custom parameters waiterParameters and other
     * parameters defined in the waiters specification, and then polls until it determines whether
     * the resource entered the desired state or not, where polling criteria is bound by either
     * default polling strategy or custom polling strategy.
     *
     * @return a waiter that polls DescribeSigningJob up to 25 times with a fixed
     *         20-second delay between attempts
     */
    public Waiter<DescribeSigningJobRequest> successfulSigningJob() {
        return new WaiterBuilder<DescribeSigningJobRequest, DescribeSigningJobResult>()
                .withSdkFunction(new DescribeSigningJobFunction(client))
                // Terminal states: succeeded, failed, or resource not found.
                .withAcceptors(new SuccessfulSigningJob.IsSucceededMatcher(),
                        new SuccessfulSigningJob.IsFailedMatcher(),
                        new SuccessfulSigningJob.IsResourceNotFoundExceptionMatcher())
                .withDefaultPollingStrategy(
                        new PollingStrategy(new MaxAttemptsRetryStrategy(25), new FixedDelayStrategy(20)))
                .withExecutorService(executorService)
                .build();
    }
}
public class LogEvent { /** * Returns null if there is no exception logged . */ public String getExceptionStackTrace ( ) { } }
Throwable t = getException ( ) ; if ( t == null ) { return null ; } StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; t . printStackTrace ( pw ) ; return sw . toString ( ) ;
public class DatabaseUtils {

    /**
     * Reads a Long out of a field in a Cursor and writes it to a Map.
     *
     * @param cursor The cursor to read from
     * @param field The INTEGER field to read
     * @param values The {@link ContentValues} to put the value into, with the field as the key
     */
    public static void cursorLongToContentValues(Cursor cursor, String field, ContentValues values) {
        // Convenience overload: the cursor column name doubles as the values key.
        cursorLongToContentValues(cursor, field, values, field);
    }
}
public class Perl5Util {

    /**
     * Returns the position of the first occurrence of the pattern in the input text,
     * or all positions when {@code matchAll} is set.
     *
     * @param strPattern pattern to search
     * @param strInput text to search pattern
     * @param offset 1-based start position; values below 1 are clamped to 1
     * @param caseSensitive whether matching is case sensitive
     * @param matchAll when true, collect every match position into an Array
     * @return a {@code Double} position for the first match (matchAll == false),
     *         an {@code Array} of positions (matchAll == true with matches),
     *         or {@code 0} (int) when nothing matched — note the mixed return types
     * @throws MalformedPatternException if the pattern cannot be compiled
     */
    public static Object indexOf(String strPattern, String strInput, int offset, boolean caseSensitive,
            boolean matchAll) throws MalformedPatternException {
        // Perl5Compiler compiler = new Perl5Compiler();
        PatternMatcherInput input = new PatternMatcherInput(strInput);
        Perl5Matcher matcher = new Perl5Matcher();
        int compileOptions = caseSensitive ? 0 : Perl5Compiler.CASE_INSENSITIVE_MASK;
        compileOptions += Perl5Compiler.SINGLELINE_MASK;
        // Positions in this API are 1-based; clamp the caller-supplied offset.
        if (offset < 1) offset = 1;
        // Compiled patterns are cached by getPattern.
        Pattern pattern = getPattern(strPattern, compileOptions);
        // Pattern pattern = compiler.compile(strPattern, compileOptions);
        if (offset <= strInput.length()) input.setCurrentOffset(offset - 1);
        if (offset <= strInput.length()) {
            Array matches = new ArrayImpl();
            while (matcher.contains(input, pattern)) {
                // Convert the 0-based match offset back to a 1-based position.
                int match = matcher.getMatch().beginOffset(0) + 1;
                if (!matchAll) {
                    return new Double(match);
                }
                matches.appendEL(match);
            }
            if (matches.size() != 0) {
                return matches;
            }
        }
        return 0;
    }
}
public class FeatureIO { /** * Reads features from a text file and returns them in an ArrayList < double [ ] > . * @ param featuresFileName * The text file containing the features . * @ param featureLength * The length of each feature . * @ return * @ throws Exception */ public static ArrayList < double [ ] > readText ( String featuresFileName , int featureLength ) throws Exception { } }
ArrayList < double [ ] > features = new ArrayList < double [ ] > ( ) ; BufferedReader in = new BufferedReader ( new FileReader ( new File ( featuresFileName ) ) ) ; String line ; while ( ( line = in . readLine ( ) ) != null ) { String [ ] stringVals = line . split ( "," ) ; if ( stringVals . length != featureLength ) { in . close ( ) ; throw new Exception ( "Line contains " + stringVals . length + " comma separated values instead of " + featureLength + "\n" + line ) ; } double [ ] vals = new double [ featureLength ] ; for ( int j = 0 ; j < featureLength ; j ++ ) { vals [ j ] = Double . parseDouble ( stringVals [ j ] ) ; } features . add ( vals ) ; } in . close ( ) ; return features ;
public class ISO9075 { /** * Decodes the < code > name < / code > . * @ param name * the < code > String < / code > to decode . * @ return the decoded < code > String < / code > . */ public static String decode ( String name ) { } }
// quick check if ( name . indexOf ( "_x" ) < 0 ) { // not encoded return name ; } StringBuffer decoded = new StringBuffer ( ) ; Matcher m = ENCODE_PATTERN . matcher ( name ) ; while ( m . find ( ) ) { m . appendReplacement ( decoded , Character . toString ( ( char ) Integer . parseInt ( m . group ( ) . substring ( 2 , 6 ) , 16 ) ) ) ; } m . appendTail ( decoded ) ; return decoded . toString ( ) ;
public class StringUtils { /** * region Parsing */ public static int parseInt ( String value , int defaultValue ) { } }
try { return Integer . parseInt ( value ) ; } catch ( Exception e ) { return defaultValue ; }
public class UrlBinary {

    /**
     * Opens a stream over the external (proxied) content this binary points at.
     *
     * @return an open {@link InputStream} for the resource URI
     * @throws ExternalContentAccessException if the external resource cannot be opened
     * @see org.fcrepo.kernel.modeshape.FedoraBinaryImpl#getContent()
     */
    @Override
    public InputStream getContent() {
        // todo - this needs to be more complete so the proxy information will
        // make it up to the higher levels. Ie, so one can pass back the response information
        try {
            // Caller is responsible for closing the returned stream.
            return getResourceUri().toURL().openStream();
        } catch (final IOException e) {
            throw new ExternalContentAccessException("Problems getting external content : " + e.getMessage(), e);
        }
    }
}
public class RuleClassifier { /** * Get best and second best attributes */ protected double [ ] getBestSecondBestEntropy ( DoubleVector entropy ) { } }
double [ ] entropyValues = new double [ 2 ] ; double best = Double . MAX_VALUE ; double secondBest = Double . MAX_VALUE ; for ( int i = 0 ; i < entropy . numValues ( ) ; i ++ ) { if ( entropy . getValue ( i ) < best ) { secondBest = best ; best = entropy . getValue ( i ) ; } else { if ( entropy . getValue ( i ) < secondBest ) { secondBest = entropy . getValue ( i ) ; } } } entropyValues [ 0 ] = best ; entropyValues [ 1 ] = secondBest ; return entropyValues ;
public class BuilderSpec {

    /**
     * Returns a representation of the given {@code @AutoValue.Builder} class or interface. If the
     * class or interface has abstract methods that could not be part of any builder, emits error
     * messages and returns Optional.empty().
     *
     * @param builderTypeElement the candidate builder type
     * @return the builder representation, or empty if validation failed
     */
    private Optional<Builder> builderFrom(TypeElement builderTypeElement) {
        // We require the builder to have the same type parameters as the @AutoValue class, meaning the
        // same names and bounds. In principle the type parameters could have different names, but that
        // would be confusing, and our code would reject it anyway because it wouldn't consider that
        // the return type of Foo<U> build() was really the same as the declaration of Foo<T>. This
        // check produces a better error message in that case and similar ones.
        if (!sameTypeParameters(autoValueClass, builderTypeElement)) {
            errorReporter.reportError(
                    "Type parameters of " + builderTypeElement + " must have same names and bounds as "
                            + "type parameters of " + autoValueClass,
                    builderTypeElement);
            return Optional.empty();
        }
        return Optional.of(new Builder(builderTypeElement));
    }
}
public class AuditNotifier { /** * Returns the trigger detail information . * @ param trigger The source trigger . * @ return The trigger detail information . */ protected String getTriggerDetails ( Trigger trigger , NotificationContext context ) { } }
if ( trigger != null ) { String triggerString = trigger . toString ( ) ; triggerString = TemplateReplacer . applyTemplateChanges ( context , triggerString ) ; return triggerString . substring ( triggerString . indexOf ( "{" ) + 1 , triggerString . indexOf ( "}" ) ) ; } else { return "" ; }
public class MediaChannel {

    /**
     * Enables ICE on the channel.
     * An ICE-enabled channel will start an ICE Agent which gathers local
     * candidates and listens to incoming STUN requests as a mean to select the
     * proper address to be used during the call.
     *
     * @param externalAddress The public address of the Media Server. Used for SRFLX candidates.
     *        NOTE(review): this parameter is not referenced in the body — presumably consumed
     *        elsewhere during candidate gathering; confirm.
     * @param rtcpMux Whether RTCP is multiplexed or not. Affects number of candidates.
     */
    public void enableICE(String externalAddress, boolean rtcpMux) {
        // Idempotent: a second call while ICE is already enabled is a no-op.
        if (!this.ice) {
            this.ice = true;
            this.rtcpMux = rtcpMux;

            this.iceAuthenticator.generateIceCredentials();

            // Enable ICE on RTP channels; a separate RTCP channel only exists
            // when RTCP is not multiplexed over the RTP channel.
            this.rtpChannel.enableIce(this.iceAuthenticator);
            if (!rtcpMux) {
                this.rtcpChannel.enableIce(this.iceAuthenticator);
            }

            if (logger.isDebugEnabled()) {
                logger.debug(this.mediaType + " channel " + this.ssrc + " enabled ICE");
            }
        }
    }
}
public class ResourceRegistryImpl {

    /**
     * Looks up the class registered for an entity name.
     *
     * @param name the entity name; must not be null, empty, or whitespace-only
     * @return the registered class, or {@code null} when no entry exists for the name
     * @throws IllegalArgumentException if {@code name} is blank
     * @see com.google.code.siren4j.converter.ResourceRegistry#getClassByEntityName(java.lang.String)
     */
    public Class<?> getClassByEntityName(String name) {
        if (StringUtils.isBlank(name)) {
            throw new IllegalArgumentException("name cannot be null or empty.");
        }
        return entries.get(name);
    }
}
public class SeaGlassRootPaneUI { /** * Uninstalls the previously installed < code > LayoutManager < / code > . * @ param root the JRootPane . */ private void uninstallLayout ( JRootPane root ) { } }
if ( savedOldLayout != null ) { root . setLayout ( savedOldLayout ) ; savedOldLayout = null ; }
public class Struct {

    /**
     * Returns a Map of the Fields belonging to this Struct and all its ancestors.
     * Keys are the Field names; a field defined here shadows an inherited parent
     * field of the same name, since parent entries overwrite the child's copy last...
     * NOTE(review): parent fields are putAll'd AFTER this struct's own — parent
     * definitions win on name collisions; confirm this is intended.
     */
    public Map<String, Field> getFieldsPlusParents() {
        Map<String, Field> tmp = new HashMap<String, Field>();
        tmp.putAll(fields);
        if (extend != null && !extend.equals("")) {
            // NOTE(review): if 'extend' names a struct missing from the contract,
            // 'parent' is null and the next line throws NPE — verify lookups are
            // validated upstream.
            Struct parent = contract.getStructs().get(extend);
            tmp.putAll(parent.getFieldsPlusParents());
        }
        return tmp;
    }
}
public class Template {

    /**
     * Get the processor graph to use for executing all the processors for the template.
     * Built lazily on first access via double-checked locking.
     * NOTE(review): double-checked locking is only safe if the processorGraph field is
     * declared volatile — the field declaration is not visible here; confirm.
     *
     * @return the processor graph.
     */
    public final ProcessorDependencyGraph getProcessorGraph() {
        if (this.processorGraph == null) {
            synchronized (this) {
                if (this.processorGraph == null) {
                    // Map each attribute name to its value type so the factory can
                    // wire processor inputs.
                    final Map<String, Class<?>> attcls = new HashMap<>();
                    for (Map.Entry<String, Attribute> attribute : this.attributes.entrySet()) {
                        attcls.put(attribute.getKey(), attribute.getValue().getValueType());
                    }
                    this.processorGraph = this.processorGraphFactory.build(this.processors, attcls);
                }
            }
        }
        return this.processorGraph;
    }
}
public class ActorDispatcher {
    /**
     * Delivers one envelope to its destination actor: lazily creates the actor on
     * first delivery, dispatches the message, and tears the actor down on
     * {@code PoisonPill} or on an unhandled exception.
     *
     * @param envelope envelope to process
     */
    private void processEnvelope(Envelope envelope) {
        ActorScope scope = envelope.getScope();
        if (actorSystem.getTraceInterface() != null) {
            actorSystem.getTraceInterface().onEnvelopeDelivered(envelope);
        }
        // Timestamp for the onEnvelopeProcessed trace callback in the finally block
        long start = ActorTime.currentTime();
        if (scope.getActor() == null) {
            if (envelope.getMessage() == PoisonPill.INSTANCE) {
                // Not creating actor for PoisonPill
                return;
            }
            try {
                Actor actor = scope.getProps().create();
                actor.initActor(scope.getPath(), new ActorContext(scope), scope.getMailbox());
                // preStart must run with this actor's dispatcher current on the thread
                ThreadDispatcher.pushDispatcher(actor.getDispatcher());
                try {
                    actor.preStart();
                } finally {
                    ThreadDispatcher.popDispatcher();
                }
                scope.onActorCreated(actor);
            } catch (Exception e) {
                // NOTE(review): printStackTrace bypasses any logging/trace facility —
                // consider routing through the trace interface instead.
                e.printStackTrace();
                if (envelope.getSender() != null) {
                    envelope.getSender().send(new DeadLetter("Unable to create actor"));
                }
                return;
            }
        }
        try {
            if (envelope.getMessage() == StartActor.INSTANCE) {
                // Already created actor
            } else if (envelope.getMessage() == PoisonPill.INSTANCE) {
                // Stop the actor with its dispatcher current, then unregister it
                ThreadDispatcher.pushDispatcher(scope.getActor().getDispatcher());
                try {
                    scope.getActor().postStop();
                } finally {
                    ThreadDispatcher.popDispatcher();
                }
                onActorDie(scope);
            } else {
                scope.getActor().handleMessage(envelope.getMessage(), envelope.getSender());
            }
        } catch (Exception e) {
            // Any exception while handling a message kills the actor
            if (actorSystem.getTraceInterface() != null) {
                actorSystem.getTraceInterface().onActorDie(scope.getActorRef(), envelope, e);
            }
            ThreadDispatcher.pushDispatcher(scope.getActor().getDispatcher());
            try {
                scope.getActor().postStop();
            } finally {
                ThreadDispatcher.popDispatcher();
            }
            onActorDie(scope);
        } finally {
            if (actorSystem.getTraceInterface() != null) {
                actorSystem.getTraceInterface().onEnvelopeProcessed(envelope, ActorTime.currentTime() - start);
            }
        }
    }
}
public class App { /** * Dismiss ( click ' Cancel ' on ) a confirmation box */ public void dismissConfirmation ( ) { } }
String action = "Clicking 'Cancel' on a confirmation" ; String expected = "Confirmation is present to be clicked" ; if ( isNotConfirmation ( action , expected ) ) { return ; } dismiss ( action , expected , "confirmation" ) ;
public class GoogleToken { /** * so we use getTokenizer ( ) and simple ignore the LT tokens . Also adds POS tags from original sentence if trivially possible . */ static List < GoogleToken > getGoogleTokens ( AnalyzedSentence sentence , boolean addStartToken , Tokenizer wordTokenizer ) { } }
List < GoogleToken > result = new ArrayList < > ( ) ; if ( addStartToken ) { result . add ( new GoogleToken ( LanguageModel . GOOGLE_SENTENCE_START , 0 , 0 ) ) ; } List < String > tokens = wordTokenizer . tokenize ( sentence . getText ( ) ) ; int startPos = 0 ; for ( String token : tokens ) { if ( ! StringTools . isWhitespace ( token ) ) { int endPos = startPos + token . length ( ) ; Set < AnalyzedToken > pos = findOriginalAnalyzedTokens ( sentence , startPos , endPos ) ; GoogleToken gToken = new GoogleToken ( token , startPos , endPos , pos ) ; result . add ( gToken ) ; } startPos += token . length ( ) ; } return result ;
public class TypeInference {
    /**
     * Sets the types of a named/non-destructuring function parameter to an
     * inferred type.
     *
     * <p>This method is responsible for typing:
     * <ul>
     * <li>The scope slot
     * <li>The param node
     * </ul>
     *
     * @param paramName the parameter's NAME node
     * @param hasDefaultValue whether the parameter has a default value
     * @param inferredType the type inferred for the parameter
     * @param entryFlowScope the flow scope at function entry
     * @return the (possibly updated) entry flow scope
     */
    @CheckReturnValue
    @SuppressWarnings("ReferenceEquality") // unknownType is a singleton
    private FlowScope updateNamedParameter(Node paramName, boolean hasDefaultValue, JSType inferredType, FlowScope entryFlowScope) {
        TypedVar var = containerScope.getVar(paramName.getString());
        checkNotNull(var, "Missing var for parameter %s", paramName);
        // The AST node always carries the inferred type
        paramName.setJSType(inferredType);
        if (var.isTypeInferred()) {
            var.setType(inferredType);
        } else if (hasDefaultValue) {
            // If this is a declared type with a default value, update the LinkedFlowScope slots but not
            // the actual TypedVar. This is similar to what would happen if the default value was moved
            // into an assignment in the fn body
            entryFlowScope = redeclareSimpleVar(entryFlowScope, paramName, inferredType);
        }
        return entryFlowScope;
    }
}
public class TranslatorTypes { /** * Creates a DPT translator for the given datapoint type ID . * The translation behavior of a DPT translator instance is uniquely defined by the * supplied datapoint type ID . * If the < code > dptID < / code > argument is built up the recommended way , that is " < i > main * number < / i > . < i > sub number < / i > " , the < code > mainNumber < / code > argument might be * left 0 to use the datapoint type ID only . < br > * Note , that we don ' t enforce any particular or standardized format on the dptID * structure , so using a different formatted dptID solely without main number argument * results in undefined behavior . * @ param mainNumber data type main number , number > = 0 ; use 0 to infer translator * type from < code > dptID < / code > argument only * @ param dptID datapoint type ID for selecting a particular kind of value translation * @ return the new { @ link DPTXlator } object * @ throws KNXException on main type not found or creation failed ( refer to * { @ link MainType # createTranslator ( String ) } ) */ public static DPTXlator createTranslator ( int mainNumber , String dptID ) throws KNXException { } }
try { final int main = getMainNumber ( mainNumber , dptID ) ; final MainType type = ( MainType ) map . get ( new Integer ( main ) ) ; if ( type != null ) return type . createTranslator ( dptID ) ; } catch ( final NumberFormatException e ) { } throw new KNXException ( "main number not found for " + dptID ) ;
public class Message { /** * Create a MessageCreator to execute create . * @ param pathAccountSid The SID of the Account that will create the resource * @ param to The destination phone number * @ param messagingServiceSid The SID of the Messaging Service you want to * associate with the message . * @ param body The text of the message you want to send . Can be up to 1,600 * characters in length . * @ return MessageCreator capable of executing the create */ public static MessageCreator creator ( final String pathAccountSid , final com . twilio . type . PhoneNumber to , final String messagingServiceSid , final String body ) { } }
return new MessageCreator ( pathAccountSid , to , messagingServiceSid , body ) ;
public class JMRandom {
    /**
     * Builds a random int stream of the given size, where each element is drawn
     * from {@code random} within [inclusiveLowerBound, exclusiveUpperBound).
     *
     * @param streamSize the stream size
     * @param random the random source
     * @param inclusiveLowerBound the inclusive lower bound
     * @param exclusiveUpperBound the exclusive upper bound
     * @return the int stream
     */
    public static IntStream buildRandomIntStream(int streamSize, Random random, int inclusiveLowerBound, int exclusiveUpperBound) {
        // Delegate to the supplier-based overload; each draw is independently bounded.
        return buildRandomIntStream(streamSize, () -> getBoundedNumber(random, inclusiveLowerBound, exclusiveUpperBound));
    }
}
public class HtmlMessages {
    /**
     * <p>Set the value of the <code>infoStyle</code> property.</p>
     *
     * @param infoStyle CSS style applied to info-level messages; stored in the
     *        JSF state helper so it participates in component state saving
     */
    public void setInfoStyle(java.lang.String infoStyle) {
        getStateHelper().put(PropertyKeys.infoStyle, infoStyle);
    }
}
public class Message { /** * Extracts the menu item type from a { @ link KnownType # MENU _ ITEM } response . * @ return the reported type of this menu item * @ throws IllegalArgumentException if this is not a { @ link KnownType # MENU _ ITEM } response . */ public MenuItemType getMenuItemType ( ) { } }
if ( knownType != KnownType . MENU_ITEM ) { throw new IllegalArgumentException ( "getMenuItemType() can only be used with MENU_ITEM responses." ) ; } final NumberField type = ( NumberField ) arguments . get ( 6 ) ; final MenuItemType result = MENU_ITEM_TYPE_MAP . get ( type . getValue ( ) ) ; if ( result == null ) { return MenuItemType . UNKNOWN ; } return result ;
public class AfplibFactoryImpl {
    /**
     * Converts the literal string into the corresponding {@code BDMDatFmt}
     * enumerator, rejecting unknown literals.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public BDMDatFmt createBDMDatFmtFromString(EDataType eDataType, String initialValue) {
        BDMDatFmt result = BDMDatFmt.get(initialValue);
        // Unknown literals are a caller error, reported with the datatype name for context
        if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class MultiIndex { /** * Enqueues unused segments for deletion in { @ link # deletable } . This method * does not synchronize on { @ link # deletable } ! A caller must ensure that it * is the only one acting on the { @ link # deletable } map . * @ throws IOException * if an error occurs while reading directories . */ private void enqueueUnusedSegments ( ) throws IOException { } }
// walk through index segments String [ ] dirNames = directoryManager . getDirectoryNames ( ) ; for ( int i = 0 ; i < dirNames . length ; i ++ ) { if ( dirNames [ i ] . startsWith ( "_" ) && ! indexNames . contains ( dirNames [ i ] ) ) { deletable . add ( dirNames [ i ] ) ; } }
public class ExcelModuleDemoToDoItemBulkUpdateMenu {
    /**
     * Serializes the bulk-update manager's view state (file name, category,
     * subcategory and completion flag) into a memento string so the manager can
     * be reconstructed later.
     */
    String mementoFor(final ExcelModuleDemoToDoItemBulkUpdateManager tdieim) {
        final Memento memento = mementoService.create();
        memento.set("fileName", tdieim.getFileName());
        memento.set("category", tdieim.getCategory());
        memento.set("subcategory", tdieim.getSubcategory());
        memento.set("completed", tdieim.isComplete());
        return memento.asString();
    }
}
public class PKITools { /** * Write a single string to a file . Used to write keys * @ param fileName String file to write to * @ param bs bytes to write * @ param append true to add the key to the file . * @ throws IOException */ private void writeFile ( final String fileName , final byte [ ] bs , final boolean append ) throws IOException { } }
FileOutputStream fstr = null ; try { fstr = new FileOutputStream ( fileName , append ) ; fstr . write ( bs ) ; // Terminate key with newline fstr . write ( '\n' ) ; fstr . flush ( ) ; } finally { if ( fstr != null ) { fstr . close ( ) ; } }
public class Properties { /** * Replaces the entry for the specified key only if it is * currently mapped to some value . */ public V replace ( K key , V value ) { } }
V curValue ; if ( ( ( curValue = get ( key ) ) != null ) || containsKey ( key ) ) { curValue = put ( key , value ) ; } return curValue ;
public class ClobConverter { /** * { @ inheritDoc } */ public Object convert ( String value , TypeLiteral < ? > toType ) { } }
try { return new SerialClob ( value . toCharArray ( ) ) ; } catch ( Exception e ) { throw new ProvisionException ( "String value '" + value + "' is not a valid InetAddress" , e ) ; }
public class JSDocInfoBuilder { /** * Records a parameter type to an enum . * @ return { @ code true } if the enum ' s parameter type was recorded and * { @ code false } if it was invalid or already defined */ public boolean recordEnumParameterType ( JSTypeExpression type ) { } }
if ( type != null && ! hasAnyTypeRelatedTags ( ) ) { currentInfo . setEnumParameterType ( type ) ; populated = true ; return true ; } else { return false ; }
public class TreeWalker {
    /**
     * Perform a pre-order traversal non-recursive style.
     *
     * <p>In contrast to the traverse() method this method will not issue
     * startDocument() and endDocument() events to the SAX listener.
     *
     * @param pos Node in the tree where to start traversal
     * @throws org.xml.sax.SAXException if a SAX callback fails
     */
    public void traverseFragment(Node pos) throws org.xml.sax.SAXException {
        // Remember the fragment root so the walk never escapes the subtree
        Node top = pos;
        while (null != pos) {
            startNode(pos);
            Node nextNode = pos.getFirstChild();
            // No child: close nodes and climb until a sibling is found or we
            // are back at the fragment root.
            while (null == nextNode) {
                endNode(pos);
                if (top.equals(pos))
                    break;
                nextNode = pos.getNextSibling();
                if (null == nextNode) {
                    pos = pos.getParentNode();
                    if ((null == pos) || (top.equals(pos))) {
                        // Reached (or left) the fragment root: emit its end
                        // event and terminate the traversal.
                        if (null != pos)
                            endNode(pos);
                        nextNode = null;
                        break;
                    }
                }
            }
            pos = nextNode;
        }
    }
}
public class UTFDataOutputStream { /** * This method uses the { @ link String # getBytes ( String ) } method to write * < ol > * < li > the size of the byte array < / li > * < li > the unicode byte sequence of this string < / li > * < / ol > * @ param str * String to be written in the output stream * @ throws IOException */ public void writeUTFAsArray ( String str ) throws IOException { } }
byte [ ] buffer = str . getBytes ( "UTF-8" ) ; super . writeInt ( buffer . length ) ; super . write ( buffer , 0 , buffer . length ) ;
public class AudioMetadata {
    /**
     * Parses header values and tag fields common to all supported audio files
     * into this object's fields (bitrate, sample rate, channels, duration,
     * generic tags and artwork).
     *
     * @return always {@code true} in the visible code path
     */
    private boolean checkSupportedAudio() {
        AudioHeader header = audioFile.getAudioHeader();
        bitrate = header.getBitRateAsNumber();
        sampleRate = header.getSampleRateAsNumber();
        channels = header.getChannels();
        // Normalize textual "stereo"-style channel descriptions to a plain count
        if (header.getChannels().toLowerCase().contains("stereo")) {
            channels = "2";
        }
        // Prefer the precise track length where the header subtype provides one
        if (header instanceof MP3AudioHeader) {
            duration = ((MP3AudioHeader) header).getPreciseTrackLength();
        } else if (header instanceof Mp4AudioHeader) {
            duration = (double) ((Mp4AudioHeader) header).getPreciseLength();
        } else {
            duration = (double) header.getTrackLength();
        }
        // generic frames
        Tag tag = audioFile.getTag();
        artist = tag.getFirst(FieldKey.ARTIST);
        album = tag.getFirst(FieldKey.ALBUM);
        title = tag.getFirst(FieldKey.TITLE);
        comment = tag.getFirst(FieldKey.COMMENT);
        year = tag.getFirst(FieldKey.YEAR);
        track = tag.getFirst(FieldKey.TRACK);
        genre = tag.getFirst(FieldKey.GENRE);
        artwork = new ArrayList<>();
        for (Artwork a : tag.getArtworkList()) {
            AudioMetadataArtwork ama = new AudioMetadataArtwork();
            ama.setMimeType(a.getMimeType());
            // Negative picture types are left unset on the artwork wrapper
            if (a.getPictureType() >= 0) {
                ama.setType(a.getPictureType());
            }
            ama.setData(a.getBinaryData());
            artwork.add(ama);
        }
        return true;
    }
}
public class AWSWAFRegionalClient { /** * Permanently deletes a < a > GeoMatchSet < / a > . You can ' t delete a < code > GeoMatchSet < / code > if it ' s still used in any * < code > Rules < / code > or if it still includes any countries . * If you just want to remove a < code > GeoMatchSet < / code > from a < code > Rule < / code > , use < a > UpdateRule < / a > . * To permanently delete a < code > GeoMatchSet < / code > from AWS WAF , perform the following steps : * < ol > * < li > * Update the < code > GeoMatchSet < / code > to remove any countries . For more information , see < a > UpdateGeoMatchSet < / a > . * < / li > * < li > * Use < a > GetChangeToken < / a > to get the change token that you provide in the < code > ChangeToken < / code > parameter of a * < code > DeleteGeoMatchSet < / code > request . * < / li > * < li > * Submit a < code > DeleteGeoMatchSet < / code > request . * < / li > * < / ol > * @ param deleteGeoMatchSetRequest * @ return Result of the DeleteGeoMatchSet operation returned by the service . * @ throws WAFStaleDataException * The operation failed because you tried to create , update , or delete an object by using a change token * that has already been used . * @ throws WAFInternalErrorException * The operation failed because of a system problem , even though the request was valid . Retry your request . * @ throws WAFInvalidAccountException * The operation failed because you tried to create , update , or delete an object by using an invalid account * identifier . * @ throws WAFNonexistentItemException * The operation failed because the referenced object doesn ' t exist . * @ throws WAFReferencedItemException * The operation failed because you tried to delete an object that is still in use . For example : < / p > * < ul > * < li > * You tried to delete a < code > ByteMatchSet < / code > that is still referenced by a < code > Rule < / code > . 
* < / li > * < li > * You tried to delete a < code > Rule < / code > that is still referenced by a < code > WebACL < / code > . * < / li > * @ throws WAFNonEmptyEntityException * The operation failed because you tried to delete an object that isn ' t empty . For example : < / p > * < ul > * < li > * You tried to delete a < code > WebACL < / code > that still contains one or more < code > Rule < / code > objects . * < / li > * < li > * You tried to delete a < code > Rule < / code > that still contains one or more < code > ByteMatchSet < / code > objects * or other predicates . * < / li > * < li > * You tried to delete a < code > ByteMatchSet < / code > that contains one or more < code > ByteMatchTuple < / code > * objects . * < / li > * < li > * You tried to delete an < code > IPSet < / code > that references one or more IP addresses . * < / li > * @ sample AWSWAFRegional . DeleteGeoMatchSet * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / waf - regional - 2016-11-28 / DeleteGeoMatchSet " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeleteGeoMatchSetResult deleteGeoMatchSet ( DeleteGeoMatchSetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteGeoMatchSet ( request ) ;
public class VersionsImpl { /** * Gets the version info . * @ param appId The application ID . * @ param versionId The version ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VersionInfo object */ public Observable < VersionInfo > getAsync ( UUID appId , String versionId ) { } }
return getWithServiceResponseAsync ( appId , versionId ) . map ( new Func1 < ServiceResponse < VersionInfo > , VersionInfo > ( ) { @ Override public VersionInfo call ( ServiceResponse < VersionInfo > response ) { return response . body ( ) ; } } ) ;
public class ZoomToRectangleAddon {
    /**
     * Paints this zoom-to-rectangle button into the map's vector context and, on
     * the first paint only, wires up its controller and pointer cursor.
     */
    public void accept(PainterVisitor visitor, Object group, Bbox bounds, boolean recursive) {
        map.getVectorContext().drawGroup(group, this);
        Coordinate c = getUpperLeftCorner();
        PictureStyle pictureStyle = new PictureStyle(1);
        pictureStyle.setClassName("gm-ZoomToRectangleControl");
        // 20x20 button image anchored at the add-on's upper-left corner
        map.getVectorContext().drawImage(this, "zoom-rect-img", Geomajas.getIsomorphicDir() + "geomajas/mapaddon/zoom_rectangle.png", new Bbox(c.getX(), c.getY(), 20, 20), pictureStyle);
        // Controller and cursor are attached only once; subsequent repaints reuse them
        if (firstTime) {
            map.getVectorContext().setController(this, "zoom-rect-img", new ActivateRectangleController(map), Event.MOUSEEVENTS);
            map.getVectorContext().setCursor(this, "zoom-rect-img", Cursor.POINTER.getValue());
        }
        firstTime = false;
    }
}
public class TimeZoneNames { /** * Finds time zone name prefix matches for the input text at the * given offset and returns a collection of the matches . * @ param text the text . * @ param start the starting offset within the text . * @ param types the set of name types , or < code > null < / code > for all name types . * @ return A collection of matches . * @ see NameType * @ see MatchInfo * @ hide draft / provisional / internal are hidden on Android */ public Collection < MatchInfo > find ( CharSequence text , int start , EnumSet < NameType > types ) { } }
throw new UnsupportedOperationException ( "The method is not implemented in TimeZoneNames base class." ) ;
public class MarkLogicClient {
    /**
     * Removes a single triple. When the delete cache is enabled the removal is
     * queued for batched deletion; otherwise any pending writes are flushed
     * first and the triple is removed immediately (with skolemized terms) within
     * the current transaction.
     *
     * @param baseURI base URI for the removal
     * @param subject triple subject
     * @param predicate triple predicate
     * @param object triple object
     * @param contexts optional named-graph contexts
     */
    public void sendRemove(String baseURI, Resource subject, URI predicate, Value object, Resource... contexts) throws MarkLogicSesameException {
        if (DELETE_CACHE_ENABLED) {
            timerDeleteCache.add(subject, predicate, object, contexts);
        } else {
            // Flush queued writes so the remove observes them
            if (WRITE_CACHE_ENABLED) sync();
            getClient().performRemove(baseURI, (Resource) skolemize(subject), (URI) skolemize(predicate), skolemize(object), this.tx, contexts);
        }
    }
}
public class JacksonUtils {
    /**
     * Reads a JSON document from the input stream and parses it into a
     * {@link JsonNode}, using the supplied class loader. Pure delegation to
     * {@code SerializationUtils.readJson}.
     *
     * @param source stream containing the JSON document
     * @param classLoader class loader to use during deserialization
     * @return the parsed JSON tree
     */
    public static JsonNode readJson(InputStream source, ClassLoader classLoader) {
        return SerializationUtils.readJson(source, classLoader);
    }
}
public class AfplibPackageImpl {
    /**
     * Returns the {@code EClass} for BFG, resolving it lazily from the global
     * EMF package registry on first access (classifier index 205).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getBFG() {
        if (bfgEClass == null) {
            bfgEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(205);
        }
        return bfgEClass;
    }
}
public class NodeExecStats {
    /**
     * <code>optional string timeline_label = 8;</code>
     *
     * <p>Standard protobuf lazy conversion: when the field is still cached as a
     * String it is converted to a ByteString once and the bytes are cached back
     * into the field.
     */
    public com.google.protobuf.ByteString getTimelineLabelBytes() {
        java.lang.Object ref = timelineLabel_;
        if (ref instanceof java.lang.String) {
            com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            timelineLabel_ = b;
            return b;
        } else {
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class UdpWorker {
    /**
     * Completion callback: cancels any pending cancellation/timeout handle and
     * forwards the outcome to the requester via {@code reply}.
     *
     * @param response the response body
     * @param error whether the request failed
     * @param errorMessage the error message, if any
     * @param stackTrace the stack trace, if any
     * @param statusCode the status code as a string
     * @param statusCodeInt the numeric status code
     */
    public void onComplete(String response, boolean error, String errorMessage, final String stackTrace, String statusCode, int statusCodeInt) {
        cancelCancellable();
        reply(response, error, errorMessage, stackTrace, statusCode, statusCodeInt);
    }
}
public class AsynchronousExecution {
    /**
     * If there is a pending completion notification, deliver it to the executor.
     * Must be called after {@link #setExecutorWithoutCompleting(Executor)}.
     *
     * <p>{@code result} doubles as the "pending" flag: non-null means a
     * completion arrived before the executor was attached; the NULL sentinel
     * stands in for a successful completion whose value is actually null.
     */
    @Restricted(NoExternalUse.class)
    public synchronized final void maybeComplete() {
        assert this.executor != null;
        if (result != null) {
            // Translate the NULL sentinel back to a real null outcome
            executor.completedAsynchronous(result != NULL ? result : null);
            // Consume the pending notification so it is delivered only once
            result = null;
        }
    }
}
public class NumberPath { /** * Method to construct the greater than expression for double * @ param value the double * @ return Expression */ public Expression < Double > gt ( double value ) { } }
String valueString = "'" + value + "'" ; return new Expression < Double > ( this , Operation . gt , valueString ) ;
public class Element { /** * Scroll to this element */ @ Override public void scrollTo ( ) throws WidgetException { } }
WebElement we = findElement ( ) ; Locatable l = ( ( Locatable ) we ) ; l . getCoordinates ( ) . inViewPort ( ) ;
public class MLSparse { /** * Gets column indices * < tt > ic < / tt > points to an integer array of length nzmax containing the column indices of * the corresponding elements in < tt > pr < / tt > and < tt > pi < / tt > . */ public int [ ] getIC ( ) { } }
int [ ] ic = new int [ nzmax ] ; int i = 0 ; for ( IndexMN index : indexSet ) { ic [ i ++ ] = index . n ; } return ic ;
public class appfwpolicy_stats { /** * Use this API to fetch the statistics of all appfwpolicy _ stats resources that are configured on netscaler . */ public static appfwpolicy_stats [ ] get ( nitro_service service ) throws Exception { } }
appfwpolicy_stats obj = new appfwpolicy_stats ( ) ; appfwpolicy_stats [ ] response = ( appfwpolicy_stats [ ] ) obj . stat_resources ( service ) ; return response ;
public class TiffEPProfile {
    /**
     * Validates that the IFD conforms the Tiff/EP standard, walking the IFD
     * chain from the first IFD and validating each in order.
     *
     * <p>NOTE(review): every exception is silently swallowed here, so a failure
     * while walking the chain ends validation early with no report — presumably
     * intentional best-effort behavior, but worth confirming.
     */
    @Override
    public void validate() {
        try {
            IFD ifd = model.getFirstIFD();
            int n = 0;
            while (ifd != null) {
                validateIfd(ifd, n);
                ifd = ifd.getNextIFD();
                n++;
            }
        } catch (Exception e) {
        }
    }
}
public class Javalin {
    /**
     * Adds a WebSocket exception mapper to the instance.
     *
     * <p>The handler is registered under its exception class; the unchecked
     * cast widens the handler's type parameter for storage in the shared map.
     *
     * @see <a href="https://javalin.io/documentation#exception-mapping">Exception mapping in docs</a>
     */
    public <T extends Exception> Javalin wsException(@NotNull Class<T> exceptionClass, @NotNull WsExceptionHandler<? super T> exceptionHandler) {
        wsServlet.getWsExceptionMapper().getHandlers().put(exceptionClass, (WsExceptionHandler<Exception>) exceptionHandler);
        return this;
    }
}
public class FactorGraph {
    /**
     * Adds a variable to this factor graph, if not already present, assigning it
     * the next sequential id when it has none. (Note: returns nothing — the
     * previous "@return The node" doc did not match the void signature.)
     *
     * @param var The variable to add.
     * @throws IllegalStateException if the variable carries an id inconsistent
     *         with this graph's variable list.
     */
    public void addVar(Var var) {
        int id = var.getId();
        // An id inside [0, vars.size()) means the var claims to already be registered
        boolean alreadyAdded = (0 <= id && id < vars.size());
        if (alreadyAdded) {
            if (vars.get(id) != var) {
                throw new IllegalStateException("Var id already set, but factor not yet added.");
            }
        } else {
            // Var was not yet in the factor graph.
            // Check and set the id.
            if (id != -1 && id != vars.size()) {
                throw new IllegalStateException("Var id already set, but incorrect: " + id);
            }
            var.setId(vars.size());
            // Add the Var.
            vars.add(var);
            if (bg != null) {
                log.warn("Discarding BipartiteGraph. This may indicate inefficiency.");
            }
            // Any cached bipartite graph is now stale
            bg = null;
        }
    }
}
public class DefaultGroovyMethodsSupport { /** * helper method for getAt and putAt */ protected static RangeInfo subListBorders ( int size , Range range ) { } }
if ( range instanceof IntRange ) { return ( ( IntRange ) range ) . subListBorders ( size ) ; } int from = normaliseIndex ( DefaultTypeTransformation . intUnbox ( range . getFrom ( ) ) , size ) ; int to = normaliseIndex ( DefaultTypeTransformation . intUnbox ( range . getTo ( ) ) , size ) ; boolean reverse = range . isReverse ( ) ; if ( from > to ) { // support list [ 1 . . - 1] int tmp = to ; to = from ; from = tmp ; reverse = ! reverse ; } return new RangeInfo ( from , to + 1 , reverse ) ;
public class RetryAbsoluteAsRelativeResourceAccessor { /** * Intercepts getResourcesAsStream , the method used to retrieve resources for the master changeset * @ param path * @ return * @ throws IOException */ @ Override public Set < InputStream > getResourcesAsStream ( final String path ) throws IOException { } }
// First , try the path as specified final Set < InputStream > streams = inner . getResourcesAsStream ( path ) ; // If no results were found and the path was absolute , retry without the leading slash if ( ( streams == null || streams . isEmpty ( ) ) && ! path . isEmpty ( ) && path . charAt ( 0 ) == '/' ) { // Strip the leading slash away and re - try the path // This lets us final String newPath = path . substring ( 1 ) ; return inner . getResourcesAsStream ( newPath ) ; } else { return streams ; }
public class SetSubnetsRequest { /** * The IDs of the public subnets . You must specify subnets from at least two Availability Zones . You can specify * only one subnet per Availability Zone . You must specify either subnets or subnet mappings . * You cannot specify Elastic IP addresses for your subnets . * @ param subnetMappings * The IDs of the public subnets . You must specify subnets from at least two Availability Zones . You can * specify only one subnet per Availability Zone . You must specify either subnets or subnet mappings . < / p > * You cannot specify Elastic IP addresses for your subnets . */ public void setSubnetMappings ( java . util . Collection < SubnetMapping > subnetMappings ) { } }
if ( subnetMappings == null ) { this . subnetMappings = null ; return ; } this . subnetMappings = new java . util . ArrayList < SubnetMapping > ( subnetMappings ) ;
public class TreeTaggerWrapper {
    /**
     * Tokenizes the JCas document text using the TreeTagger tokenizer and adds
     * the recognized tokens to the JCas indexes. Empty lines are mapped to a
     * sentinel EMPTYLINE token.
     *
     * @param jcas JCas object supplied by the pipeline
     */
    private void tokenize(JCas jcas) {
        // read tokenized text to add tokens to the jcas
        Logger.printDetail(component, "TreeTagger (tokenization) with: " + ttprops.abbFileName);
        EnumSet<Flag> flags = Flag.getSet(ttprops.languageSwitch);
        TreeTaggerTokenizer ttt;
        // NOTE(review): this hard-coded assignment overwrites any configured
        // abbreviation file and makes the null-check just below dead code —
        // looks like leftover debug/experimental code; confirm intent.
        ttprops.abbFileName = "english-abbreviations";
        if (ttprops.abbFileName != null) {
            ttt = new TreeTaggerTokenizer(ttprops.rootPath + ttprops.fileSeparator + "lib" + ttprops.fileSeparator + ttprops.abbFileName, flags);
        } else {
            ttt = new TreeTaggerTokenizer(null, flags);
        }
        // Blank lines are made visible to the tokenizer as an EMPTYLINE marker
        String docText = jcas.getDocumentText().replaceAll("\n\n", "\nEMPTYLINE\n");
        List<String> tokenized = ttt.tokenize(docText);
        int tokenOffset = 0;
        // loop through all the lines in the treetagger output
        for (String s : tokenized) {
            // charset missmatch fallback: signal (invalid) s
            if ((!(s.equals("EMPTYLINE"))) && (jcas.getDocumentText().indexOf(s, tokenOffset) < 0)) {
                Logger.printError(component, "Tokenization was interrupted because the token \"" + s + "\" could not be found in the original text. The reason for this might be " + "that the encoding of the document is not UTF-8. This token was skipped and " + "if it was part of a temporal expression, will not be extracted.");
                continue;
            }
            // create tokens and add them to the jcas's indexes.
            Token newToken = new Token(jcas);
            if (s.equals("EMPTYLINE")) {
                // Zero-width sentinel token marking an empty line; only indexed
                // when part-of-speech annotation is enabled
                newToken.setBegin(tokenOffset);
                newToken.setEnd(tokenOffset);
                newToken.setPos("EMPTYLINE");
                if (annotate_partofspeech) {
                    newToken.addToIndexes();
                }
            } else {
                newToken.setBegin(jcas.getDocumentText().indexOf(s, tokenOffset));
                newToken.setEnd(newToken.getBegin() + s.length());
                newToken.addToIndexes();
                tokenOffset = newToken.getEnd();
            }
        }
    }
}
public class CommercePriceListUserSegmentEntryRelPersistenceImpl {
    /**
     * Returns the commerce price list user segment entry rels before and after
     * the current commerce price list user segment entry rel in the ordered set
     * where commercePriceListId = &#63;.
     *
     * @param commercePriceListUserSegmentEntryRelId the primary key of the current commerce price list user segment entry rel
     * @param commercePriceListId the commerce price list ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce price list user segment entry rel
     * @throws NoSuchPriceListUserSegmentEntryRelException if a commerce price list user segment entry rel with the primary key could not be found
     */
    @Override
    public CommercePriceListUserSegmentEntryRel[] findByCommercePriceListId_PrevAndNext(long commercePriceListUserSegmentEntryRelId, long commercePriceListId, OrderByComparator<CommercePriceListUserSegmentEntryRel> orderByComparator) throws NoSuchPriceListUserSegmentEntryRelException {
        CommercePriceListUserSegmentEntryRel commercePriceListUserSegmentEntryRel = findByPrimaryKey(commercePriceListUserSegmentEntryRelId);
        Session session = null;
        try {
            session = openSession();
            // array layout: [0] = previous, [1] = current, [2] = next
            CommercePriceListUserSegmentEntryRel[] array = new CommercePriceListUserSegmentEntryRelImpl[3];
            array[0] = getByCommercePriceListId_PrevAndNext(session, commercePriceListUserSegmentEntryRel, commercePriceListId, orderByComparator, true);
            array[1] = commercePriceListUserSegmentEntryRel;
            array[2] = getByCommercePriceListId_PrevAndNext(session, commercePriceListUserSegmentEntryRel, commercePriceListId, orderByComparator, false);
            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class SpringSecurityUserManager {
    /**
     * Disables the given AppSensor user in Spring Security: looks the user up
     * via the configured {@code UserDetailsManager}, stores a disabled copy, and
     * marks the user logged out so the disable takes effect immediately. A
     * missing manager or unknown user is logged and silently skipped.
     *
     * @param appSensorUser the user to disable
     */
    @Override
    public void disable(User appSensorUser) {
        logger.info("Request received to disable user <{}>.", appSensorUser.getUsername());
        // Guard: without a UserDetailsManager there is nothing to update
        if (userDetailsManager == null) {
            logger.warn("Could not disable user [" + appSensorUser.getUsername() + "] since the spring security " + "UserDetailsManager is not properly configured.");
            return;
        }
        UserDetails springUser = userDetailsManager.loadUserByUsername(appSensorUser.getUsername());
        if (springUser == null) {
            logger.warn("Could not disable user [" + appSensorUser.getUsername() + "] because the user could not " + "be found by lookup");
            return;
        }
        logger.info("Disabling user <{}>.", springUser.getUsername());
        // Persist a disabled wrapper of the existing user details
        userDetailsManager.updateUser(new DisabledUser(springUser));
        logger.info("After disabling user <{}>, also logging out so the disable gets triggered.", springUser.getUsername());
        userResponseCache.setUserLoggedOut(appSensorUser.getUsername());
    }
}
public class AponReader { /** * Converts into a given Parameters object from a file . * @ param < T > the generic type * @ param file the file to parse * @ param encoding the character encoding * @ param parameters the Parameters object * @ return the Parameters object * @ throws AponParseException if reading APON format document fails */ public static < T extends Parameters > T parse ( File file , String encoding , T parameters ) throws AponParseException { } }
if ( file == null ) { throw new IllegalArgumentException ( "file must not be null" ) ; } if ( parameters == null ) { throw new IllegalArgumentException ( "parameters must not be null" ) ; } AponReader aponReader = null ; try { if ( encoding == null ) { aponReader = new AponReader ( new FileReader ( file ) ) ; } else { aponReader = new AponReader ( new InputStreamReader ( new FileInputStream ( file ) , encoding ) ) ; } return aponReader . read ( parameters ) ; } catch ( Exception e ) { throw new AponParseException ( "Failed to parse string with APON format" , e ) ; } finally { if ( aponReader != null ) { try { aponReader . close ( ) ; } catch ( IOException e ) { // ignore } } }
public class SlotManager { /** * Removes a pending slot request identified by the given allocation id from a slot identified * by the given slot id . * @ param slotId identifying the slot * @ param allocationId identifying the presumable assigned pending slot request */ private void removeSlotRequestFromSlot ( SlotID slotId , AllocationID allocationId ) { } }
TaskManagerSlot taskManagerSlot = slots . get ( slotId ) ; if ( null != taskManagerSlot ) { if ( taskManagerSlot . getState ( ) == TaskManagerSlot . State . PENDING && Objects . equals ( allocationId , taskManagerSlot . getAssignedSlotRequest ( ) . getAllocationId ( ) ) ) { TaskManagerRegistration taskManagerRegistration = taskManagerRegistrations . get ( taskManagerSlot . getInstanceId ( ) ) ; if ( taskManagerRegistration == null ) { throw new IllegalStateException ( "Trying to remove slot request from slot for which there is no TaskManager " + taskManagerSlot . getInstanceId ( ) + " is registered." ) ; } // clear the pending slot request taskManagerSlot . clearPendingSlotRequest ( ) ; updateSlotState ( taskManagerSlot , taskManagerRegistration , null , null ) ; } else { LOG . debug ( "Ignore slot request removal for slot {}." , slotId ) ; } } else { LOG . debug ( "There was no slot with {} registered. Probably this slot has been already freed." , slotId ) ; }
public class DescribeSnapshotsRequest { /** * The IDs of the AWS accounts that can create volumes from the snapshot . * @ return The IDs of the AWS accounts that can create volumes from the snapshot . */ public java . util . List < String > getRestorableByUserIds ( ) { } }
if ( restorableByUserIds == null ) { restorableByUserIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return restorableByUserIds ;
public class DRL5Expressions {

    /**
     * ANTLR-generated recognizer for the {@code assignmentOperator} grammar rule.
     * src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:684:1:
     * assignmentOperator : ( EQUALS_ASSIGN | PLUS_ASSIGN | MINUS_ASSIGN | MULT_ASSIGN
     * | DIV_ASSIGN | AND_ASSIGN | OR_ASSIGN | XOR_ASSIGN | MOD_ASSIGN
     * | LESS LESS EQUALS_ASSIGN
     * | ( GREATER GREATER GREATER )=&gt; GREATER GREATER GREATER EQUALS_ASSIGN
     * | ( GREATER GREATER )=&gt; GREATER GREATER EQUALS_ASSIGN ) ;
     *
     * <p>NOTE(review): machine-generated parser code — do not edit by hand;
     * regenerate from the grammar instead.</p>
     *
     * @throws RecognitionException if no alternative matches the token stream
     */
    public final void assignmentOperator() throws RecognitionException {
        try {
            // Phase 1: pick one of 12 alternatives using up to 3 tokens of
            // lookahead; the >>>= / >>= forms are disambiguated by syntactic
            // predicates (synpred42/synpred43).
            int alt89 = 12;
            switch (input.LA(1)) {
            case EQUALS_ASSIGN: { alt89 = 1; } break;
            case PLUS_ASSIGN: { alt89 = 2; } break;
            case MINUS_ASSIGN: { alt89 = 3; } break;
            case MULT_ASSIGN: { alt89 = 4; } break;
            case DIV_ASSIGN: { alt89 = 5; } break;
            case AND_ASSIGN: { alt89 = 6; } break;
            case OR_ASSIGN: { alt89 = 7; } break;
            case XOR_ASSIGN: { alt89 = 8; } break;
            case MOD_ASSIGN: { alt89 = 9; } break;
            case LESS: { alt89 = 10; } break;
            case GREATER: {
                int LA89_11 = input.LA(2);
                if ((LA89_11 == GREATER)) {
                    int LA89_12 = input.LA(3);
                    // >>>= when a third GREATER follows; otherwise >>= when
                    // EQUALS_ASSIGN follows (alt89 stays 12 by its initial value
                    // if neither predicate fires).
                    if ((LA89_12 == GREATER) && (synpred42_DRL5Expressions())) {
                        alt89 = 11;
                    } else if ((LA89_12 == EQUALS_ASSIGN) && (synpred43_DRL5Expressions())) {
                        alt89 = 12;
                    }
                } else {
                    // While backtracking, record failure instead of throwing.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return;
                    }
                    int nvaeMark = input.mark();
                    try {
                        input.consume();
                        NoViableAltException nvae = new NoViableAltException("", 89, 11, input);
                        throw nvae;
                    } finally {
                        input.rewind(nvaeMark);
                    }
                }
            }
                break;
            default:
                if (state.backtracking > 0) {
                    state.failed = true;
                    return;
                }
                NoViableAltException nvae = new NoViableAltException("", 89, 0, input);
                throw nvae;
            }
            // Phase 2: consume the tokens of the chosen alternative.
            switch (alt89) {
            case 1:
                // DRL5Expressions.g:685:9: EQUALS_ASSIGN
                {
                    match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_assignmentOperator4158);
                    if (state.failed) return;
                }
                break;
            case 2:
                // DRL5Expressions.g:686:7: PLUS_ASSIGN
                {
                    match(input, PLUS_ASSIGN, FOLLOW_PLUS_ASSIGN_in_assignmentOperator4166);
                    if (state.failed) return;
                }
                break;
            case 3:
                // DRL5Expressions.g:687:7: MINUS_ASSIGN
                {
                    match(input, MINUS_ASSIGN, FOLLOW_MINUS_ASSIGN_in_assignmentOperator4174);
                    if (state.failed) return;
                }
                break;
            case 4:
                // DRL5Expressions.g:688:7: MULT_ASSIGN
                {
                    match(input, MULT_ASSIGN, FOLLOW_MULT_ASSIGN_in_assignmentOperator4182);
                    if (state.failed) return;
                }
                break;
            case 5:
                // DRL5Expressions.g:689:7: DIV_ASSIGN
                {
                    match(input, DIV_ASSIGN, FOLLOW_DIV_ASSIGN_in_assignmentOperator4190);
                    if (state.failed) return;
                }
                break;
            case 6:
                // DRL5Expressions.g:690:7: AND_ASSIGN
                {
                    match(input, AND_ASSIGN, FOLLOW_AND_ASSIGN_in_assignmentOperator4198);
                    if (state.failed) return;
                }
                break;
            case 7:
                // DRL5Expressions.g:691:7: OR_ASSIGN
                {
                    match(input, OR_ASSIGN, FOLLOW_OR_ASSIGN_in_assignmentOperator4206);
                    if (state.failed) return;
                }
                break;
            case 8:
                // DRL5Expressions.g:692:7: XOR_ASSIGN
                {
                    match(input, XOR_ASSIGN, FOLLOW_XOR_ASSIGN_in_assignmentOperator4214);
                    if (state.failed) return;
                }
                break;
            case 9:
                // DRL5Expressions.g:693:7: MOD_ASSIGN
                {
                    match(input, MOD_ASSIGN, FOLLOW_MOD_ASSIGN_in_assignmentOperator4222);
                    if (state.failed) return;
                }
                break;
            case 10:
                // DRL5Expressions.g:694:7: LESS LESS EQUALS_ASSIGN  (i.e. <<=)
                {
                    match(input, LESS, FOLLOW_LESS_in_assignmentOperator4230);
                    if (state.failed) return;
                    match(input, LESS, FOLLOW_LESS_in_assignmentOperator4232);
                    if (state.failed) return;
                    match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_assignmentOperator4234);
                    if (state.failed) return;
                }
                break;
            case 11:
                // DRL5Expressions.g:695:7: GREATER GREATER GREATER EQUALS_ASSIGN  (i.e. >>>=)
                {
                    match(input, GREATER, FOLLOW_GREATER_in_assignmentOperator4251);
                    if (state.failed) return;
                    match(input, GREATER, FOLLOW_GREATER_in_assignmentOperator4253);
                    if (state.failed) return;
                    match(input, GREATER, FOLLOW_GREATER_in_assignmentOperator4255);
                    if (state.failed) return;
                    match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_assignmentOperator4257);
                    if (state.failed) return;
                }
                break;
            case 12:
                // DRL5Expressions.g:696:7: GREATER GREATER EQUALS_ASSIGN  (i.e. >>=)
                {
                    match(input, GREATER, FOLLOW_GREATER_in_assignmentOperator4272);
                    if (state.failed) return;
                    match(input, GREATER, FOLLOW_GREATER_in_assignmentOperator4274);
                    if (state.failed) return;
                    match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_assignmentOperator4276);
                    if (state.failed) return;
                }
                break;
            }
        } catch (RecognitionException re) {
            throw re;
        } finally {
            // do for sure before leaving
        }
    }
}
public class JChromeTabbedPane { /** * the event was consumed - DO NOT PAINT */ public void mouseReleased ( MouseEvent e ) { } }
if ( ! hasFocus ) { return ; } boolean repaint = tabRowPainter . mouseReleased ( e ) ; if ( ! repaint ) { repaint = toolbarPainter . mouseReleased ( e ) ; }
public class AbstractMutableTimecode {

    /**
     * Sets the timecode to the provided hours, minutes, seconds and frames.
     *
     * @param hours the hours component
     * @param minutes the minutes component
     * @param seconds the seconds component
     * @param frames the frames component
     */
    public void setHMSF(int hours, int minutes, int seconds, int frames) {
        // Delegates to the subclass hook; any validation or normalisation of the
        // components happens there, not here.
        innerSetHMSF(hours, minutes, seconds, frames);
    }
}
public class GBSIterator {

    /**
     * Find first key in index using optimistic locking.
     *
     * <p>Reads the index without holding its lock and afterwards checks whether a
     * concurrent modification happened; on any sign of interference the caller is
     * told to retry pessimistically.</p>
     *
     * @param stack The stack to use to record the traversal
     * @return presumably {@code optimisticWorked} on success and
     *         {@code pessimisticNeeded} when the unlocked read cannot be trusted
     *         — confirm against the declarations of those constants
     */
    private boolean optimisticFindFirst(DeleteStack stack) {
        Object q = null;
        // Snapshot the index's version counters before the unlocked traversal.
        int v1 = _index.vno();
        int x1 = _index.xno();
        // Odd version number: presumably a modification is in progress, so an
        // unlocked read is unsafe — fall back to the pessimistic path.
        if ((v1 & 1) != 0) return pessimisticNeeded;
        // NOTE(review): empty synchronized block — presumably used as a memory
        // barrier before the unlocked traversal; confirm intent before changing.
        synchronized (this) {
        }
        try {
            q = getFirst(stack);
        } catch (NullPointerException npe) {
            // No FFDC Code Needed.
            // An NPE here can be a legitimate symptom of a concurrent change;
            // let checkForPossibleIndexChange decide whether to retry or rethrow.
            _nullPointerExceptions++;
            return GBSTree.checkForPossibleIndexChange(v1, _index.vno(), npe, "optimisticFindFirst");
        } catch (OptimisticDepthException ode) {
            // No FFDC Code Needed.
            _optimisticDepthExceptions++;
            return GBSTree.checkForPossibleIndexChange(v1, _index.vno(), ode, "optimisticFindFirst");
        }
        // The index version changed while we were reading: result is untrusted.
        if (v1 != _index.vno()) return pessimisticNeeded;
        // Publish the located position together with the versions it is valid for.
        _current1.setLocation(q, v1, x1);
        _optimisticFindFirsts++;
        return optimisticWorked;
    }
}
public class CqlBlockedDataReaderDAO { /** * Converts rows from a single C * row to a Record . */ private Iterator < Record > decodeRows ( Iterator < Iterable < Row > > rowGroups , final AstyanaxTable table , Placement placement ) { } }
return Iterators . transform ( rowGroups , rowGroup -> { String key = AstyanaxStorage . getContentKey ( getRawKeyFromRowGroup ( rowGroup ) ) ; return newRecordFromCql ( new Key ( table , key ) , rowGroup , placement , ByteBufferUtil . bytesToHex ( getRawKeyFromRowGroupOrNull ( rowGroup ) ) ) ; } ) ;
public class Aggregation { /** * is within XFF , otherwise return NaN . */ private Double aggregate ( Datasource ds , Collection < Double > values ) { } }
return ( ( values . size ( ) / m_intervalsPer ) > ds . getXff ( ) ) ? ds . getAggregationFuction ( ) . apply ( values ) : Double . NaN ;
public class ISOS { /** * Vote for neighbors not being outliers . The key method of SOS . * @ param ignore Object to ignore * @ param di Neighbor object IDs . * @ param p Probabilities * @ param norm Normalization factor ( 1 / sum ) * @ param scores Output score storage */ public static void nominateNeighbors ( DBIDIter ignore , DBIDArrayIter di , double [ ] p , double norm , WritableDoubleDataStore scores ) { } }
for ( di . seek ( 0 ) ; di . valid ( ) ; di . advance ( ) ) { if ( DBIDUtil . equal ( ignore , di ) ) { continue ; } double v = p [ di . getOffset ( ) ] * norm ; // Normalize if ( ! ( v > 0 ) ) { break ; } scores . increment ( di , FastMath . log1p ( - v ) ) ; }
public class ListUtil { /** * cast a Object Array to a String Array * @ param array * @ return String Array * @ throws PageException */ public static String [ ] toStringArray ( Array array ) throws PageException { } }
String [ ] arr = new String [ array . size ( ) ] ; for ( int i = 0 ; i < arr . length ; i ++ ) { arr [ i ] = Caster . toString ( array . get ( i + 1 , null ) ) ; } return arr ;
public class JodaBeanXmlReader { /** * reader can be anywhere , but normally at StartDocument */ private StartElement advanceToStartElement ( ) throws Exception { } }
while ( reader . hasNext ( ) ) { XMLEvent event = nextEvent ( "advnc " ) ; if ( event . isStartElement ( ) ) { return event . asStartElement ( ) ; } } throw new IllegalArgumentException ( "Unexpected end of document" ) ;
public class Random { /** * Generates a permutation of given array . */ public void permutate ( int [ ] x ) { } }
for ( int i = 0 ; i < x . length ; i ++ ) { int j = i + nextInt ( x . length - i ) ; Math . swap ( x , i , j ) ; }
public class ManagedApplication {

    /**
     * Checks the scoped instances' states with respect to missed heart beats.
     *
     * <p>Instances that were never started, or that are being (un)deployed, have
     * their missed-heartbeat counter cleared. For every other scoped instance the
     * counter is incremented; once it exceeds the threshold, the instance is
     * marked as a PROBLEM and a change notification is fired.</p>
     *
     * @param notificationMngr the notification manager used to signal status changes
     */
    public void checkStates(INotificationMngr notificationMngr) {
        // Check the status of scoped instances.
        for (Instance scopedInstance : InstanceHelpers.findAllScopedInstances(this.application)) {

            // Never started instances, or scoped instances that have been
            // stopped by an agent, are not processed anymore here.
            if (scopedInstance.getStatus() == InstanceStatus.NOT_DEPLOYED
                    || scopedInstance.getStatus() == InstanceStatus.DEPLOYING
                    || scopedInstance.getStatus() == InstanceStatus.UNDEPLOYING) {
                scopedInstance.data.remove(MISSED_HEARTBEATS);

            } else {
                // Increment the missed-heartbeat counter (absent means zero).
                String previous = scopedInstance.data.get(MISSED_HEARTBEATS);
                int missed = (previous == null ? 0 : Integer.parseInt(previous)) + 1;

                if (missed > THRESHOLD) {
                    scopedInstance.setStatus(InstanceStatus.PROBLEM);
                    notificationMngr.instance(scopedInstance, this.application, EventType.CHANGED);
                    this.logger.severe("Agent " + InstanceHelpers.computeInstancePath(scopedInstance) + " has not sent heart beats for quite a long time. Status changed to PROBLEM.");
                }

                scopedInstance.data.put(MISSED_HEARTBEATS, String.valueOf(missed));
            }
        }
    }
}
public class BatchGetPartitionResult { /** * A list of the partition values in the request for which partions were not returned . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUnprocessedKeys ( java . util . Collection ) } or { @ link # withUnprocessedKeys ( java . util . Collection ) } if you * want to override the existing values . * @ param unprocessedKeys * A list of the partition values in the request for which partions were not returned . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchGetPartitionResult withUnprocessedKeys ( PartitionValueList ... unprocessedKeys ) { } }
if ( this . unprocessedKeys == null ) { setUnprocessedKeys ( new java . util . ArrayList < PartitionValueList > ( unprocessedKeys . length ) ) ; } for ( PartitionValueList ele : unprocessedKeys ) { this . unprocessedKeys . add ( ele ) ; } return this ;
public class AdSenseSettings {

    /**
     * Gets the fontFamily value for this AdSenseSettings.
     *
     * @return fontFamily
     *         Specifies the font family of the {@link AdUnit}. This attribute is
     *         optional and defaults to the ad unit's parent or ancestor's setting
     *         if one has been set. If no ancestor of the ad unit has set
     *         {@code fontFamily}, the attribute is defaulted to
     *         {@link FontFamily#DEFAULT}.
     */
    public com.google.api.ads.admanager.axis.v201805.AdSenseSettingsFontFamily getFontFamily() {
        // Simple accessor; may return null if the field was never populated.
        return fontFamily;
    }
}