signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CommandBasedTaskService { /** * TODO : does not filter on language */ public List < TaskSummary > getTasksByStatusByProcessInstanceId ( long processInstanceId , List < Status > status , String language ) { } }
return executor . execute ( new GetTasksByStatusByProcessInstanceIdCommand ( processInstanceId , status ) ) ;
public class NodeTypes { /** * Obtain a new version of this cache with the specified node types removed from the new cache . * @ param removedNodeTypes the node types that are to be removed from the resulting cache ; may not be null but may be empty * @ return the resulting cache that contains all of the node types within this cache but without the supplied node types ; * never null */ protected NodeTypes without ( Collection < JcrNodeType > removedNodeTypes ) { } }
if ( removedNodeTypes . isEmpty ( ) ) return this ; Collection < JcrNodeType > nodeTypes = new HashSet < JcrNodeType > ( this . nodeTypes . values ( ) ) ; nodeTypes . removeAll ( removedNodeTypes ) ; return new NodeTypes ( this . context , nodeTypes , getVersion ( ) + 1 ) ;
public class Node { /** * Get the double value of another map in the current node position . * @ param map the map from which to get the value . * @ return the double value or a novalue . */ public double getDoubleValueFromMap ( RandomIter map ) { } }
try { if ( map == null ) { return HMConstants . doubleNovalue ; } double value = map . getSampleDouble ( col , row , 0 ) ; return value ; } catch ( Exception e ) { // ignore and return novalue return HMConstants . doubleNovalue ; }
public class CmsImportVersion7 { /** * Returns the list of properties to ignore during import . < p > * @ return the list of properties to ignore during import */ protected List < String > getIgnoredProperties ( ) { } }
if ( m_ignoredProperties == null ) { // get list of ignored properties m_ignoredProperties = OpenCms . getImportExportManager ( ) . getIgnoredProperties ( ) ; if ( m_ignoredProperties == null ) { m_ignoredProperties = Collections . emptyList ( ) ; } } return m_ignoredProperties ;
public class AbstractPlugin { /** * Enable this test */ @ Override public void setEnabled ( boolean enabled ) { } }
if ( this . enabled != enabled ) { this . enabled = enabled ; setProperty ( "enabled" , Boolean . toString ( enabled ) ) ; if ( enabled && getAlertThreshold ( ) == AlertThreshold . OFF ) { setAlertThreshold ( AlertThreshold . DEFAULT ) ; } }
public class KeyValueHandler { /** * Helper method to reset the reader index of the content so if downstream components * on outbound did modify it , it can be reused ( looking at you , SSLHandler ) . * @ param request the request which may have content that needs to be reset . */ private static void resetContentReaderIndex ( BinaryRequest request ) { } }
ByteBuf content = contentFromWriteRequest ( request ) ; if ( content != null ) { try { content . readerIndex ( 0 ) ; } catch ( Exception ex ) { LOGGER . warn ( "Exception while resetting the content reader index to 0, " + "please report this as a bug." , ex ) ; } }
public class NginxMetrics { /** * Get NginxMetrics by their names . Order of NginxMetrics in the resulting list * is constant and does not depend on order of metric names . * { @ link # HANDLED } is added to the result even if its name is not passed . * @ param metricsNames names of metrics . * @ return list of metrics found . */ public static List < NginxMetrics > findMetrics ( List < String > metricsNames ) { } }
if ( metricsNames == null || metricsNames . isEmpty ( ) ) { return Collections . singletonList ( NginxMetrics . HANDLED ) ; } final List < NginxMetrics > result = new ArrayList < > ( metricsNames . size ( ) ) ; for ( NginxMetrics metric : getMetricsOfType ( NginxMetrics . class ) ) { if ( metricsNames . contains ( metric . getCaption ( ) ) ) { result . add ( metric ) ; continue ; } // we need ' handled ' to monitor nginx restarts . if ( metric == NginxMetrics . HANDLED ) { result . add ( metric ) ; } } return result ;
public class TransactionServiceMain { /** * Invoked by jsvc to start the program . */ public void start ( ) throws Exception { } }
Injector injector = Guice . createInjector ( new ConfigModule ( conf ) , new ZKModule ( ) , new DiscoveryModules ( ) . getDistributedModules ( ) , new TransactionModules ( ) . getDistributedModules ( ) , new TransactionClientModule ( ) ) ; ZKClientService zkClientService = injector . getInstance ( ZKClientService . class ) ; zkClientService . startAndWait ( ) ; // start a tx server txService = injector . getInstance ( TransactionService . class ) ; try { LOG . info ( "Starting {}" , getClass ( ) . getSimpleName ( ) ) ; txService . startAndWait ( ) ; } catch ( Exception e ) { System . err . println ( "Failed to start service: " + e . getMessage ( ) ) ; }
public class YarnUtils { /** * Returns the host names for all nodes in yarnClient ' s YARN cluster . * @ param yarnClient the client to use to look up node information * @ return the set of host names */ public static Set < String > getNodeHosts ( YarnClient yarnClient ) throws YarnException , IOException { } }
ImmutableSet . Builder < String > nodeHosts = ImmutableSet . builder ( ) ; for ( NodeReport runningNode : yarnClient . getNodeReports ( USABLE_NODE_STATES ) ) { nodeHosts . add ( runningNode . getNodeId ( ) . getHost ( ) ) ; } return nodeHosts . build ( ) ;
public class RasterReliefType { /** * Gets the value of the genericApplicationPropertyOfRasterRelief property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the genericApplicationPropertyOfRasterRelief property . * For example , to add a new item , do as follows : * < pre > * get _ GenericApplicationPropertyOfRasterRelief ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } * { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } */ public List < JAXBElement < Object > > get_GenericApplicationPropertyOfRasterRelief ( ) { } }
if ( _GenericApplicationPropertyOfRasterRelief == null ) { _GenericApplicationPropertyOfRasterRelief = new ArrayList < JAXBElement < Object > > ( ) ; } return this . _GenericApplicationPropertyOfRasterRelief ;
public class FSEditLog { /** * Add generation stamp record to edit log */ public void logGenerationStamp ( long genstamp ) { } }
SetGenstampOp op = SetGenstampOp . getInstance ( ) ; op . set ( genstamp ) ; logEdit ( op ) ;
public class CurrentGpsInfo { /** * Method to add a new { @ link GSVSentence } . * @ param gsv the sentence to add . */ public void addGSV ( GSVSentence gsv ) { } }
try { if ( gsv . isValid ( ) ) satelliteInfo = gsv . getSatelliteInfo ( ) ; } catch ( Exception e ) { // ignore it , this should be handled in the isValid , // if an exception is thrown , we can ' t deal with it here . }
public class JSONObject { /** * get float value . * @ param key key . * @ param def default value . * @ return value or default value . */ public float getFloat ( String key , float def ) { } }
Object tmp = objectMap . get ( key ) ; return tmp != null && tmp instanceof Number ? ( ( Number ) tmp ) . floatValue ( ) : def ;
public class Query { /** * Adds the GROUP BY columns . * @ param groupbyColumns The columns . * @ return This expression . */ public Query groupby ( final Collection < ? extends Expression > groupbyColumns ) { } }
if ( groupbyColumns == null ) { return this ; } this . groupbyColumns . addAll ( groupbyColumns ) ; return this ;
public class HttpHeaders { /** * @ deprecated Use { @ link # getInt ( CharSequence ) } instead . * Returns the integer header value with the specified header name . If * there are more than one header value for the specified header name , the * first value is returned . * @ return the header value * @ throws NumberFormatException * if there is no such header or the header value is not a number */ @ Deprecated public static int getIntHeader ( HttpMessage message , CharSequence name ) { } }
String value = message . headers ( ) . get ( name ) ; if ( value == null ) { throw new NumberFormatException ( "header not found: " + name ) ; } return Integer . parseInt ( value ) ;
public class WsLogger { /** * @ see java . util . logging . Logger # config ( java . lang . String ) */ @ Override public void config ( String msg ) { } }
if ( isLoggable ( Level . CONFIG ) ) { log ( Level . CONFIG , msg ) ; }
public class OmsSaintGeo { /** * Use Gaukler - Strickler formula for the evaluation of the initial condition * ( hypotesis of steady flow ) . * @ param q : discharge * @ param level : the level * @ param sectionsList */ private void calculateGauklerStrickler ( double q , double [ ] level , List < RiverPoint > sectionsList ) { } }
double toll , conta_cicli ; double IF , max_tir , tir_dx , tir_sx , tir_med , val_dx , val_sx , val_med ; double [ ] [ ] idrgeo ; /* create a complete list to proceed with the next elaborations */ int imax = sectionsList . size ( ) ; double [ ] minsez = new double [ imax ] ; double [ ] maxsez = new double [ imax ] ; for ( int i = 0 ; i < imax ; i ++ ) { RiverPoint section = sectionsList . get ( i ) ; minsez [ i ] = section . getMinElevation ( ) ; maxsez [ i ] = section . getMaxElevation ( ) ; level [ i ] = minsez [ i ] + 1 ; } /* calculate the steady flow water level for the sections of the j - esim segment */ for ( int i = 0 ; i < imax ; i ++ ) { /* the slope of the bottom of the i - esim section */ if ( i == 0 || i == 1 ) IF = ( minsez [ i ] - minsez [ i + 1 ] ) / ( sectionsList . get ( i + 1 ) . getProgressiveDistance ( ) - sectionsList . get ( i ) . getProgressiveDistance ( ) ) ; else if ( i == imax - 1 || i == imax - 2 ) IF = ( minsez [ i - 1 ] - minsez [ i ] ) / ( sectionsList . get ( i ) . getProgressiveDistance ( ) - sectionsList . get ( i - 1 ) . getProgressiveDistance ( ) ) ; else IF = ( minsez [ i - 2 ] - minsez [ i + 2 ] ) / ( sectionsList . get ( i + 2 ) . getProgressiveDistance ( ) - sectionsList . get ( i - 2 ) . getProgressiveDistance ( ) ) ; if ( IF <= 0 ) IF = ( minsez [ 0 ] - minsez [ imax - 1 ] ) / ( sectionsList . get ( imax - 1 ) . getProgressiveDistance ( ) - sectionsList . get ( 0 ) . getProgressiveDistance ( ) ) ; /* * the function is calculated for the extreme values looking for the minimun value * at the bank */ max_tir = maxsez [ i ] ; tir_dx = max_tir ; tir_sx = minsez [ i ] + MIN_TIR ; toll = 100 ; conta_cicli = 0 ; while ( toll >= TOLL && conta_cicli <= MAX_CICLI ) { level [ i ] = tir_dx ; idrgeo = wettedArea ( level , sectionsList ) ; val_dx = q - idrgeo [ i ] [ 0 ] * idrgeo [ i ] [ 4 ] * Math . pow ( IF , ( 0.5 ) ) * Math . 
pow ( idrgeo [ i ] [ 2 ] , ( 2.0 / 3.0 ) ) ; level [ i ] = tir_sx ; idrgeo = wettedArea ( level , sectionsList ) ; val_sx = q - idrgeo [ i ] [ 0 ] * idrgeo [ i ] [ 4 ] * Math . pow ( IF , ( 0.5 ) ) * Math . pow ( idrgeo [ i ] [ 2 ] , ( 2.0 / 3.0 ) ) ; if ( ( val_dx * val_sx ) > 0 ) { pm . errorMessage ( "Evaluation of the steady flow not possible for the section " + i + "solution not found." ) ; } tir_med = ( tir_dx + tir_sx ) / 2.0 ; level [ i ] = tir_med ; idrgeo = wettedArea ( level , sectionsList ) ; val_med = q - idrgeo [ i ] [ 0 ] * idrgeo [ i ] [ 4 ] * Math . pow ( IF , ( 0.5 ) ) * Math . pow ( idrgeo [ i ] [ 2 ] , ( 2.0 / 3.0 ) ) ; toll = Math . abs ( tir_dx - tir_sx ) ; if ( ( val_dx * val_med ) < 0 ) tir_sx = tir_med ; else tir_dx = tir_med ; conta_cicli = conta_cicli + 1 ; } }
public class SnorocketOWLReasoner { /** * Performs a full classification on the current ontology . */ private void classify ( ) { } }
// Classify monitor . taskStarted ( "Classifying" ) ; monitor . taskBusy ( ) ; reasoner = reasoner . classify ( ) ; monitor . taskEnded ( ) ; monitor . taskStarted ( "Building taxonomy" ) ; monitor . taskBusy ( ) ; taxonomy = reasoner . getClassifiedOntology ( ) ; monitor . taskEnded ( ) ;
public class ProtoLexer { /** * $ ANTLR start " HEX " */ public final void mHEX ( ) throws RecognitionException { } }
try { int _type = HEX ; int _channel = DEFAULT_TOKEN_CHANNEL ; // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:5 : ( ( MINUS ) ? ' 0 ' ( ' x ' | ' X ' ) ( HEX _ DIGIT ) + ) // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:9 : ( MINUS ) ? ' 0 ' ( ' x ' | ' X ' ) ( HEX _ DIGIT ) + { // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:9 : ( MINUS ) ? int alt10 = 2 ; switch ( input . LA ( 1 ) ) { case '-' : { alt10 = 1 ; } break ; } switch ( alt10 ) { case 1 : // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:9 : MINUS { mMINUS ( ) ; } break ; } match ( '0' ) ; if ( input . LA ( 1 ) == 'X' || input . LA ( 1 ) == 'x' ) { input . consume ( ) ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; recover ( mse ) ; throw mse ; } // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:30 : ( HEX _ DIGIT ) + int cnt11 = 0 ; loop11 : do { int alt11 = 2 ; switch ( input . LA ( 1 ) ) { case '0' : case '1' : case '2' : case '3' : case '4' : case '5' : case '6' : case '7' : case '8' : case '9' : case 'A' : case 'B' : case 'C' : case 'D' : case 'E' : case 'F' : case 'a' : case 'b' : case 'c' : case 'd' : case 'e' : case 'f' : { alt11 = 1 ; } break ; } switch ( alt11 ) { case 1 : // com / dyuproject / protostuff / parser / ProtoLexer . g : 252:30 : HEX _ DIGIT { mHEX_DIGIT ( ) ; } break ; default : if ( cnt11 >= 1 ) break loop11 ; EarlyExitException eee = new EarlyExitException ( 11 , input ) ; throw eee ; } cnt11 ++ ; } while ( true ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class PersianCalendar { /** * / * [ deutsch ] * < p > Pr & uuml ; ft , ob die angegebenen Parameter ein wohldefiniertes Kalenderdatum beschreiben . < / p > * @ param yearOfEra the year of era to be checked * @ param month the month to be checked * @ param dayOfMonth the day of month to be checked * @ return { @ code true } if valid else { @ code false } * @ see # of ( int , int , int ) * @ since 3.34/4.29 */ public static boolean isValid ( int yearOfEra , int month , int dayOfMonth ) { } }
return CALSYS . isValid ( PersianEra . ANNO_PERSICO , yearOfEra , month , dayOfMonth ) ;
public class CudaZeroHandler { /** * This method returns sets of allocation tracking IDs for specific bucket * @ param bucketId * @ return */ @ Override public Set < Long > getHostTrackingPoints ( Long bucketId ) { } }
if ( ! zeroAllocations . containsKey ( bucketId ) ) { return new HashSet < > ( ) ; } return zeroAllocations . get ( bucketId ) . keySet ( ) ;
public class Driver { /** * < p > Try to make a database connection to the given URL . The driver should return " null " if it * realizes it is the wrong kind of driver to connect to the given URL . This will be common , as * when the JDBC driverManager is asked to connect to a given URL , it passes the URL to each * loaded driver in turn . < / p > * < p > The driver should raise an SQLException if it is the right driver to connect to the given URL , * but has trouble connecting to the database . < / p > * < p > The java . util . Properties argument can be used to pass arbitrary string tag / value pairs as * connection arguments . < / p > * < ul > * < li > user - ( required ) The user to connect as < / li > * < li > password - ( optional ) The password for the user < / li > * < li > ssl - ( optional ) Use SSL when connecting to the server < / li > * < li > readOnly - ( optional ) Set connection to read - only by default < / li > * < li > charSet - ( optional ) The character set to be used for converting to / from * the database to unicode . If multibyte is enabled on the server then the character set of the * database is used as the default , otherwise the jvm character encoding is used as the default . * This value is only used when connecting to a 7.2 or older server . < / li > * < li > loglevel - ( optional ) Enable logging of messages from the driver . The value is an integer * from 0 to 2 where : OFF = 0 , INFO = 1 , DEBUG = 2 The output is sent to * DriverManager . getPrintWriter ( ) if set , otherwise it is sent to System . out . < / li > * < li > compatible - ( optional ) This is used to toggle between different functionality * as it changes across different releases of the jdbc driver code . The values here are versions * of the jdbc client and not server versions . For example in 7.1 get / setBytes worked on * LargeObject values , in 7.2 these methods were changed to work on bytea values . 
This change in * functionality could be disabled by setting the compatible level to be " 7.1 " , in which case the * driver will revert to the 7.1 functionality . < / li > * < / ul > * < p > Normally , at least " user " and " password " properties should be included in the properties . For a * list of supported character encoding , see * http : / / java . sun . com / products / jdk / 1.2 / docs / guide / internat / encoding . doc . html Note that you will * probably want to have set up the Postgres database itself to use the same encoding , with the * { @ code - E < encoding > } argument to createdb . < / p > * < p > Our protocol takes the forms : < / p > * < pre > * jdbc : postgresql : / / host : port / database ? param1 = val1 & amp ; . . . * < / pre > * @ param url the URL of the database to connect to * @ param info a list of arbitrary tag / value pairs as connection arguments * @ return a connection to the URL or null if it isnt us * @ exception SQLException if a database access error occurs or the url is * { @ code null } * @ see java . sql . Driver # connect */ @ Override public Connection connect ( String url , Properties info ) throws SQLException { } }
if ( url == null ) { throw new SQLException ( "url is null" ) ; } // get defaults Properties defaults ; if ( ! url . startsWith ( "jdbc:postgresql:" ) ) { return null ; } try { defaults = getDefaultProperties ( ) ; } catch ( IOException ioe ) { throw new PSQLException ( GT . tr ( "Error loading default settings from driverconfig.properties" ) , PSQLState . UNEXPECTED_ERROR , ioe ) ; } // override defaults with provided properties Properties props = new Properties ( defaults ) ; if ( info != null ) { Set < String > e = info . stringPropertyNames ( ) ; for ( String propName : e ) { String propValue = info . getProperty ( propName ) ; if ( propValue == null ) { throw new PSQLException ( GT . tr ( "Properties for the driver contains a non-string value for the key " ) + propName , PSQLState . UNEXPECTED_ERROR ) ; } props . setProperty ( propName , propValue ) ; } } // parse URL and add more properties if ( ( props = parseURL ( url , props ) ) == null ) { return null ; } try { // Setup java . util . logging . Logger using connection properties . setupLoggerFromProperties ( props ) ; LOGGER . log ( Level . FINE , "Connecting with URL: {0}" , url ) ; // Enforce login timeout , if specified , by running the connection // attempt in a separate thread . If we hit the timeout without the // connection completing , we abandon the connection attempt in // the calling thread , but the separate thread will keep trying . // Eventually , the separate thread will either fail or complete // the connection ; at that point we clean up the connection if // we managed to establish one after all . See ConnectThread for // more details . long timeout = timeout ( props ) ; if ( timeout <= 0 ) { return makeConnection ( url , props ) ; } ConnectThread ct = new ConnectThread ( url , props ) ; Thread thread = new Thread ( ct , "PostgreSQL JDBC driver connection thread" ) ; thread . setDaemon ( true ) ; // Don ' t prevent the VM from shutting down thread . start ( ) ; return ct . 
getResult ( timeout ) ; } catch ( PSQLException ex1 ) { LOGGER . log ( Level . FINE , "Connection error: " , ex1 ) ; // re - throw the exception , otherwise it will be caught next , and a // org . postgresql . unusual error will be returned instead . throw ex1 ; } catch ( java . security . AccessControlException ace ) { throw new PSQLException ( GT . tr ( "Your security policy has prevented the connection from being attempted. You probably need to grant the connect java.net.SocketPermission to the database server host and port that you wish to connect to." ) , PSQLState . UNEXPECTED_ERROR , ace ) ; } catch ( Exception ex2 ) { LOGGER . log ( Level . FINE , "Unexpected connection error: " , ex2 ) ; throw new PSQLException ( GT . tr ( "Something unusual has occurred to cause the driver to fail. Please report this exception." ) , PSQLState . UNEXPECTED_ERROR , ex2 ) ; }
public class DropboxAuth { /** * Start the process of getting a Dropbox API access token for the user ' s Dropbox account . */ public void doStart ( HttpServletRequest request , HttpServletResponse response ) throws IOException , ServletException { } }
if ( ! common . checkPost ( request , response ) ) return ; User user = common . requireLoggedInUser ( request , response ) ; if ( user == null ) return ; // Start the authorization process with Dropbox . DbxWebAuth . Request authRequest = DbxWebAuth . newRequestBuilder ( ) // After we redirect the user to the Dropbox website for authorization , // Dropbox will redirect them back here . . withRedirectUri ( getRedirectUri ( request ) , getSessionStore ( request ) ) . build ( ) ; String authorizeUrl = getWebAuth ( request ) . authorize ( authRequest ) ; // Redirect the user to the Dropbox website so they can approve our application . // The Dropbox website will send them back to / dropbox - auth - finish when they ' re done . response . sendRedirect ( authorizeUrl ) ;
public class WIMXPathInterpreter { /** * Simple grammar * < LocationPath > : = " / / " < Noun > ( < Predicate > ) < Predicate > : = " [ " * < PredicateExpr > " ] " < RelationaExpr > : = < FAName > < comp _ operator > * ( < Literal > | < Number > ) / / add negative numbers support < comp _ operator > : = * ' < ' | ' > ' | ' < = ' | ' > = ' | ' = ' | ' ! = ' < Name > : = < Prefix > < ' : ' > < NCName > * < Prefix > : = < NCName > < NCName > : = ( < Letter | ' _ ' ) < NCNameChar > * < NCNameChar > : = ( < Letter > | < Digit > | ' . ' | ' _ ' | ' - ' ) < Letter > : = * [ a - zA - Z ] < Digit > : = [ 0-9 ] < Noun > : = < Name > */ final public XPathNode parse ( MetadataMapper aMetaDataMapper ) throws ParseException , AttributeNotSupportedException { } }
_metaDataMapper = aMetaDataMapper ; if ( _metaDataMapper != null ) { checkLocation = true ; } XPath ( ) ; { if ( true ) return node ; } throw new Error ( "Missing return statement in function" ) ;
public class Exceptional { /** * Applies custom operator on { @ code Exceptional } . * @ param < R > the type of the result * @ param function a transforming function * @ return a result of the transforming function * @ throws NullPointerException if { @ code function } is null * @ since 1.1.9 */ @ Nullable public < R > R custom ( @ NotNull Function < Exceptional < T > , R > function ) { } }
Objects . requireNonNull ( function ) ; return function . apply ( this ) ;
public class Shortcode { /** * Appends an attribute value with appropriate quoting . * @ param value The value . * @ param buf The buffer to append to . * @ return The input buffer . */ private StringBuilder appendAttributeValue ( final String value , final StringBuilder buf ) { } }
if ( value . contains ( "\"" ) ) { buf . append ( "\'" ) . append ( escapeAttribute ( value ) ) . append ( "\'" ) ; } else if ( value . contains ( " " ) || value . contains ( "\'" ) || value . contains ( "=" ) ) { buf . append ( "\"" ) . append ( escapeAttribute ( value ) ) . append ( "\"" ) ; } else { buf . append ( escapeAttribute ( value ) ) ; } return buf ;
public class CmsModuleUpdater { /** * Reads the module data from an import zip file . < p > * @ param cms the CMS context * @ param importFile the import file * @ param report the report to write to * @ return the module data * @ throws CmsException if something goes wrong */ public static CmsModuleImportData readModuleData ( CmsObject cms , String importFile , I_CmsReport report ) throws CmsException { } }
CmsModuleImportData result = new CmsModuleImportData ( ) ; CmsModule module = CmsModuleImportExportHandler . readModuleFromImport ( importFile ) ; cms = OpenCms . initCmsObject ( cms ) ; String importSite = module . getImportSite ( ) ; if ( ! CmsStringUtil . isEmptyOrWhitespaceOnly ( importSite ) ) { cms . getRequestContext ( ) . setSiteRoot ( importSite ) ; } else { String siteToSet = cms . getRequestContext ( ) . getSiteRoot ( ) ; if ( "" . equals ( siteToSet ) ) { siteToSet = "/" ; } module . setSite ( siteToSet ) ; } result . setModule ( module ) ; result . setCms ( cms ) ; CmsImportResourceDataReader importer = new CmsImportResourceDataReader ( result ) ; CmsImportParameters params = new CmsImportParameters ( importFile , "/" , false ) ; importer . importData ( cms , report , params ) ; // This only reads the module data into Java objects return result ;
public class DbPro { /** * Batch update records using the columns names of the first record in recordList . * Ensure all the records can use the same sql as the first record . * @ param tableName the table name * @ param primaryKey the primary key of the table , composite primary key is separated by comma character : " , " */ public int [ ] batchUpdate ( String tableName , String primaryKey , List < Record > recordList , int batchSize ) { } }
if ( recordList == null || recordList . size ( ) == 0 ) return new int [ 0 ] ; String [ ] pKeys = primaryKey . split ( "," ) ; config . dialect . trimPrimaryKeys ( pKeys ) ; Record record = recordList . get ( 0 ) ; Map < String , Object > cols = record . getColumns ( ) ; List < String > colNames = new ArrayList < String > ( ) ; // the same as the iterator in Dialect . forDbUpdate ( ) to ensure the order of the columns for ( Entry < String , Object > e : cols . entrySet ( ) ) { String col = e . getKey ( ) ; if ( config . dialect . isPrimaryKey ( col , pKeys ) == false ) colNames . add ( col ) ; } for ( String pKey : pKeys ) colNames . add ( pKey ) ; String columns = StrKit . join ( colNames . toArray ( new String [ colNames . size ( ) ] ) , "," ) ; Object [ ] idsNoUse = new Object [ pKeys . length ] ; StringBuilder sql = new StringBuilder ( ) ; List < Object > parasNoUse = new ArrayList < Object > ( ) ; config . dialect . forDbUpdate ( tableName , pKeys , idsNoUse , record , sql , parasNoUse ) ; return batch ( sql . toString ( ) , columns , recordList , batchSize ) ;
public class ModifyDBClusterSnapshotAttributeRequest { /** * A list of DB cluster snapshot attributes to add to the attribute specified by < code > AttributeName < / code > . * To authorize other AWS accounts to copy or restore a manual DB cluster snapshot , set this list to include one or * more AWS account IDs . To make the manual DB cluster snapshot restorable by any AWS account , set it to * < code > all < / code > . Do not add the < code > all < / code > value for any manual DB cluster snapshots that contain private * information that you don ' t want to be available to all AWS accounts . * @ param valuesToAdd * A list of DB cluster snapshot attributes to add to the attribute specified by < code > AttributeName < / code > * To authorize other AWS accounts to copy or restore a manual DB cluster snapshot , set this list to include * one or more AWS account IDs . To make the manual DB cluster snapshot restorable by any AWS account , set it * to < code > all < / code > . Do not add the < code > all < / code > value for any manual DB cluster snapshots that * contain private information that you don ' t want to be available to all AWS accounts . */ public void setValuesToAdd ( java . util . Collection < String > valuesToAdd ) { } }
if ( valuesToAdd == null ) { this . valuesToAdd = null ; return ; } this . valuesToAdd = new java . util . ArrayList < String > ( valuesToAdd ) ;
public class ClassDescriptorConstraints { /** * Makes sure that the class descriptor has a table attribute if it requires it ( i . e . it is * relevant for the repository descriptor ) . * @ param classDef The class descriptor * @ param checkLevel The current check level ( this constraint is checked in all levels ) */ private void ensureTableIfNecessary ( ClassDescriptorDef classDef , String checkLevel ) { } }
if ( classDef . getBooleanProperty ( PropertyHelper . OJB_PROPERTY_OJB_PERSISTENT , false ) ) { if ( ! classDef . hasProperty ( PropertyHelper . OJB_PROPERTY_TABLE ) ) { classDef . setProperty ( PropertyHelper . OJB_PROPERTY_TABLE , classDef . getDefaultTableName ( ) ) ; } }
public class CmsNewResourceTypeDialog { /** * Checks if form is valid . < p > * @ return true if form is valid */ private boolean isValid ( ) { } }
return m_typeID . isValid ( ) && m_typeShortName . isValid ( ) && m_parentFormatter . isValid ( ) && m_parentSchema . isValid ( ) && m_bundle . isValid ( ) && m_config . isValid ( ) && m_typeDescription . isValid ( ) && m_typeName . isValid ( ) && m_typeXPathName . isValid ( ) ;
public class OperaProfile { /** * Converts this instance to its JSON representation . * @ return the JSON representation of this profile * @ throws IOException if an I / O error occurs */ public JSONObject toJson ( ) throws IOException { } }
String base64 = new Zip ( ) . zip ( getDirectory ( ) ) ; Map < String , String > map = ImmutableMap . of ( "className" , this . getClass ( ) . getName ( ) , BASE64_JSON_KEY , base64 ) ; return new JSONObject ( map ) ;
public class TaskProxy {

    /**
     * Build a new remote session and initialize it.
     * NOTE: This is a convenience method to create a task below the current
     * APPLICATION (not below this task).
     *
     * @param properties properties to create the new remote task
     * @return The remote Task.
     * @throws RemoteException on a remoting failure
     */
    public RemoteTask createRemoteTask(Map<String, Object> properties) throws RemoteException {
        // Note: This new task's parent is MY application!
        BaseTransport transport = this.createProxyTransport(CREATE_REMOTE_TASK); // Don't use my method yet, since I don't have the returned ID
        transport.addParam(PROPERTIES, properties);
        String strID = (String) transport.sendMessageAndGetReply();
        // Note the parent is MY PARENT not ME (just like the remote hierarchy).
        return new TaskProxy((ApplicationProxy) m_parentProxy, strID);
    }
}
public class FsInfoSector { /** * Creates an new { @ code FsInfoSector } where the specified * { @ code Fat32BootSector } indicates it should be . * @ param bs the boot sector specifying the FS info sector storage * @ return the FS info sector instance that was created * @ throws IOException on write error * @ see Fat32BootSector # getFsInfoSectorNr ( ) */ public static FsInfoSector create ( Fat32BootSector bs ) throws IOException { } }
final int offset = offset ( bs ) ; if ( offset == 0 ) throw new IOException ( "creating a FS info sector at offset 0 is strange" ) ; final FsInfoSector result = new FsInfoSector ( bs . getDevice ( ) , offset ( bs ) ) ; result . init ( ) ; result . write ( ) ; return result ;
public class JsonWriter { /** * Changes the writer to treat the next value as a string name . This is useful for map adapters so * that arbitrary type adapters can use { @ link # value } to write a name value . */ final void promoteValueToName ( ) throws IOException { } }
int context = peekScope ( ) ; if ( context != NONEMPTY_OBJECT && context != EMPTY_OBJECT ) { throw new IllegalStateException ( "Nesting problem." ) ; } promoteValueToName = true ;
public class LockFile { /** * Retrieves the interval , in milliseconds , that < tt > pollHeartbeat < / tt > * waits between failed invocations of < tt > checkHeartbeat < / tt > . * The value is obtained in the following manner : < p > * < ol > * < li > interval is assigned < tt > 10 + ( HEARTBEAT _ INTERVAL _ PADDED * getPollHeartbeatRetries ( ) ) < / tt > * < li > interval is assigned < tt > Long . getLong ( POLL _ INTERVAL _ PROPERTY , * interval ) < / tt > , inside a try - catch block , to silently ignore any security * exception . * < li > If interval is less than or equal to zero ( 0 ) , interval is reassigned * < tt > 10 + ( HEARTBEAT _ INTERVAL _ PADDED / getPollHeartbeatRetries ( ) ) < / tt > * < / ol > * @ return the interval , in milliseconds , that < tt > pollHeartbeat < / tt > * waits between failed invocations of < tt > checkHeartbeat < / tt > */ public long getPollHeartbeatInterval ( ) { } }
int retries = getPollHeartbeatRetries ( ) ; long interval = 10 + ( HEARTBEAT_INTERVAL_PADDED / retries ) ; try { interval = Long . getLong ( POLL_INTERVAL_PROPERTY , interval ) . longValue ( ) ; } catch ( Exception e ) { } if ( interval <= 0 ) { interval = 10 + ( HEARTBEAT_INTERVAL_PADDED / retries ) ; } return interval ;
public class StylesheetHandler { /** * Flush the characters buffer . * @ throws org . xml . sax . SAXException */ private void flushCharacters ( ) throws org . xml . sax . SAXException { } }
XSLTElementProcessor elemProcessor = getCurrentProcessor ( ) ; if ( null != elemProcessor ) elemProcessor . startNonText ( this ) ;
public class RemoteConsumerReceiver { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPDeliveryReceiverControllable # getStreamID ( ) */ public SIBUuid12 getStreamID ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getStreamID" ) ; SIBUuid12 returnValue = aiStream . getStreamId ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getStreamID" , returnValue ) ; return returnValue ;
public class AliasStrings { /** * Replaces a string literal with a reference to the string ' s alias variable . */ private void replaceStringWithAliasName ( StringOccurrence occurrence , String name , StringInfo info ) { } }
Node nameNode = IR . name ( name ) ; occurrence . parent . replaceChild ( occurrence . node , nameNode ) ; info . isAliased = true ; compiler . reportChangeToEnclosingScope ( nameNode ) ;
public class PrintStmt { /** * Gets the esjp select . * @ param _ instances the _ instances * @ return the esjp select * @ throws Exception the exception */ private Map < String , IEsjpSelect > getEsjpSelect ( final List < Instance > _instances ) throws Exception { } }
final Map < String , IEsjpSelect > ret = new HashMap < > ( ) ; if ( ! _instances . isEmpty ( ) ) { for ( final Entry < String , AbstractSelect > entry : getAlias2Selects ( ) . entrySet ( ) ) { if ( entry . getValue ( ) instanceof ExecSelect ) { final Class < ? > clazz = Class . forName ( entry . getValue ( ) . getSelect ( ) , false , EFapsClassLoader . getInstance ( ) ) ; final IEsjpSelect esjp = ( IEsjpSelect ) clazz . newInstance ( ) ; final List < String > parameters = ( ( ExecSelect ) entry . getValue ( ) ) . getParameters ( ) ; if ( parameters . isEmpty ( ) ) { esjp . initialize ( _instances ) ; } else { esjp . initialize ( _instances , parameters . toArray ( new String [ parameters . size ( ) ] ) ) ; } ret . put ( entry . getKey ( ) , esjp ) ; } } } return ret ;
public class HeatChart { /** * Sets the x - values which are plotted along the x - axis . The x - values are calculated based upon * the indexes of the z - values array : * < pre > * { @ code * < pre > * x - value = x - offset + ( column - index * x - interval ) * < / pre > * < / pre > * The x - interval defines the gap between each x - value and the x - offset is applied to each value * to offset them all from zero . * Alternatively the x - values can be set more directly with the < code > setXValues ( Object [ ] ) < / code > * method . * @ param xOffset an offset value to be applied to the index of each z - value element . * @ param xInterval an interval that will separate each x - value item . */ public void setXValues ( double xOffset , double xInterval ) { } }
// Update the x - values according to the offset and interval . xValues = new Object [ zValues [ 0 ] . length ] ; for ( int i = 0 ; i < zValues [ 0 ] . length ; i ++ ) { xValues [ i ] = xOffset + ( i * xInterval ) ; }
public class PerformanceMonitorBeanDefinitionDecorator {

    /**
     * Reads an attribute node's value and reports whether it equals
     * {@code "true"} (case-insensitive); any other value yields {@code false}.
     *
     * @param source an Attribute node from the Spring configuration
     * @return whether the attribute is set to "true"
     */
    private boolean getBooleanAttributeValue(Node source) {
        final String value = ((Attr) source).getValue();
        return "true".equalsIgnoreCase(value);
    }
}
public class AnnotationParser { /** * Handle a file with an annotation handler . * @ param file * @ param ah * @ throws java . lang . Exception */ public static void handle ( File srcFile , AnnotationHandler ah ) throws Exception { } }
FileInputStream fis = new FileInputStream ( srcFile ) ; FileChannel fc = fis . getChannel ( ) ; // Get a CharBuffer from the source file ByteBuffer bb = fc . map ( FileChannel . MapMode . READ_ONLY , 0 , fc . size ( ) ) ; CharsetDecoder cd = Charset . forName ( "8859_1" ) . newDecoder ( ) ; CharBuffer cb = cd . decode ( bb ) ; // handle the content . ah . start ( cb . toString ( ) ) ; handle ( cb . toString ( ) , ah ) ; ah . done ( ) ; fis . close ( ) ;
public class BoundingBox { /** * This takes the currently defined values found inside this instance and converts it to a GeoJson * string . * @ return a JSON string which represents this Bounding box * @ since 3.0.0 */ public final String toJson ( ) { } }
Gson gson = new GsonBuilder ( ) . registerTypeAdapter ( BoundingBox . class , new BoundingBoxTypeAdapter ( ) ) . create ( ) ; return gson . toJson ( this , BoundingBox . class ) ;
public class CATAsynchConsumer {

    /**
     * Sends the message to our peer in chunks as given to us by MFP. This is
     * much easier on the Java memory manager as it doesn't require the
     * allocation of an enormous byte array. Messages below the chunking
     * threshold are delegated to {@code sendEntireMessage} instead.
     *
     * @param sibMessage the message to encode and send
     * @param lastMsg    whether this is the last message in the batch
     * @param priority   the message priority (mapped to a JFAP priority)
     * @return true if the message was sent; false if a comms error occurred
     * @throws MessageCopyFailedException
     * @throws IncorrectMessageTypeException
     * @throws MessageEncodeFailedException
     * @throws UnsupportedEncodingException
     */
    private boolean sendChunkedMessage(SIBusMessage sibMessage, boolean lastMsg, Integer priority)
        throws MessageEncodeFailedException, IncorrectMessageTypeException,
               MessageCopyFailedException, UnsupportedEncodingException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "sendChunkedMessage",
                        new Object[] { sibMessage, lastMsg, priority });

        // Flag to indicate a Comms error so that we stop sending messages
        boolean ok = true;
        int jfapPriority = JFapChannelConstants.getJFAPPriority(priority);
        int msgLen = 0;

        // First of all we must encode the message ourselves
        CommsServerByteBuffer buffer = poolManager.allocate();
        ConversationState convState = (ConversationState) getConversation().getAttachment();

        try {
            List<DataSlice> messageSlices =
                buffer.encodeFast((JsMessage) sibMessage, convState.getCommsConnection(),
                                  getConversation());

            // Do a check on the size of the message. If it is less than our threshold,
            // forget the chunking and simply send the message as one
            for (DataSlice slice : messageSlices)
                msgLen += slice.getLength();

            if (msgLen < CommsConstants.MINIMUM_MESSAGE_SIZE_FOR_CHUNKING) {
                // The message is a tiddler, send it in one
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Message is smaller than " +
                                CommsConstants.MINIMUM_MESSAGE_SIZE_FOR_CHUNKING);
                sendEntireMessage(sibMessage, messageSlices, lastMsg, priority);
            } else {
                // Now we have the slices, send each one in turn. Each slice contains all
                // the header information so that the client code knows what to do with
                // the message
                for (int x = 0; x < messageSlices.size(); x++) {
                    DataSlice slice = messageSlices.get(x);

                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "Sending slice:", slice);

                    boolean first = (x == 0);
                    boolean last = (x == (messageSlices.size() - 1));
                    byte flags = 0;

                    // Work out the flags to send: FIRST/LAST may both be set for a
                    // two-slice message; MIDDLE only for interior slices.
                    if (first) flags |= CommsConstants.CHUNKED_MESSAGE_FIRST;
                    if (last) flags |= CommsConstants.CHUNKED_MESSAGE_LAST;
                    else if (!first) flags |= CommsConstants.CHUNKED_MESSAGE_MIDDLE;

                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "Flags: " + flags);

                    if (!first) {
                        // This isn't the first slice, grab a fresh buffer
                        buffer = poolManager.allocate();
                    }

                    // Set a flag to indicate the last in batch
                    short msgFlags = CommsConstants.ASYNC_START_OR_MID_BATCH;
                    if (lastMsg) msgFlags |= CommsConstants.ASYNC_LAST_IN_BATCH;

                    // Now add all the header information
                    buffer.putShort(convState.getConnectionObjectId());
                    buffer.putShort(mainConsumer.getClientSessionId());
                    buffer.putShort(msgFlags);
                    buffer.putShort(mainConsumer.getMessageBatchNumber()); // BIT16 Message batch
                    buffer.put(flags);
                    buffer.putDataSlice(slice);

                    getConversation().send(buffer,
                                           JFapChannelConstants.SEG_CHUNKED_ASYNC_MESSAGE,
                                           0, // No request number
                                           jfapPriority,
                                           false,
                                           ThrottlingPolicy.BLOCK_THREAD,
                                           null);
                }
            }
        } catch (SIException e) {
            FFDCFilter.processException(e, CLASS_NAME + ".sendChunkedMessage",
                                        CommsConstants.CATASYNCHCONSUMER_SENDCHUNKEDMESS_01,
                                        this);
            ok = false;
            SibTr.error(tc, "COMMUNICATION_ERROR_SICO2017", e);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "sendChunkedMessage", ok);
        return ok;
    }
}
public class HubState {

    /**
     * Starts the hub: creates an embedded Derby data source, initializes the
     * {@code JobHub} service around it, and binds an authentication server
     * into the RMI registry. Failures are logged and printed; no exception
     * escapes this method.
     */
    @CommandArgument
    public void start() {
        System.out.println("Starting hub");
        try {
            // Load the embedded Derby driver and create (if needed) the
            // "classes" database backing the hub.
            Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
            EmbeddedDataSource ds = new EmbeddedDataSource();
            ds.setConnectionAttributes("create=true");
            ds.setDatabaseName("classes");
            JobHub.prepareDataSource(ds);

            logger.info("Initializing service");
            jobHub = new JobHub(ds);
            // Authentication front-end listening on the default JDCP port.
            AuthenticationServer authServer =
                new AuthenticationServer(jobHub, JdcpUtil.DEFAULT_PORT);

            logger.info("Binding service");
            Registry registry = getRegistry();
            // bind (not rebind): fails if a service is already registered
            // under this name.
            registry.bind("AuthenticationService", authServer);

            logger.info("Hub ready");
            System.out.println("Hub started");
        } catch (Exception e) {
            // Broad catch is intentional: startup failure is reported to both
            // the console and the log, and the process keeps running.
            System.err.println("Failed to start hub");
            logger.error("Failed to start hub", e);
        }
    }
}
public class ServerOperations { /** * START SNIPPET : searchParamAdvanced */ @ Operation ( name = "$find-matches" , idempotent = true ) public Parameters findMatchesAdvanced ( @ OperationParam ( name = "dateRange" ) DateRangeParam theDate , @ OperationParam ( name = "name" ) List < StringParam > theName , @ OperationParam ( name = "code" ) TokenAndListParam theEnd ) { } }
Parameters retVal = new Parameters ( ) ; // Populate bundle with matching resources return retVal ;
public class AppCacheLinker { /** * Standard linker that also outputs a manifest file based on the public * resources */ @ Override public ArtifactSet link ( TreeLogger logger , LinkerContext context , ArtifactSet artifacts ) throws UnableToCompleteException { } }
ArtifactSet toReturn = super . link ( logger , context , artifacts ) ; // Create the general cache - manifest resource for the landing page : toReturn . add ( emitLandingPageCacheManifest ( context , logger , toReturn , staticCachedFiles ( ) ) ) ; return toReturn ;
public class SimpleExpression {

    /**
     * Create a {@code this not in right} expression.
     *
     * @param right rhs of the comparison
     * @return {@code this not in right}
     */
    public final BooleanExpression notIn(Expression<? extends T>... right) {
        // Wrap the varargs into a single list expression and build the
        // NOT_IN boolean operation over this expression's mixin.
        return Expressions.booleanOperation(Ops.NOT_IN, mixin, Expressions.list(right));
    }
}
public class KryoInstantiator { /** * Use this to set a specific classloader */ public KryoInstantiator setClassLoader ( final ClassLoader cl ) { } }
return new KryoInstantiator ( ) { public Kryo newKryo ( ) { Kryo k = KryoInstantiator . this . newKryo ( ) ; k . setClassLoader ( cl ) ; return k ; } } ;
public class BitmapAdapter { @ Override public void prepareSlide ( int position ) { } }
BitmapCache bc = cachedBitmaps . get ( position ) ; if ( bc != null && bc . bitmap != null && bc . bitmap . get ( ) != null ) { bc . bitmap . get ( ) . recycle ( ) ; bc . bitmap . clear ( ) ; } bc = new BitmapCache ( ) ; cachedBitmaps . put ( position , bc ) ; loadBitmap ( position ) ;
public class Matrix4x3d {

    /**
     * Apply a projection transformation to this matrix that projects onto the
     * plane specified via the general plane equation
     * <code>x*a + y*b + z*c + d = 0</code> as if casting a shadow from a given
     * light position/direction <code>light</code>.
     * <p>
     * If <code>light.w</code> is <code>0.0</code> the light is treated as a
     * directional light; if it is <code>1.0</code> it is a point light.
     * If <code>M</code> is <code>this</code> matrix and <code>S</code> the
     * shadow matrix, the new matrix will be <code>M * S</code>, so the
     * shadow transformation is applied first when transforming a vector.
     *
     * @param light the light's vector
     * @param a     the x factor in the plane equation
     * @param b     the y factor in the plane equation
     * @param c     the z factor in the plane equation
     * @param d     the constant in the plane equation
     * @return this
     */
    public Matrix4x3d shadow(Vector4dc light, double a, double b, double c, double d) {
        // Unpack the light vector and delegate to the component-wise overload,
        // storing the result in this matrix.
        return shadow(light.x(), light.y(), light.z(), light.w(), a, b, c, d, this);
    }
}
public class DistanceMatrixPrinter {

    /**
     * Print a matrix representation of the distances between each pair of
     * categories of the given study.
     *
     * @param out              the stream to print to
     * @param study            the study providing the categories
     * @param distanceFunction the pairwise distance function
     */
    public void print(final PrintStream out, final ICodingAnnotationStudy study,
            final IDistanceFunction distanceFunction) {
        // Delegates to the internal printer using the study's own categories
        // as both matrix axes.
        doPrint(out, study.getCategories(), study, distanceFunction);
    }
}
public class ListRegexPatternSetsResult { /** * An array of < a > RegexPatternSetSummary < / a > objects . * @ param regexPatternSets * An array of < a > RegexPatternSetSummary < / a > objects . */ public void setRegexPatternSets ( java . util . Collection < RegexPatternSetSummary > regexPatternSets ) { } }
if ( regexPatternSets == null ) { this . regexPatternSets = null ; return ; } this . regexPatternSets = new java . util . ArrayList < RegexPatternSetSummary > ( regexPatternSets ) ;
public class ParquetGroupConverter { /** * check if a parquet type is a valid ' map ' type */ private static boolean isLogicalMapType ( Type groupType ) { } }
OriginalType ot = groupType . getOriginalType ( ) ; if ( groupType . isPrimitive ( ) || ot == null || groupType . isRepetition ( Type . Repetition . REPEATED ) ) { return false ; } if ( groupType . getOriginalType ( ) . equals ( OriginalType . MAP ) || groupType . getOriginalType ( ) . equals ( OriginalType . MAP_KEY_VALUE ) ) { GroupType myMapType = groupType . asGroupType ( ) ; if ( myMapType . getFieldCount ( ) != 1 || myMapType . getFields ( ) . get ( 0 ) . isPrimitive ( ) ) { return false ; } GroupType mapItemType = myMapType . getFields ( ) . get ( 0 ) . asGroupType ( ) ; return mapItemType . isRepetition ( Type . Repetition . REPEATED ) && mapItemType . getFieldCount ( ) == 2 && mapItemType . getFields ( ) . get ( 0 ) . getName ( ) . equalsIgnoreCase ( "key" ) && mapItemType . getFields ( ) . get ( 0 ) . isPrimitive ( ) && mapItemType . getFields ( ) . get ( 1 ) . getName ( ) . equalsIgnoreCase ( "value" ) ; } return false ;
public class NumberUtil {

    /**
     * Appends every integer in the inclusive range [start, stop] to the given
     * collection, stepping by {@code step}. The sign of {@code step} is
     * normalized to match the direction from {@code start} to {@code stop};
     * when {@code start == stop} only that single value is added.
     *
     * @param start  first value (inclusive)
     * @param stop   last value (inclusive)
     * @param step   step size; magnitude is used, direction is derived
     * @param values the collection to append to
     * @return the same collection instance
     * @throws IllegalArgumentException if {@code step} is 0 and
     *         {@code start != stop} (the original code looped forever for a
     *         descending range with step 0)
     */
    public static Collection<Integer> appendRange(int start, int stop, int step,
            Collection<Integer> values) {
        if (start < stop) {
            if (step == 0) {
                throw new IllegalArgumentException("step must not be 0");
            }
            step = Math.abs(step);
        } else if (start > stop) {
            if (step == 0) {
                // Previously this case never terminated: i += 0 can never
                // reach stop when start > stop.
                throw new IllegalArgumentException("step must not be 0");
            }
            step = -Math.abs(step);
        } else { // start == stop
            values.add(start);
            return values;
        }
        // NOTE(review): i += step can overflow near Integer.MAX_VALUE for an
        // ascending range; kept as-is to preserve existing behavior.
        for (int i = start; (step > 0) ? i <= stop : i >= stop; i += step) {
            values.add(i);
        }
        return values;
    }
}
public class JavascriptEngine { /** * Evaluates the script * @ param scriptName * the script name * @ param reader * the script reader * @ return the result */ public Object evaluate ( String scriptName , Reader reader ) { } }
try { scriptEngine . put ( ScriptEngine . FILENAME , scriptName ) ; return scriptEngine . eval ( reader ) ; } catch ( ScriptException e ) { throw new BundlingProcessException ( "Error while evaluating script : " + scriptName , e ) ; }
public class StreamManager { /** * To take into account overhead due to underlying protocols ( e . g . TCP / IP ) * @ param payloadPercentage a ] 0 , 100 ] value . where 100 means that the required * downstream / upstream bandwidth will be full used for * sending payload . * Default value is 95 % . * The default value is applied if an out of boundaries value is passed in . */ public void setPayloadPercentage ( int payloadPercentage ) { } }
if ( ! ( payloadPercentage > 0 && payloadPercentage <= 100 ) ) { // if an invalid percentage is given payloadPercentage = 95 ; } this . actualPayloadPercentage = ( double ) payloadPercentage / 100 ; setMaxBps ( this . downStream , this . downStream . maxBps ) ; setMaxBps ( this . upStream , this . upStream . maxBps ) ;
public class NestedClassWriterImpl {

    /**
     * {@inheritDoc}
     */
    @Override
    protected void addInheritedSummaryLink(TypeElement typeElement, Element member,
            Content linksTree) {
        // Adds a MEMBER-kind link for the inherited nested class to the
        // summary tree.
        linksTree.addContent(writer.getLink(
                new LinkInfoImpl(configuration, LinkInfoImpl.Kind.MEMBER, (TypeElement) member)));
    }
}
public class VoiceApi { /** * Switch to listen in * Switch to the listen in monitoring mode . When listen in is enabled and the agent receives a call , the supervisor is able to listen to the agent and the customer , but they can & # 39 ; t hear the supervisor . * @ param id The connection ID of the call being monitored . ( required ) * @ param monitoringScopeData ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > switchToListenInWithHttpInfo ( String id , MonitoringScopeData monitoringScopeData ) throws ApiException { } }
com . squareup . okhttp . Call call = switchToListenInValidateBeforeCall ( id , monitoringScopeData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class SeaGlassGraphicsUtils { /** * Paints an icon and text . This will render the text as html , if necessary , * and offset the location by the insets of the component . * @ param ss * SynthContext * @ param g * Graphics to render string and icon into * @ param text * Text to layout * @ param icon * Icon to layout * @ param hAlign * horizontal alignment * @ param vAlign * vertical alignment * @ param hTextPosition * horizontal text position * @ param vTextPosition * vertical text position * @ param iconTextGap * gap between icon and text * @ param mnemonicIndex * Index into text to render the mnemonic at , - 1 indicates no * mnemonic . * @ param textOffset * Amount to offset the text when painting */ public void paintText ( SynthContext ss , Graphics g , String text , Icon icon , int hAlign , int vAlign , int hTextPosition , int vTextPosition , int iconTextGap , int mnemonicIndex , int textOffset ) { } }
if ( ( icon == null ) && ( text == null ) ) { return ; } Graphics2D g2d = ( Graphics2D ) g . create ( ) ; g2d . setRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING , RenderingHints . VALUE_TEXT_ANTIALIAS_ON ) ; JComponent c = ss . getComponent ( ) ; FontMetrics fm = SwingUtilities2 . getFontMetrics ( c , g2d ) ; Insets insets = SeaGlassLookAndFeel . getPaintingInsets ( ss , paintInsets ) ; paintViewR . x = insets . left ; paintViewR . y = insets . top ; paintViewR . width = c . getWidth ( ) - ( insets . left + insets . right ) ; paintViewR . height = c . getHeight ( ) - ( insets . top + insets . bottom ) ; paintIconR . x = paintIconR . y = paintIconR . width = paintIconR . height = 0 ; paintTextR . x = paintTextR . y = paintTextR . width = paintTextR . height = 0 ; String clippedText = layoutText ( ss , fm , text , icon , hAlign , vAlign , hTextPosition , vTextPosition , paintViewR , paintIconR , paintTextR , iconTextGap ) ; if ( icon != null ) { Color color = g2d . getColor ( ) ; paintIconR . x += textOffset ; paintIconR . y += textOffset ; SeaGlassIcon . paintIcon ( icon , ss , g2d , paintIconR . x , paintIconR . y , paintIconR . width , paintIconR . height ) ; g2d . setColor ( color ) ; } if ( text != null ) { View v = ( View ) c . getClientProperty ( BasicHTML . propertyKey ) ; if ( v != null ) { v . paint ( g2d , paintTextR ) ; } else { paintTextR . x += textOffset ; paintTextR . y += textOffset ; paintText ( ss , g2d , clippedText , paintTextR , mnemonicIndex ) ; } }
public class XmlPackageReader { /** * Read a < code > RuleSet < / code > from a < code > Reader < / code > . * @ param reader * The reader containing the rule - set . * @ return The rule - set . */ public PackageDescr read ( final Reader reader ) throws SAXException , IOException { } }
this . packageDescr = ( PackageDescr ) this . parser . read ( reader ) ; return this . packageDescr ;
public class StateHandler { /** * Check the current state . * @ param state The state to check . * @ return < code > true < / code > if it is this state , < code > false < / code > else . */ public boolean isState ( Class < ? extends State > state ) { } }
if ( current != null ) { return current . getClass ( ) . equals ( state ) ; } return false ;
public class AssertSoapFault {

    /**
     * Constructs the control soap fault holding all expected fault information
     * like faultCode, faultString and faultDetail. Every value runs through
     * dynamic-content replacement against the test context before being set.
     *
     * @param context the test context used for placeholder replacement
     * @return the constructed SoapFault instance
     * @throws CitrusRuntimeException if a fault-detail file resource cannot be read
     */
    private SoapFault constructControlFault(TestContext context) {
        SoapFault controlFault = new SoapFault();

        // Fault actor is optional; only set when non-blank.
        if (StringUtils.hasText(faultActor)) {
            controlFault.faultActor(context.replaceDynamicContentInString(faultActor));
        }

        controlFault.faultCode(context.replaceDynamicContentInString(faultCode));
        controlFault.faultString(context.replaceDynamicContentInString(faultString));

        // Inline fault details.
        for (String faultDetail : faultDetails) {
            controlFault.addFaultDetail(context.replaceDynamicContentInString(faultDetail));
        }

        try {
            // File-based fault details: the path itself may contain
            // placeholders, and so may the file content.
            for (String faultDetailPath : faultDetailResourcePaths) {
                String resourcePath = context.replaceDynamicContentInString(faultDetailPath);
                controlFault.addFaultDetail(context.replaceDynamicContentInString(
                        FileUtils.readToString(FileUtils.getFileResource(resourcePath, context),
                                               FileUtils.getCharset(resourcePath))));
            }
        } catch (IOException e) {
            throw new CitrusRuntimeException(
                    "Failed to create SOAP fault detail from file resource", e);
        }

        return controlFault;
    }
}
public class DOType { /** * Answer a list of all field names declared by this type and all it ' s super types . * @ return */ public List < String > getFieldNames ( ) { } }
List < String > ret = new ArrayList < String > ( ) ; DOType typ = this ; while ( typ != null ) { ret . addAll ( typ . getDeclaredFieldNames ( ) ) ; typ = typ . getSuperType ( ) ; } return ret ;
public class CnvBnRsToEntity {

    /**
     * <p>Convert RS to entity (fill only ID).</p>
     * Builds an entity of {@code pFieldClass} whose ID fields are populated
     * from the result set, deriving each SQL column alias from the current
     * nesting level and the chain of foreign field names.
     *
     * @param pAddParam additional params, e.g. entity class UserRoleTomcat
     *        to reveal derived columns for its composite ID, or field Enum
     *        class to reveal Enum value by index
     * @param pFrom from a bean (the record set)
     * @param pName by a name (the field name in the owning entity)
     * @param pFieldClass field class
     * @param pEntityClass entity class
     * @param pCurrentLevel current levels
     * @param pDeepLevel deep levels
     * @param pForeignFieldNms foreign fields names
     * @return the entity with only its ID populated, or null if no ID value
     *         was present in the record set
     * @throws Exception on reflection/conversion failure, or
     *         ExceptionWithCode.NOT_YET_IMPLEMENTED for unsupported
     *         two-level-deep foreign IDs
     */
    public final Object convertOnlyId(final Map<String, Object> pAddParam,
            final IRecordSet<RS> pFrom, final String pName, final Class pFieldClass,
            final Class pEntityClass, final List<Integer> pCurrentLevel,
            final List<Integer> pDeepLevel,
            final List<String> pForeignFieldNms) throws Exception {
        // foreign entity's ID map
        Map<String, Object> idNmValMap = new HashMap<String, Object>();
        TableSql tableSql = this.tablesMap.get(pFieldClass.getSimpleName());
        for (String idFldNm : tableSql.getIdColumnsNames()) {
            Object idVal;
            Field rapiFld = this.fieldsRapiHolder.getFor(pFieldClass, idFldNm);
            String columnAlias;
            if (pDeepLevel.get(pDeepLevel.size() - 1) == 1
                || pCurrentLevel.get(pCurrentLevel.size() - 1)
                    > pDeepLevel.get(pDeepLevel.size() - 1)) {
                // there is only ID of foreign entity in SELECT query:
                // e.g. PERSISTABLELINE
                // top level1
                // PERSISTABLELINE.ITSPRODUCT as ITSPRODUCT
                // top level2
                // PERSISTABLEHEAD.ITSDEPARTMENT as PERSISTABLEHEADITSDEPARTMENT
                if (pCurrentLevel.get(0) == 2) {
                    if (tableSql.getIdColumnsNames().length > 1) {
                        // composite derived names:
                        columnAlias = idFldNm.toUpperCase();
                    } else {
                        columnAlias = pName.toUpperCase();
                    }
                } else {
                    int pos;
                    if (pCurrentLevel.get(pCurrentLevel.size() - 1)
                        > pDeepLevel.get(pDeepLevel.size() - 1) // deepLevel2, currLevel3
                        // overridden on level > 2
                        || pDeepLevel.get(pDeepLevel.size() - 1) == 1) {
                        // only ID - previous alias
                        pos = 2;
                    } else {
                        pos = 1;
                    }
                    String currForeignFieldNm =
                        pForeignFieldNms.get(pForeignFieldNms.size() - pos);
                    String tableAlias = currForeignFieldNm.toUpperCase();
                    if (tableSql.getIdColumnsNames().length > 1) {
                        // composite derived names:
                        columnAlias = tableAlias + idFldNm.toUpperCase();
                    } else {
                        columnAlias = tableAlias + pName.toUpperCase();
                    }
                }
            } else {
                // e.g. PREPAYMENTTO - ACCCASH.ITSID as ACCCASHITSID
                columnAlias = pName.toUpperCase() + idFldNm.toUpperCase();
            }
            if (tableSql.getFieldsMap().get(idFldNm).getForeignEntity() != null) {
                // foreign entity again, e.g. UserTomcat in
                // UserRoleTomcatPriority.userRoleTomcat.itsUser
                TableSql tableSqlFr = this.tablesMap.get(rapiFld.getType().getSimpleName());
                // Only a single, simple (non-foreign) ID is supported two
                // levels deep.
                if (tableSqlFr.getIdColumnsNames().length > 1
                    || tableSqlFr.getFieldsMap().get(tableSqlFr.getIdColumnsNames()[0])
                        .getForeignEntity() != null) {
                    String msg = "There is no rule to fill foreign2 ID - " + rapiFld.getType();
                    throw new ExceptionWithCode(ExceptionWithCode.NOT_YET_IMPLEMENTED, msg);
                }
                Field rapiFldFr = this.fieldsRapiHolder
                    .getFor(rapiFld.getType(), tableSqlFr.getIdFieldName());
                if (!(pDeepLevel.get(pDeepLevel.size() - 1) == 1
                    || pCurrentLevel.get(pCurrentLevel.size() - 1)
                        >= pDeepLevel.get(pDeepLevel.size() - 1))) {
                    // e.g. CUOrSeGdLn.sel.seller
                    columnAlias = idFldNm.toUpperCase()
                        + tableSqlFr.getIdColumnsNames()[0].toUpperCase();
                }
                Object idEntFr = getSimpleId(rapiFldFr.getType(), pFrom, columnAlias);
                if (idEntFr != null) {
                    @SuppressWarnings("unchecked")
                    IFactorySimple<Object> facEnFr = (IFactorySimple<Object>)
                        this.entitiesFactoriesFatory.lazyGet(pAddParam, rapiFld.getType());
                    idVal = facEnFr.create(pAddParam);
                    @SuppressWarnings("unchecked")
                    IFillerObjectFields<Object> filler = (IFillerObjectFields<Object>)
                        fillersFieldsFactory.lazyGet(pAddParam, rapiFld.getType());
                    filler.fill(pAddParam, idVal, idEntFr, tableSqlFr.getIdColumnsNames()[0]);
                } else {
                    idVal = null;
                }
            } else {
                // simple ID
                idVal = getSimpleId(rapiFld.getType(), pFrom, columnAlias);
            }
            if (idVal != null) {
                idNmValMap.put(idFldNm, idVal);
            }
        }
        if (idNmValMap.size() > 0) {
            @SuppressWarnings("unchecked")
            IFactorySimple<Object> facEn = (IFactorySimple<Object>)
                this.entitiesFactoriesFatory.lazyGet(pAddParam, pFieldClass);
            Object entity = facEn.create(pAddParam);
            @SuppressWarnings("unchecked")
            IFillerObjectFields<Object> filler = (IFillerObjectFields<Object>)
                fillersFieldsFactory.lazyGet(pAddParam, pFieldClass);
            // e.g. UserRoleTomcatPriority.userRoleTomcat:
            // {itsRole - String = 'role1', itsUser - UserTomcat {itsUser = 'admin'}}
            for (Map.Entry<String, Object> entry : idNmValMap.entrySet()) {
                filler.fill(pAddParam, entity, entry.getValue(), entry.getKey());
            }
            return entity;
        }
        return null;
    }
}
public class EncryptionAlgorithmOptions { /** * The set of accepted encryption algorithms that are allowed in an AWS Signer job . * @ param allowedValues * The set of accepted encryption algorithms that are allowed in an AWS Signer job . * @ return Returns a reference to this object so that method calls can be chained together . * @ see EncryptionAlgorithm */ public EncryptionAlgorithmOptions withAllowedValues ( EncryptionAlgorithm ... allowedValues ) { } }
java . util . ArrayList < String > allowedValuesCopy = new java . util . ArrayList < String > ( allowedValues . length ) ; for ( EncryptionAlgorithm value : allowedValues ) { allowedValuesCopy . add ( value . toString ( ) ) ; } if ( getAllowedValues ( ) == null ) { setAllowedValues ( allowedValuesCopy ) ; } else { getAllowedValues ( ) . addAll ( allowedValuesCopy ) ; } return this ;
public class ClassWriter {

    /**
     * Write method parameter annotations; return number of attributes written.
     * Emits at most two attributes: RuntimeVisibleParameterAnnotations (for
     * RUNTIME-retained annotations) and RuntimeInvisibleParameterAnnotations
     * (for CLASS-retained ones). SOURCE-retained annotations are skipped.
     */
    int writeParameterAttrs(MethodSymbol m) {
        // First pass: determine which of the two attributes are needed at all.
        boolean hasVisible = false;
        boolean hasInvisible = false;
        if (m.params != null) {
            for (VarSymbol s : m.params) {
                for (Attribute.Compound a : s.getRawAttributes()) {
                    switch (types.getRetention(a)) {
                    case SOURCE:
                        break;
                    case CLASS:
                        hasInvisible = true;
                        break;
                    case RUNTIME:
                        hasVisible = true;
                        break;
                    default:
                        ; // /* fail soft */ throw new AssertionError(vis);
                    }
                }
            }
        }

        int attrCount = 0;
        if (hasVisible) {
            // One per-parameter annotation count is written even for
            // parameters without RUNTIME annotations (count 0).
            int attrIndex = writeAttr(names.RuntimeVisibleParameterAnnotations);
            databuf.appendByte(m.params.length());
            for (VarSymbol s : m.params) {
                ListBuffer<Attribute.Compound> buf = new ListBuffer<Attribute.Compound>();
                for (Attribute.Compound a : s.getRawAttributes())
                    if (types.getRetention(a) == RetentionPolicy.RUNTIME)
                        buf.append(a);
                databuf.appendChar(buf.length());
                for (Attribute.Compound a : buf)
                    writeCompoundAttribute(a);
            }
            endAttr(attrIndex);
            attrCount++;
        }
        if (hasInvisible) {
            // Same structure for CLASS-retained (invisible) annotations.
            int attrIndex = writeAttr(names.RuntimeInvisibleParameterAnnotations);
            databuf.appendByte(m.params.length());
            for (VarSymbol s : m.params) {
                ListBuffer<Attribute.Compound> buf = new ListBuffer<Attribute.Compound>();
                for (Attribute.Compound a : s.getRawAttributes())
                    if (types.getRetention(a) == RetentionPolicy.CLASS)
                        buf.append(a);
                databuf.appendChar(buf.length());
                for (Attribute.Compound a : buf)
                    writeCompoundAttribute(a);
            }
            endAttr(attrIndex);
            attrCount++;
        }
        return attrCount;
    }
}
public class FileUtilities {

    /**
     * Write a list of lines to a file.
     *
     * @param lines    the list of lines to write
     * @param filePath the path to the file to write to
     * @throws IOException if the file cannot be written
     */
    public static void writeFile(List<String> lines, String filePath) throws IOException {
        // Delegates to the File-based overload.
        writeFile(lines, new File(filePath));
    }
}
public class Database { /** * Creates an attachment from the specified InputStream and a new document with a generated * document ID . * < P > Example usage : < / P > * < pre > * { @ code * byte [ ] bytesToDB = " binary data " . getBytes ( ) ; * ByteArrayInputStream bytesIn = new ByteArrayInputStream ( bytesToDB ) ; * Response response = db . saveAttachment ( bytesIn , " foo . txt " , " text / plain " ) ; * < / pre > * < p > To retrieve an attachment , see { @ link # find ( Class , String , Params ) } < / p > * @ param in The { @ link InputStream } providing the binary data . * @ param name The attachment name . * @ param contentType The attachment " Content - Type " . * @ return { @ link com . cloudant . client . api . model . Response } * @ see < a * href = " https : / / console . bluemix . net / docs / services / Cloudant / api / attachments . html # attachments " * target = " _ blank " > Attachments < / a > */ public com . cloudant . client . api . model . Response saveAttachment ( InputStream in , String name , String contentType ) { } }
Response couchDbResponse = db . saveAttachment ( in , name , contentType ) ; com . cloudant . client . api . model . Response response = new com . cloudant . client . api . model . Response ( couchDbResponse ) ; return response ;
public class ListUtil { /** * Clears out the first element that is referentially equal to the * supplied element ( < code > list [ idx ] = = element < / code > ) . * @ return the element that was removed or null if it was not found . */ public static Object clearRef ( Object [ ] list , Object element ) { } }
return clear ( REFERENCE_COMP , list , element ) ;
public class Application {
    /**
     * Change the current user to this user and (optionally) validate the password.
     * Logs in against the remote task, then copies the returned user properties
     * (user name/id, auth token, contact info, security map/level) into this
     * application's properties.
     *
     * @param task        the task used to report errors (may be null).
     * @param strUserName the user name (or numeric user id) to log in as.
     * @param strPassword the password; empty/null means anonymous-level login.
     * @param strDomain   the domain.
     * @return Constants.NORMAL_RETURN if successful, otherwise an error code
     *         (or the value of task.setLastError when a task is supplied).
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public int login(Task task, String strUserName, String strPassword, String strDomain) {
        // Only spin up a server-side session when a remote host is configured.
        boolean bCreateServer = false;
        if ((this.getProperty(Params.REMOTE_HOST) != null) && (this.getProperty(Params.REMOTE_HOST).length() > 0))
            bCreateServer = true;
        org.jbundle.thin.base.remote.RemoteTask remoteTask =
            (org.jbundle.thin.base.remote.RemoteTask) this.getRemoteTask(null, strUserName, bCreateServer);
        if (remoteTask == null) {
            if (task != null)
                return task.setLastError(this.getResources("Error", true).getString("User remote session does not exist"));
            return Constants.ERROR_RETURN;
        }
        try {
            Map<String, Object> mapLoginInfo = remoteTask.login(strUserName, strPassword, strDomain);
            String strSecurityMap = (String) mapLoginInfo.get(Params.SECURITY_MAP);
            Map<String, Object> mapReturnParams = (Map) mapLoginInfo.get(Params.USER_PROPERTIES);
            if (this.getSystemRecordOwner() != null)
                this.getSystemRecordOwner().setProperties(mapReturnParams); // Thick implementation
            else {
                if (this.getProperties() == null)
                    this.setProperties(mapReturnParams); // Thin implementation
                else
                    this.getProperties().putAll(mapReturnParams);
            }
            // A purely numeric name is treated as a user id, not a user name;
            // prefer the canonical name returned by the server when present.
            if (Util.isNumeric(strUserName))
                strUserName = null;
            if (mapReturnParams.get(Params.USER_NAME) != null)
                strUserName = (String) mapReturnParams.get(Params.USER_NAME);
            this.setProperty(Params.USER_NAME, strUserName);
            this.setProperty(Params.USER_ID, (String) mapReturnParams.get(Params.USER_ID));
            this.setProperty(Params.AUTH_TOKEN, (String) mapReturnParams.get(Params.AUTH_TOKEN)); // Save the remote authorization token
            this.setProperty(Params.CONTACT_ID, (String) mapReturnParams.get(Params.CONTACT_ID));
            this.setProperty(Params.CONTACT_TYPE, (String) mapReturnParams.get(Params.CONTACT_TYPE)); // Save the contact type returned by the server
            this.setProperty(Params.SECURITY_MAP, strSecurityMap);
            // Empty password => plain user level; otherwise authenticated.
            this.setProperty(Params.SECURITY_LEVEL,
                ((strPassword == null) || (strPassword.length() == 0))
                    ? Integer.toString(Constants.LOGIN_USER)
                    : Integer.toString(Constants.LOGIN_AUTHENTICATED));
            if (("1" /* DBConstants.ANON_USER_ID */.equals(this.getProperty(Params.USER_ID)))
                    || (this.getProperty(Params.USER_ID) == null))
                this.setProperty(Params.SECURITY_LEVEL, Integer.toString(Constants.LOGIN_USER)); // Special case - If user is anonymous, level is always anonymous
            return Constants.NORMAL_RETURN;
        } catch (RemoteException ex) {
            if (task != null) {
                // Report the root cause's message when one is available.
                String message = ex.getMessage();
                if (ex.getCause() != null)
                    message = ex.getCause().getMessage();
                return task.setLastError(message);
            }
            return Constants.ERROR_RETURN;
        }
    }
}
public class AfplibPackageImpl {
    /**
     * Returns the lazily-initialized EEnum for STOBORNTION, looked up in the
     * registered AFPLIB package's classifier list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getSTOBORNTION() {
        if (stoborntionEEnum == null) {
            // NOTE(review): index 73 is assigned by the EMF generator and must
            // match the classifier order in the model - do not edit by hand.
            stoborntionEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(73);
        }
        return stoborntionEEnum;
    }
}
public class ConnectionUtils { /** * Remove all { @ link ConnectionData } associated to the specified provider ID . * @ param profile the profile where to remove the data from * @ param providerId the provider ID of the connection */ public static void removeConnectionData ( Profile profile , String providerId ) { } }
Map < String , List < Map < String , Object > > > allConnections = profile . getAttribute ( CONNECTIONS_ATTRIBUTE_NAME ) ; if ( MapUtils . isNotEmpty ( allConnections ) ) { allConnections . remove ( providerId ) ; }
public class Parameters { /** * Given a list of strings , returns the first string in the list which exists as a parameter . If * none do , a { @ link ParameterException } is thrown . */ public String getFirstExistingParamName ( String [ ] paramNames ) { } }
for ( final String paramName : paramNames ) { if ( isPresent ( paramName ) ) { return paramName ; } } throw new ParameterException ( "One of " + Arrays . toString ( paramNames ) + " must be present" ) ;
public class Particles { /** * Creates and returns a GVRSceneObject with the specified mesh attributes . * @ param vertices the vertex positions of that make up the mesh . ( x1 , y1 , z1 , x2 , y2 , z2 , . . . ) * @ param velocities the velocity attributes for each vertex . ( vx1 , vy1 , vz1 , vx2 , vy2 , vz2 . . . ) * @ param particleTimeStamps the spawning times of each vertex . ( t1 , 0 , t2 , 0 , t3 , 0 . . ) * @ return The GVRSceneObject with this mesh . */ GVRSceneObject makeParticleMesh ( float [ ] vertices , float [ ] velocities , float [ ] particleTimeStamps ) { } }
mParticleMesh = new GVRMesh ( mGVRContext ) ; // pass the particle positions as vertices , velocities as normals , and // spawning times as texture coordinates . mParticleMesh . setVertices ( vertices ) ; mParticleMesh . setNormals ( velocities ) ; mParticleMesh . setTexCoords ( particleTimeStamps ) ; particleID = new GVRShaderId ( ParticleShader . class ) ; material = new GVRMaterial ( mGVRContext , particleID ) ; material . setVec4 ( "u_color" , mColorMultiplier . x , mColorMultiplier . y , mColorMultiplier . z , mColorMultiplier . w ) ; material . setFloat ( "u_particle_age" , mAge ) ; material . setVec3 ( "u_acceleration" , mAcceleration . x , mAcceleration . y , mAcceleration . z ) ; material . setFloat ( "u_particle_size" , mSize ) ; material . setFloat ( "u_size_change_rate" , mParticleSizeRate ) ; material . setFloat ( "u_fade" , mFadeWithAge ) ; material . setFloat ( "u_noise_factor" , mNoiseFactor ) ; GVRRenderData renderData = new GVRRenderData ( mGVRContext ) ; renderData . setMaterial ( material ) ; renderData . setMesh ( mParticleMesh ) ; material . setMainTexture ( mTexture ) ; GVRSceneObject meshObject = new GVRSceneObject ( mGVRContext ) ; meshObject . attachRenderData ( renderData ) ; meshObject . getRenderData ( ) . setMaterial ( material ) ; // Set the draw mode to GL _ POINTS , disable writing to depth buffer , enable depth testing // and set the rendering order to transparent . // Disabling writing to depth buffer ensure that the particles blend correctly // and keeping the depth test on along with rendering them // after the geometry queue makes sure they occlude , and are occluded , correctly . meshObject . getRenderData ( ) . setDrawMode ( GL_POINTS ) ; meshObject . getRenderData ( ) . setDepthTest ( true ) ; meshObject . getRenderData ( ) . setDepthMask ( false ) ; meshObject . getRenderData ( ) . setRenderingOrder ( GVRRenderData . GVRRenderingOrder . TRANSPARENT ) ; return meshObject ;
public class RelationalOperations { /** * Returns true if multipoint _ a is within point _ b . */ private static boolean multiPointWithinPoint_ ( MultiPoint multipoint_a , Point point_b , double tolerance , ProgressTracker progress_tracker ) { } }
return multiPointEqualsPoint_ ( multipoint_a , point_b , tolerance , progress_tracker ) ;
public class IntegerTermsSet {
    /**
     * Serialize the list of terms to the {@link StreamOutput}.
     * <br>
     * Given the low performance of {@link org.elasticsearch.common.io.stream.BytesStreamOutput} when writing a large number
     * of longs (5 to 10 times slower than writing directly to a byte[]), we use a small buffer of 8kb
     * to optimise the throughput. 8kb seems to be the optimal buffer size, larger buffer size did not improve
     * the throughput.
     *
     * @param out the output
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Encode flag
        out.writeBoolean(this.isPruned());
        // Encode size of list
        out.writeInt(set.size());
        // Encode ints, variable-length-encoded into a local 8kb staging buffer
        // and flushed to the stream in large chunks.
        BytesRef buffer = new BytesRef(new byte[1024 * 8]);
        Iterator<IntCursor> it = set.iterator();
        while (it.hasNext()) {
            Bytes.writeVInt(buffer, it.next().value);
            // Flush whenever fewer than 5 bytes remain - presumably the
            // maximum size of one vint-encoded int (TODO confirm against
            // Bytes.writeVInt), so the next write always fits.
            if (buffer.offset > buffer.bytes.length - 5) {
                out.write(buffer.bytes, 0, buffer.offset);
                buffer.offset = 0;
            }
        }
        // flush the remaining bytes from the buffer
        out.write(buffer.bytes, 0, buffer.offset);
    }
}
public class StringUtil { /** * Trims the argument string for whitespace on the left side only . * @ param pString the string to trim * @ return the string with no whitespace on the left , or { @ code null } if * the string argument is { @ code null } . * @ see # rtrim * @ see String # trim ( ) */ public static String ltrim ( String pString ) { } }
if ( ( pString == null ) || ( pString . length ( ) == 0 ) ) { return pString ; // Null or empty string } for ( int i = 0 ; i < pString . length ( ) ; i ++ ) { if ( ! Character . isWhitespace ( pString . charAt ( i ) ) ) { if ( i == 0 ) { return pString ; // First char is not whitespace } else { return pString . substring ( i ) ; // Return rest after whitespace } } } // If all whitespace , return empty string return "" ;
public class CPDefinitionInventoryLocalServiceBaseImpl { /** * Creates a new cp definition inventory with the primary key . Does not add the cp definition inventory to the database . * @ param CPDefinitionInventoryId the primary key for the new cp definition inventory * @ return the new cp definition inventory */ @ Override @ Transactional ( enabled = false ) public CPDefinitionInventory createCPDefinitionInventory ( long CPDefinitionInventoryId ) { } }
return cpDefinitionInventoryPersistence . create ( CPDefinitionInventoryId ) ;
public class SendRoutingInfoForLCSRequestImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . api . primitives . MAPAsnPrimitive # encodeData * ( org . mobicents . protocols . asn . AsnOutputStream ) */ public void encodeData ( AsnOutputStream asnOs ) throws MAPException { } }
if ( this . mlcNumber == null ) { throw new MAPException ( "Encoding of " + _PrimitiveName + " failed. Manadatory parameter mlcNumber [0] ISDN-AddressString is not set" ) ; } if ( this . targetMS == null ) { throw new MAPException ( "Encoding of " + _PrimitiveName + " failed. Manadatory parameter targetMS [1] SubscriberIdentity is not set" ) ; } ( ( ISDNAddressStringImpl ) this . mlcNumber ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_MLC_NUMBER ) ; try { asnOs . writeTag ( Tag . CLASS_CONTEXT_SPECIFIC , false , _TAG_TARGET_MS ) ; int pos = asnOs . StartContentDefiniteLength ( ) ; ( ( SubscriberIdentityImpl ) this . targetMS ) . encodeAll ( asnOs ) ; asnOs . FinalizeContent ( pos ) ; } catch ( AsnException e ) { throw new MAPException ( "AsnException while encoding parameter targetMS [1] SubscriberIdentity" ) ; } if ( this . extensionContainer != null ) { ( ( MAPExtensionContainerImpl ) this . extensionContainer ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_EXTENSION_CONTAINER ) ; }
public class PersistingUnit { /** * Handles the success case , which by default posts a response to a ConfirmListener . * If you need something else , or to repond to a ResultListener , you ' ll need to override this . */ public void handleSuccess ( ) { } }
if ( _listener instanceof InvocationService . ConfirmListener ) { reportRequestProcessed ( ) ; } else if ( _listener instanceof InvocationService . ResultListener && _resultSet ) { reportRequestProcessed ( _result ) ; }
public class Config { /** * Resolve variables string . * @ param string the string * @ return the string */ static String resolveVariables ( String string ) { } }
if ( string == null ) { return null ; } String rval = string ; Matcher m = STRING_SUBSTITUTION . matcher ( string ) ; while ( m . find ( ) ) { if ( getInstance ( ) . properties . containsKey ( m . group ( 1 ) ) ) { rval = rval . replaceAll ( Pattern . quote ( m . group ( 0 ) ) , get ( m . group ( 1 ) ) . asString ( ) ) ; } else if ( System . getProperties ( ) . contains ( m . group ( 1 ) ) ) { rval = rval . replaceAll ( Pattern . quote ( m . group ( 0 ) ) , System . getProperties ( ) . get ( m . group ( 1 ) ) . toString ( ) ) ; } else if ( System . getenv ( ) . containsKey ( m . group ( 1 ) ) ) { rval = rval . replaceAll ( Pattern . quote ( m . group ( 0 ) ) , System . getenv ( ) . get ( m . group ( 1 ) ) ) ; } } return rval ;
public class OnEntryScriptTypeImpl {
    /**
     * Resets the given feature to its default value; falls through to the
     * superclass for features not declared on this type.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case DroolsPackage.ON_ENTRY_SCRIPT_TYPE__SCRIPT:
                setScript(SCRIPT_EDEFAULT);
                return;
            case DroolsPackage.ON_ENTRY_SCRIPT_TYPE__SCRIPT_FORMAT:
                setScriptFormat(SCRIPT_FORMAT_EDEFAULT);
                return;
        }
        // Unknown feature: delegate to the generated superclass handling.
        super.eUnset(featureID);
    }
}
public class LongStream { /** * Skips first { @ code n } elements and returns { @ code LongStream } with remaining elements . * If this stream contains fewer than { @ code n } elements , then an * empty stream will be returned . * < p > This is a stateful intermediate operation . * < p > Example : * < pre > * n : 3 * stream : [ 1 , 2 , 3 , 4 , 5] * result : [ 4 , 5] * n : 10 * stream : [ 1 , 2] * result : [ ] * < / pre > * @ param n the number of elements to skip * @ return the new stream * @ throws IllegalArgumentException if { @ code n } is negative */ @ NotNull public LongStream skip ( final long n ) { } }
if ( n < 0 ) throw new IllegalArgumentException ( "n cannot be negative" ) ; if ( n == 0 ) return this ; return new LongStream ( params , new LongSkip ( iterator , n ) ) ;
public class CmsSerialDateValue {
    /**
     * Generates the JSON object storing the pattern information.
     * The keys emitted depend on the pattern type: daily patterns store either
     * the every-working-day flag or an interval; weekly/monthly/yearly patterns
     * store interval/month plus either weekday+weeks-of-month or a fixed
     * day-of-month; individual patterns store an explicit date list.
     *
     * @return the JSON object storing the pattern information.
     */
    private JSONObject patternToJson() {
        JSONObject pattern = new JSONObject();
        if (null != getPatternType()) {
            pattern.put(JsonKey.PATTERN_TYPE, new JSONString(getPatternType().toString()));
            switch (getPatternType()) {
                case DAILY:
                    // Either "every working day" or a fixed day interval - never both.
                    if (isEveryWorkingDay()) {
                        pattern.put(JsonKey.PATTERN_EVERYWORKINGDAY, JSONBoolean.getInstance(true));
                    } else {
                        pattern.put(JsonKey.PATTERN_INTERVAL, new JSONString(String.valueOf(getInterval())));
                    }
                    break;
                case WEEKLY:
                    pattern.put(JsonKey.PATTERN_INTERVAL, new JSONString(String.valueOf(getInterval())));
                    pattern.put(JsonKey.PATTERN_WEEKDAYS, toJsonStringList(getWeekDays()));
                    break;
                case MONTHLY:
                    pattern.put(JsonKey.PATTERN_INTERVAL, new JSONString(String.valueOf(getInterval())));
                    // A set weekday selects the "nth weekday of month" form;
                    // otherwise a fixed day-of-month is used.
                    if (null != getWeekDay()) {
                        pattern.put(JsonKey.PATTERN_WEEKS_OF_MONTH, toJsonStringList(getWeeksOfMonth()));
                        pattern.put(JsonKey.PATTERN_WEEKDAYS, toJsonStringList(getWeekDays()));
                    } else {
                        pattern.put(JsonKey.PATTERN_DAY_OF_MONTH, new JSONString(String.valueOf(getDayOfMonth())));
                    }
                    break;
                case YEARLY:
                    pattern.put(JsonKey.PATTERN_MONTH, new JSONString(String.valueOf(getMonth())));
                    // Same weekday-vs-day-of-month split as MONTHLY.
                    if (null != getWeekDay()) {
                        pattern.put(JsonKey.PATTERN_WEEKS_OF_MONTH, toJsonStringList(getWeeksOfMonth()));
                        pattern.put(JsonKey.PATTERN_WEEKDAYS, toJsonStringList(getWeekDays()));
                    } else {
                        pattern.put(JsonKey.PATTERN_DAY_OF_MONTH, new JSONString(String.valueOf(getDayOfMonth())));
                    }
                    break;
                case INDIVIDUAL:
                    pattern.put(JsonKey.PATTERN_DATES, datesToJsonArray(getIndividualDates()));
                    break;
                case NONE:
                default:
                    // NONE carries only the pattern type itself.
                    break;
            }
        }
        return pattern;
    }
}
public class OvrViewManager { /** * Called when the surface is created or recreated . Avoided because this can * be called twice at the beginning . */ void onSurfaceChanged ( int width , int height ) { } }
Log . v ( TAG , "onSurfaceChanged" ) ; final VrAppSettings . EyeBufferParams . DepthFormat depthFormat = mApplication . getAppSettings ( ) . getEyeBufferParams ( ) . getDepthFormat ( ) ; mApplication . getConfigurationManager ( ) . configureRendering ( VrAppSettings . EyeBufferParams . DepthFormat . DEPTH_24_STENCIL_8 == depthFormat ) ;
public class PcHttpUtils { /** * ! ! ! ! ASSUMPTION : all VAR exists in HTTP Header must of type : * APIVARREPLACE _ NAME _ PREFIX _ HTTP _ HEADER * 20140310 This may be costly ( O ( n ^ 2 ) ) of the updated related # of headers ; * # of parameters in the requests . * Better to only do it when there are some replacement in the request * Parameters . a prefix : * TOBE tested * @ param httpHeaderMap * the http header map * @ param requestParameters * the request parameters */ public static void replaceHttpHeaderMapNodeSpecific ( Map < String , String > httpHeaderMap , Map < String , String > requestParameters ) { } }
boolean needToReplaceVarInHttpHeader = false ; for ( String parameter : requestParameters . keySet ( ) ) { if ( parameter . contains ( PcConstants . NODE_REQUEST_PREFIX_REPLACE_VAR ) ) { needToReplaceVarInHttpHeader = true ; break ; } } if ( ! needToReplaceVarInHttpHeader ) { logger . debug ( "No need to replace. Since there are no HTTP header variables. " ) ; return ; } // replace all the values in the ( not the keys ) in the header map . for ( Entry < String , String > entry : httpHeaderMap . entrySet ( ) ) { String key = entry . getKey ( ) ; String valueOriginal = entry . getValue ( ) ; String valueUpdated = NodeReqResponse . replaceStrByMap ( requestParameters , valueOriginal ) ; httpHeaderMap . put ( key , valueUpdated ) ; }
public class Exceptions {
    /**
     * Invoke {@code Throwable#addSuppressed(Throwable)} reflectively if it is available.
     * It is not available on JRE &lt; 1.7.
     *
     * Note: the catch blocks are deliberately kept separate (no Java 7
     * multi-catch) so this source remains compilable for the pre-1.7
     * runtimes it exists to support.
     *
     * @since 2.8
     */
    public static void addSuppressed(Throwable owner, Throwable add) {
        try {
            Method method = owner.getClass().getMethod("addSuppressed", Throwable.class);
            method.invoke(owner, add);
        } catch (NoSuchMethodException e) {
            // ignore, will happen for JRE < 1.7 - suppression is best-effort there
        } catch (SecurityException e) {
            throwUncheckedException(e);
        } catch (IllegalAccessException e) {
            throwUncheckedException(e);
        } catch (IllegalArgumentException e) {
            throwUncheckedException(e);
        } catch (InvocationTargetException e) {
            throwUncheckedException(e);
        }
    }
}
public class SeaGlassSynthPainterImpl {
    /**
     * Paint the provided painter using the provided transform at the specified
     * position and size. Handles if g is a non 2D Graphics by painting via a
     * BufferedImage.
     *
     * @param p         the painter to use.
     * @param ctx       the SynthContext describing the component/region, the
     *                  style, and the state.
     * @param g         the Graphics context to paint with.
     * @param x         the left-most portion of the object to paint.
     * @param y         the upper-most portion of the object to paint.
     * @param w         the width to paint.
     * @param h         the height to paint.
     * @param transform the affine transform to apply, or {@code null} if none
     *                  is to be applied.
     */
    private void paint(SeaGlassPainter p, SynthContext ctx, Graphics g, int x, int y, int w, int h, AffineTransform transform) {
        if (p != null) {
            if (g instanceof Graphics2D) {
                Graphics2D gfx = (Graphics2D) g;
                // Apply transform then translate; the undo below reverses in
                // the opposite order (translate back, then inverse transform)
                // so the composite cancels exactly.
                if (transform != null) {
                    gfx.transform(transform);
                }
                gfx.translate(x, y);
                p.paint(gfx, ctx.getComponent(), w, h);
                gfx.translate(-x, -y);
                if (transform != null) {
                    try {
                        gfx.transform(transform.createInverse());
                    } catch (NoninvertibleTransformException e) {
                        // this should never happen as we are in control of all
                        // calls into this method and only ever pass in simple
                        // transforms of rotate, flip and translates
                        e.printStackTrace();
                    }
                }
            } else {
                // use image if we are printing to a Java 1.1 PrintGraphics as
                // it is not a instance of Graphics2D: paint into an offscreen
                // ARGB image and blit it at (x, y) on the original Graphics.
                BufferedImage img = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
                Graphics2D gfx = img.createGraphics();
                if (transform != null) {
                    gfx.transform(transform);
                }
                p.paint(gfx, ctx.getComponent(), w, h);
                gfx.dispose();
                g.drawImage(img, x, y, null);
                img = null;
            }
        }
    }
}
public class ValueMap { /** * Constructs ValueMap using a sequence of key - value arguments . * Keys should be String , values should fit { @ link Parcel # writeValue ( Object ) } argument * requirements . */ public static ValueMap map ( Object ... map ) { } }
if ( map . length / 2 * 2 != map . length ) throw new IllegalArgumentException ( "Arguments should be <String> key - <?> value pairs" ) ; Builder builder = new Builder ( ) ; for ( int i = 0 ; i < map . length ; i += 2 ) builder . put ( ( String ) map [ i ] , map [ i + 1 ] ) ; return builder . build ( ) ;
public class Vector3i { /** * / * ( non - Javadoc ) * @ see org . joml . Vector3ic # distanceSquared ( org . joml . Vector3ic ) */ public long distanceSquared ( Vector3ic v ) { } }
return distanceSquared ( v . x ( ) , v . y ( ) , v . z ( ) ) ;
public class Reflection {
    /**
     * get super class's generic types.
     * eg. A extends B(T1, T2) will return [T1, T2].<br>
     * if no generic type is defined for the parent class, null is returned.
     * It is not recommended to use this method when you want to get a {@link Unit unit}'s
     * {@link Group group}. Instead, use {@link LocalUnitsManager}.
     */
    public static Type[] getSupperGenericType(Object object) {
        // For dynamic proxies, unwrap to the underlying object first so the
        // real class hierarchy (not the proxy's) is inspected.
        // NOTE(review): the proxy builder is looked up by the object's
        // hashCode - presumably ProxyBuilder registers builders under that
        // key; verify this holds when hashCode is overridden.
        if (Proxy.isProxyClass(object.getClass())) {
            object = ProxyBuilder.getProxyBuilder(object.hashCode()).getMostOriginalObject();
        }
        return getSupperGenericType(object.getClass());
    }
}
public class FormatterMojo { /** * Format file . * @ param file * the file * @ param rc * the rc * @ param hashCache * the hash cache * @ param basedirPath * the basedir path */ private void formatFile ( File file , ResultCollector rc , Properties hashCache , String basedirPath ) throws MojoFailureException , MojoExecutionException { } }
try { doFormatFile ( file , rc , hashCache , basedirPath , false ) ; } catch ( IOException | MalformedTreeException | BadLocationException e ) { rc . failCount ++ ; getLog ( ) . warn ( e ) ; }
public class Lz4Codec { /** * Create a new { @ link Compressor } for use by this { @ link CompressionCodec } . * @ return a new compressor for use by this codec */ @ Override public Compressor createCompressor ( ) { } }
if ( ! isNativeCodeLoaded ( ) ) { throw new RuntimeException ( "native lz4 library not available" ) ; } int bufferSize = conf . getInt ( IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY , IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_DEFAULT ) ; boolean useLz4HC = conf . getBoolean ( IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY , IO_COMPRESSION_CODEC_LZ4_USELZ4HC_DEFAULT ) ; return new Lz4Compressor ( bufferSize , useLz4HC ) ;
public class CRLReasonCodeExtension { /** * Get the attribute value . */ public Integer get ( String name ) throws IOException { } }
if ( name . equalsIgnoreCase ( REASON ) ) { return new Integer ( reasonCode ) ; } else { throw new IOException ( "Name not supported by CRLReasonCodeExtension" ) ; }
public class AmazonS3Client { /** * Adds the specified parameter to the specified request , if the parameter * value is not null . * @ param request * The request to add the parameter to . * @ param paramName * The parameter name . * @ param paramValue * The parameter value . */ private static void addParameterIfNotNull ( Request < ? > request , String paramName , Integer paramValue ) { } }
if ( paramValue != null ) { addParameterIfNotNull ( request , paramName , paramValue . toString ( ) ) ; }
public class CNDStreamWriter { /** * Write given list of node types to output stream . * @ param nodeTypes * List of NodeTypes to write . * @ param os * OutputStream to write to . * @ throws RepositoryException */ public void write ( List < NodeTypeData > nodeTypes , OutputStream os ) throws RepositoryException { } }
OutputStreamWriter out = new OutputStreamWriter ( os ) ; try { for ( NodeTypeData nodeType : nodeTypes ) { printNamespaces ( nodeType , out ) ; printNodeTypeDeclaration ( nodeType , out ) ; } out . close ( ) ; } catch ( IOException e ) { throw new RepositoryException ( e . getMessage ( ) , e ) ; }
public class BsElevateWordToLabelCA { public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsElevateWordToLabelCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsElevateWordToLabelCA > aggsLambda ) { } }
ElevateWordToLabelCQ cq = new ElevateWordToLabelCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; } FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; } if ( aggsLambda != null ) { ElevateWordToLabelCA ca = new ElevateWordToLabelCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; }
public class AbstractSupplier { /** * Void for most suppliers */ @ Override public void setPrimaryKey ( WV value , Mapper mapper , String column , K primaryKey , Object Entity ) { } }