signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ActivityChooserModel { /** * Sets an intent for which to choose a activity . * < strong > Note : < / strong > Clients must set only semantically similar * intents for each data model . * @ param intent The intent . */ public void setIntent ( Intent intent ) { } }
synchronized ( mInstanceLock ) { if ( mIntent == intent ) { return ; } mIntent = intent ; mReloadActivities = true ; ensureConsistentState ( ) ; }
public class TableDescription { /** * The global secondary indexes , if any , on the table . Each index is scoped to a given partition key value . Each * element is composed of : * < ul > * < li > * < code > Backfilling < / code > - If true , then the index is currently in the backfilling phase . Backfilling occurs only * when a new global secondary index is added to the table ; it is the process by which DynamoDB populates the new * index with data from the table . ( This attribute does not appear for indexes that were created during a * < code > CreateTable < / code > operation . ) * < / li > * < li > * < code > IndexName < / code > - The name of the global secondary index . * < / li > * < li > * < code > IndexSizeBytes < / code > - The total size of the global secondary index , in bytes . DynamoDB updates this value * approximately every six hours . Recent changes might not be reflected in this value . * < / li > * < li > * < code > IndexStatus < / code > - The current status of the global secondary index : * < ul > * < li > * < code > CREATING < / code > - The index is being created . * < / li > * < li > * < code > UPDATING < / code > - The index is being updated . * < / li > * < li > * < code > DELETING < / code > - The index is being deleted . * < / li > * < li > * < code > ACTIVE < / code > - The index is ready for use . * < / li > * < / ul > * < / li > * < li > * < code > ItemCount < / code > - The number of items in the global secondary index . DynamoDB updates this value * approximately every six hours . Recent changes might not be reflected in this value . * < / li > * < li > * < code > KeySchema < / code > - Specifies the complete index key schema . The attribute names in the key schema must be * between 1 and 255 characters ( inclusive ) . The key schema must begin with the same partition key as the table . * < / li > * < li > * < code > Projection < / code > - Specifies attributes that are copied ( projected ) from the table into the index . 
These * are in addition to the primary key attributes and index key attributes , which are automatically projected . Each * attribute specification is composed of : * < ul > * < li > * < code > ProjectionType < / code > - One of the following : * < ul > * < li > * < code > KEYS _ ONLY < / code > - Only the index and primary keys are projected into the index . * < / li > * < li > * < code > INCLUDE < / code > - Only the specified table attributes are projected into the index . The list of projected * attributes are in < code > NonKeyAttributes < / code > . * < / li > * < li > * < code > ALL < / code > - All of the table attributes are projected into the index . * < / li > * < / ul > * < / li > * < li > * < code > NonKeyAttributes < / code > - A list of one or more non - key attribute names that are projected into the * secondary index . The total count of attributes provided in < code > NonKeyAttributes < / code > , summed across all of * the secondary indexes , must not exceed 20 . If you project the same attribute into two different indexes , this * counts as two distinct attributes when determining the total . * < / li > * < / ul > * < / li > * < li > * < code > ProvisionedThroughput < / code > - The provisioned throughput settings for the global secondary index , * consisting of read and write capacity units , along with data about increases and decreases . * < / li > * < / ul > * If the table is in the < code > DELETING < / code > state , no information about indexes will be returned . * @ param globalSecondaryIndexes * The global secondary indexes , if any , on the table . Each index is scoped to a given partition key value . * Each element is composed of : < / p > * < ul > * < li > * < code > Backfilling < / code > - If true , then the index is currently in the backfilling phase . Backfilling * occurs only when a new global secondary index is added to the table ; it is the process by which DynamoDB * populates the new index with data from the table . 
( This attribute does not appear for indexes that were * created during a < code > CreateTable < / code > operation . ) * < / li > * < li > * < code > IndexName < / code > - The name of the global secondary index . * < / li > * < li > * < code > IndexSizeBytes < / code > - The total size of the global secondary index , in bytes . DynamoDB updates * this value approximately every six hours . Recent changes might not be reflected in this value . * < / li > * < li > * < code > IndexStatus < / code > - The current status of the global secondary index : * < ul > * < li > * < code > CREATING < / code > - The index is being created . * < / li > * < li > * < code > UPDATING < / code > - The index is being updated . * < / li > * < li > * < code > DELETING < / code > - The index is being deleted . * < / li > * < li > * < code > ACTIVE < / code > - The index is ready for use . * < / li > * < / ul > * < / li > * < li > * < code > ItemCount < / code > - The number of items in the global secondary index . DynamoDB updates this value * approximately every six hours . Recent changes might not be reflected in this value . * < / li > * < li > * < code > KeySchema < / code > - Specifies the complete index key schema . The attribute names in the key schema * must be between 1 and 255 characters ( inclusive ) . The key schema must begin with the same partition key as * the table . * < / li > * < li > * < code > Projection < / code > - Specifies attributes that are copied ( projected ) from the table into the index . * These are in addition to the primary key attributes and index key attributes , which are automatically * projected . Each attribute specification is composed of : * < ul > * < li > * < code > ProjectionType < / code > - One of the following : * < ul > * < li > * < code > KEYS _ ONLY < / code > - Only the index and primary keys are projected into the index . 
* < / li > * < li > * < code > INCLUDE < / code > - Only the specified table attributes are projected into the index . The list of * projected attributes are in < code > NonKeyAttributes < / code > . * < / li > * < li > * < code > ALL < / code > - All of the table attributes are projected into the index . * < / li > * < / ul > * < / li > * < li > * < code > NonKeyAttributes < / code > - A list of one or more non - key attribute names that are projected into the * secondary index . The total count of attributes provided in < code > NonKeyAttributes < / code > , summed across * all of the secondary indexes , must not exceed 20 . If you project the same attribute into two different * indexes , this counts as two distinct attributes when determining the total . * < / li > * < / ul > * < / li > * < li > * < code > ProvisionedThroughput < / code > - The provisioned throughput settings for the global secondary index , * consisting of read and write capacity units , along with data about increases and decreases . * < / li > * < / ul > * If the table is in the < code > DELETING < / code > state , no information about indexes will be returned . */ public void setGlobalSecondaryIndexes ( java . util . Collection < GlobalSecondaryIndexDescription > globalSecondaryIndexes ) { } }
if ( globalSecondaryIndexes == null ) { this . globalSecondaryIndexes = null ; return ; } this . globalSecondaryIndexes = new java . util . ArrayList < GlobalSecondaryIndexDescription > ( globalSecondaryIndexes ) ;
public class PoolInfo { /** * Convert this object to PoolInfoStrings for Thrift * @ param poolInfo Pool info * @ return { @ link PoolInfo } converted to a Thrift form */ public static PoolInfoStrings createPoolInfoStrings ( PoolInfo poolInfo ) { } }
if ( poolInfo == null ) { return null ; } return new PoolInfoStrings ( poolInfo . getPoolGroupName ( ) , poolInfo . getPoolName ( ) ) ;
public class ExtensionHook { /** * Gets the { @ link ConnectRequestProxyListener } s added to this hook . * @ return an unmodifiable { @ code List } containing the added { @ code ConnectRequestProxyListener } s , never { @ code null } . * @ since 2.5.0 */ List < ConnectRequestProxyListener > getConnectRequestProxyListeners ( ) { } }
if ( connectRequestProxyListeners == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( connectRequestProxyListeners ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TexCoordGenType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link TexCoordGenType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/appearance/2.0" , name = "TexCoordGen" , substitutionHeadNamespace = "http://www.opengis.net/citygml/appearance/2.0" , substitutionHeadName = "_TextureParameterization" ) public JAXBElement < TexCoordGenType > createTexCoordGen ( TexCoordGenType value ) { } }
return new JAXBElement < TexCoordGenType > ( _TexCoordGen_QNAME , TexCoordGenType . class , null , value ) ;
public class SparkBitcoinBlockCounter { /** * a job for counting the total number of transactions * @ param sc context * @ param hadoopConf Configuration for input format * @ param inputFile Input file * @ param output outputFile file */ public static void jobTotalNumOfTransactions ( JavaSparkContext sc , Configuration hadoopConf , String inputFile , String outputFile ) { } }
// read bitcoin data from HDFS JavaPairRDD < BytesWritable , BitcoinBlock > bitcoinBlocksRDD = sc . newAPIHadoopFile ( inputFile , BitcoinBlockFileInputFormat . class , BytesWritable . class , BitcoinBlock . class , hadoopConf ) ; // extract the no transactions / block ( map ) JavaPairRDD < String , Long > noOfTransactionPair = bitcoinBlocksRDD . mapToPair ( new PairFunction < Tuple2 < BytesWritable , BitcoinBlock > , String , Long > ( ) { @ Override public Tuple2 < String , Long > call ( Tuple2 < BytesWritable , BitcoinBlock > tupleBlock ) { return mapNoOfTransaction ( tupleBlock . _2 ( ) ) ; } } ) ; // combine the results from all blocks JavaPairRDD < String , Long > totalCount = noOfTransactionPair . reduceByKey ( new Function2 < Long , Long , Long > ( ) { @ Override public Long call ( Long a , Long b ) { return reduceSumUpTransactions ( a , b ) ; } } ) ; // write results to HDFS totalCount . repartition ( 1 ) . saveAsTextFile ( outputFile ) ;
public class ConfigurationAction { /** * < p > doEditClasspath . < / p > * @ return a { @ link java . lang . String } object . */ public String doEditClasspath ( ) { } }
try { selectedRunner = getService ( ) . getRunner ( selectedRunnerName ) ; selectedRunner . setClasspaths ( ClasspathSet . parse ( classpath ) ) ; getService ( ) . updateRunner ( selectedRunnerName , selectedRunner ) ; } catch ( GreenPepperServerException e ) { addActionError ( e . getId ( ) ) ; } return doGetRunners ( ) ;
public class BasicRequestCtx { /** * Private helper function to encode the subjects */ private void encodeSubject ( Subject subject , PrintStream out , Indenter indenter ) { } }
char [ ] indent = indenter . makeString ( ) . toCharArray ( ) ; out . print ( indent ) ; out . append ( "<Subject SubjectCategory=\"" ) . append ( subject . getCategory ( ) . toString ( ) ) . append ( '"' ) ; List subjectAttrs = subject . getAttributesAsList ( ) ; if ( subjectAttrs . size ( ) == 0 ) { // there ' s nothing in this Subject , so just close the tag out . println ( "/>" ) ; } else { // there ' s content , so fill it in out . println ( '>' ) ; encodeAttributes ( subjectAttrs , out , indenter ) ; out . print ( indent ) ; out . println ( "</Subject>" ) ; }
public class CassandraStorage { /** * { @ inheritDoc } Memoized in order to avoid re - preparing statements */ @ Override public SpanConsumer spanConsumer ( ) { } }
if ( spanConsumer == null ) { synchronized ( this ) { if ( spanConsumer == null ) { spanConsumer = new CassandraSpanConsumer ( this , indexCacheSpec ) ; } } } return spanConsumer ;
public class AtomWriter { /** * Write feed body . * @ param entities The list of entities to fill in the XML stream . It can not { @ code null } . * @ throws ODataRenderException In case it is not possible to write to the XML stream . */ public void writeBodyFeed ( List < ? > entities ) throws ODataRenderException { } }
checkNotNull ( entities ) ; try { for ( Object entity : entities ) { writeEntry ( entity , true ) ; } } catch ( XMLStreamException | IllegalAccessException | NoSuchFieldException | ODataEdmException e ) { LOG . error ( "Not possible to marshall feed stream XML" ) ; throw new ODataRenderException ( "Not possible to marshall feed stream XML: " , e ) ; }
public class JmsSession { /** * Method that opens a new Context in eFaps , setting the User , the Locale , * the Attributes of this Session { @ link # sessionAttributes } . * @ throws EFapsException on error * @ see # attach ( ) */ public void openContext ( ) throws EFapsException { } }
if ( isLogedIn ( ) ) { if ( ! Context . isTMActive ( ) ) { if ( ! this . sessionAttributes . containsKey ( UserAttributesSet . CONTEXTMAPKEY ) ) { Context . begin ( null , Context . Inheritance . Local ) ; this . sessionAttributes . put ( UserAttributesSet . CONTEXTMAPKEY , new UserAttributesSet ( this . userName ) ) ; Context . rollback ( ) ; } Context . begin ( this . userName , null , this . sessionAttributes , null , null , Context . Inheritance . Local ) ; this . timeStamp = new Date ( ) ; } }
public class TransformerIdentityImpl { /** * Report an element type declaration . * < p > The content model will consist of the string " EMPTY " , the * string " ANY " , or a parenthesised group , optionally followed * by an occurrence indicator . The model will be normalized so * that all whitespace is removed , and will include the enclosing * parentheses . < / p > * @ param name The element type name . * @ param model The content model as a normalized string . * @ exception SAXException The application may raise an exception . */ public void elementDecl ( String name , String model ) throws SAXException { } }
if ( null != m_resultDeclHandler ) m_resultDeclHandler . elementDecl ( name , model ) ;
public class JBBPFieldString { /** * Get the reversed bit representation of the value . * @ param value the value to be reversed , can be null * @ return the reversed value */ public static String reverseBits ( final String value ) { } }
String result = null ; if ( value != null ) { final char [ ] chars = value . toCharArray ( ) ; for ( int i = 0 ; i < chars . length ; i ++ ) { chars [ i ] = ( char ) JBBPFieldUShort . reverseBits ( ( short ) chars [ i ] ) ; } result = String . valueOf ( chars ) ; } return result ;
public class DateTimeDialogFragment { /** * < p > The callback used by the DatePicker to update { @ code mCalendar } as * the user changes the date . Each time this is called , we also update * the text on the date tab to reflect the date the user has currenly * selected . < / p > * < p > Implements the { @ link DateFragment . DateChangedListener } * interface . < / p > */ @ Override public void onDateChanged ( int year , int month , int day ) { } }
mCalendar . set ( year , month , day ) ; updateDateTab ( ) ;
public class StreamingCinchContext { /** * Create a new ForeachRddFunction , which implements Spark ' s VoidFunction interface . */ public < T > ForeachRddFunction < T > foreachRddFunction ( Class < ? extends VoidFunction < T > > springBeanClass ) { } }
return new ForeachRddFunction < > ( voidFunction ( springBeanClass ) ) ;
public class IndexedSet { /** * { @ inheritDoc } */ @ Override public void flip ( T e ) { } }
indices . flip ( itemToIndex . get ( e ) . intValue ( ) ) ;
public class AbstractResilienceStrategy { /** * Called when the cache failed to recover from a failing store operation on a key . * @ param key key now inconsistent * @ param because exception thrown by the failing operation * @ param cleanup all the exceptions that occurred during cleanup */ protected void inconsistent ( K key , StoreAccessException because , StoreAccessException ... cleanup ) { } }
pacedErrorLog ( "Ehcache key {} in possible inconsistent state" , key , because ) ;
public class CommandHelpers { /** * A helper function to add the components to the builder and return a list of all the components */ public static List < InputComponent > addInputComponents ( UIBuilder builder , InputComponent ... components ) { } }
List < InputComponent > inputComponents = new ArrayList < > ( ) ; for ( InputComponent component : components ) { builder . add ( component ) ; inputComponents . add ( component ) ; } return inputComponents ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcRelDecomposes ( ) { } }
if ( ifcRelDecomposesEClass == null ) { ifcRelDecomposesEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 469 ) ; } return ifcRelDecomposesEClass ;
public class ImgUtil { /** * 给图片添加图片水印 < br > * 此方法并不关闭流 * @ param srcImage 源图像流 * @ param out 目标图像流 * @ param pressImg 水印图片 , 可以使用 { @ link ImageIO # read ( File ) } 方法读取文件 * @ param x 修正值 。 默认在中间 , 偏移量相对于中间偏移 * @ param y 修正值 。 默认在中间 , 偏移量相对于中间偏移 * @ param alpha 透明度 : alpha 必须是范围 [ 0.0 , 1.0 ] 之内 ( 包含边界值 ) 的一个浮点数字 * @ throws IORuntimeException IO异常 * @ since 3.2.2 */ public static void pressImage ( Image srcImage , OutputStream out , Image pressImg , int x , int y , float alpha ) throws IORuntimeException { } }
pressImage ( srcImage , getImageOutputStream ( out ) , pressImg , x , y , alpha ) ;
public class DubiousListCollection { /** * return the field object that the current method was called on , by finding the * reference down in the stack based on the number of parameters * @ param stk the opcode stack where fields are stored * @ param signature the signature of the called method * @ return the field annotation for the field whose method was executed */ @ Nullable private static XField getFieldFromStack ( final OpcodeStack stk , final String signature ) { } }
int parmCount = SignatureUtils . getNumParameters ( signature ) ; if ( stk . getStackDepth ( ) > parmCount ) { OpcodeStack . Item itm = stk . getStackItem ( parmCount ) ; return itm . getXField ( ) ; } return null ;
public class BetaDetector { /** * Reports bug in case the field defined by the given name is { @ link Beta } . * < p > The field is searched in current class and all super classses as well . */ private void checkField ( String fieldName ) { } }
JavaClass javaClass = checkClass ( ) ; if ( javaClass == null ) { return ; } for ( JavaClass current = javaClass ; current != null ; current = getSuperclass ( current ) ) { for ( Field field : current . getFields ( ) ) { if ( fieldName . equals ( field . getName ( ) ) ) { // field has been found - check if it ' s beta if ( isBeta ( field . getAnnotationEntries ( ) ) ) { bugReporter . reportBug ( createBugInstance ( BETA_FIELD_USAGE ) . addReferencedField ( this ) ) ; } return ; } } } bugReporter . logError ( "Can't locate field " + javaClass . getClassName ( ) + "." + fieldName ) ;
public class IfcPersonImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) public EList < IfcAddress > getAddresses ( ) { } }
return ( EList < IfcAddress > ) eGet ( Ifc2x3tc1Package . Literals . IFC_PERSON__ADDRESSES , true ) ;
public class AbstractDatabaseEngine { /** * Connects to the database . * @ throws Exception If connection is not possible , or failed to decrypt username / password if encryption was provided . */ protected void connect ( ) throws Exception { } }
String username = this . properties . getUsername ( ) ; String password = this . properties . getPassword ( ) ; if ( this . properties . isEncryptedPassword ( ) || this . properties . isEncryptedUsername ( ) ) { String privateKey = getPrivateKey ( ) ; if ( this . properties . isEncryptedUsername ( ) ) { final String decryptedUsername = AESHelper . decrypt ( this . properties . getProperty ( ENCRYPTED_USERNAME ) , privateKey ) ; if ( StringUtils . isEmpty ( decryptedUsername ) ) { throw new DatabaseEngineException ( "The encrypted username could not be decrypted." ) ; } username = decryptedUsername ; } if ( this . properties . isEncryptedPassword ( ) ) { final String decryptedPassword = AESHelper . decrypt ( this . properties . getProperty ( ENCRYPTED_PASSWORD ) , privateKey ) ; if ( StringUtils . isEmpty ( decryptedPassword ) ) { throw new DatabaseEngineException ( "The encrypted password could not be decrypted." ) ; } password = decryptedPassword ; } } final String jdbc = getFinalJdbcConnection ( this . properties . getJdbc ( ) ) ; this . conn = DriverManager . getConnection ( jdbc , username , password ) ; if ( this . properties . isSchemaSet ( ) ) { setSchema ( this . properties . getSchema ( ) ) ; } this . currentSchema = Optional . ofNullable ( getSchema ( ) ) . orElseThrow ( ( ) -> new DatabaseEngineException ( "Could not get current schema" ) ) ; setTransactionIsolation ( ) ; logger . debug ( "Connection successful." ) ;
public class Polygon { /** * Point in polygon test , based on * http : / / www . ecse . rpi . edu / Homepages / wrf / Research / Short _ Notes / pnpoly . html * by W . Randolph Franklin * @ param v Point to test * @ return True when contained . */ public boolean containsPoint2D ( double [ ] v ) { } }
assert ( v . length == 2 ) ; final double testx = v [ 0 ] ; final double testy = v [ 1 ] ; boolean c = false ; Iterator < double [ ] > it = points . iterator ( ) ; double [ ] pre = points . get ( points . size ( ) - 1 ) ; while ( it . hasNext ( ) ) { final double [ ] cur = it . next ( ) ; final double curx = cur [ 0 ] , cury = cur [ 1 ] ; final double prex = pre [ 0 ] , prey = pre [ 1 ] ; if ( ( ( cury > testy ) != ( prey > testy ) ) ) { if ( ( testx < ( prex - curx ) * ( testy - cury ) / ( prey - cury ) + curx ) ) { c = ! c ; } } pre = cur ; } return c ;
public class EntityFactory { private Table getTableStrict ( EntityConfig entityConfig ) { } }
Assert . isTrue ( entityConfig . hasTableName ( ) , "A tableName is expected for the entityConfig " + entityConfig . getEntityName ( ) ) ; Table table = config . getMetadata ( ) . getTableBySchemaAndName ( entityConfig . getSchemaName ( ) , entityConfig . getTableName ( ) ) ; Assert . notNull ( table , "Could not find table named " + entityConfig . getTableName ( ) ) ; return table ;
public class AccordionPanel { /** * Add the given component to this accordion , wrapping it into a * collapsible panel with the given title . * @ param title The title * @ param component The component to add * @ return The collapsible panel that has been created internally */ public CollapsiblePanel addToAccordion ( String title , JComponent component ) { } }
return addToAccordion ( title , component , false ) ;
public class RetentionEnforcingStore { /** * Updates the store definition object and the retention time based on the * updated store definition */ @ Override public void updateStoreDefinition ( StoreDefinition storeDef ) { } }
this . storeDef = storeDef ; if ( storeDef . hasRetentionPeriod ( ) ) this . retentionTimeMs = storeDef . getRetentionDays ( ) * Time . MS_PER_DAY ;
public class DrizzleResultSet { /** * Retrieves the value of the designated column in the current row of this < code > ResultSet < / code > object as a * < code > java . sql . Time < / code > object in the Java programming language . This method uses the given calendar to * construct an appropriate millisecond value for the time if the underlying database does not store timezone * information . * @ param columnIndex the first column is 1 , the second is 2 , . . . * @ param cal the < code > java . util . Calendar < / code > object to use in constructing the time * @ return the column value as a < code > java . sql . Time < / code > object ; if the value is SQL < code > NULL < / code > , the value * returned is < code > null < / code > in the Java programming language * @ throws java . sql . SQLException if the columnIndex is not valid ; if a database access error occurs or this method * is called on a closed result set * @ since 1.2 */ public Time getTime ( final int columnIndex , final Calendar cal ) throws SQLException { } }
return getValueObject ( columnIndex ) . getTime ( cal ) ;
public class BindingElement { /** * Replies the string representation of the binding key . * @ return the string representation of the binding key . * @ since 0.8 */ public String getKeyString ( ) { } }
if ( ! Strings . isEmpty ( getAnnotatedWith ( ) ) ) { return MessageFormat . format ( "@{1} {0}" , getBind ( ) , getAnnotatedWith ( ) ) ; // $ NON - NLS - 1 $ } if ( ! Strings . isEmpty ( getAnnotatedWithName ( ) ) ) { return MessageFormat . format ( "@Named({1}) {0}" , getBind ( ) , getAnnotatedWithName ( ) ) ; // $ NON - NLS - 1 $ } return MessageFormat . format ( "{0}" , getBind ( ) ) ; // $ NON - NLS - 1 $
public class InfiniteScrollPanel { /** * Will be called once the scroll bar reached at the bottom of the scroll panel . * This will load the current { @ link this # offset } and { @ link this # limit } and will * check if recycling is enabled . */ protected void onScrollBottom ( ) { } }
if ( isEnableRecycling ( ) && recycleManager . hasRecycledWidgets ( ) ) { recycleManager . recycle ( RecyclePosition . BOTTOM ) ; } else { load ( offset , limit ) ; }
public class ItemListener { /** * Calls { @ link # onRenamed } and { @ link # onLocationChanged } as appropriate . * @ param rootItem the topmost item whose location has just changed * @ param oldFullName the previous { @ link Item # getFullName } * @ since 1.548 */ public static void fireLocationChange ( final Item rootItem , final String oldFullName ) { } }
String prefix = rootItem . getParent ( ) . getFullName ( ) ; if ( ! prefix . isEmpty ( ) ) { prefix += '/' ; } final String newFullName = rootItem . getFullName ( ) ; assert newFullName . startsWith ( prefix ) ; int prefixS = prefix . length ( ) ; if ( oldFullName . startsWith ( prefix ) && oldFullName . indexOf ( '/' , prefixS ) == - 1 ) { final String oldName = oldFullName . substring ( prefixS ) ; final String newName = rootItem . getName ( ) ; assert newName . equals ( newFullName . substring ( prefixS ) ) ; forAll ( new Function < ItemListener , Void > ( ) { @ Override public Void apply ( ItemListener l ) { l . onRenamed ( rootItem , oldName , newName ) ; return null ; } } ) ; } forAll ( new Function < ItemListener , Void > ( ) { @ Override public Void apply ( ItemListener l ) { l . onLocationChanged ( rootItem , oldFullName , newFullName ) ; return null ; } } ) ; if ( rootItem instanceof ItemGroup ) { for ( final Item child : Items . allItems ( ACL . SYSTEM , ( ItemGroup ) rootItem , Item . class ) ) { final String childNew = child . getFullName ( ) ; assert childNew . startsWith ( newFullName ) ; assert childNew . charAt ( newFullName . length ( ) ) == '/' ; final String childOld = oldFullName + childNew . substring ( newFullName . length ( ) ) ; forAll ( new Function < ItemListener , Void > ( ) { @ Override public Void apply ( ItemListener l ) { l . onLocationChanged ( child , childOld , childNew ) ; return null ; } } ) ; } }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link Boolean } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link Boolean } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "Boolean" ) public JAXBElement < Boolean > createBoolean ( Boolean value ) { } }
return new JAXBElement < Boolean > ( _Boolean_QNAME , Boolean . class , null , value ) ;
public class SxmpSession { /** * Processes an InputStream that contains a request . Does its best to * only produce a Response that can be written to an OutputStream . Any * exception this method throws should be treated as fatal and no attempt * should be made to print out valid XML as a response . * @ param is The InputStream to read the Request from * @ return A Response that can be written to an OutputStream via an SxmpWriter * @ throws IOException Thrown if there is an error while reading the InputStream * @ throws SAXException Thrown if there is an error with parsing the XML document * @ throws ParserConfigurationException Thrown if there is an error with loading * the XML parser . */ public Response process ( InputStream is ) throws IOException , SAXException , ParserConfigurationException { } }
// create a new XML parser SxmpParser parser = new SxmpParser ( version ) ; // an instance of an operation we ' ll be processing as a request Operation operation = null ; try { // parse input stream into an operation ( this may operation = parser . parse ( is ) ; } catch ( SxmpParsingException e ) { // major issue parsing the request into something valid - - this // exception may contain a partially parsed operation - - if it does // then we want to return valid XML back to the caller of this session // don ' t dump stack trace ; instead just log error message and what of the operation we parsed if ( e . getOperation ( ) != null && e . getOperation ( ) . getType ( ) != null ) { logger . warn ( "Unable to fully parse XML into a request, returning ErrorResponse; error: " + e . getMessage ( ) + ", parsed: " + e . getOperation ( ) ) ; // we ' ll actually return a generic ErrorResponse back return new ErrorResponse ( e . getOperation ( ) . getType ( ) , e . getErrorCode ( ) . getIntValue ( ) , e . getErrorMessage ( ) ) ; } else { // otherwise , we should just return a generic error since nothing // really was parsed in the XML document throw new SAXException ( e . getMessage ( ) , e ) ; } } // at this point , we ' ll catch any SxmpErrorExceptions and make sure they // are always converted into an ErrorResponse object , rather than // the exception ever being thrown try { // can only handle requests if ( ! ( operation instanceof Request ) ) { throw new SxmpErrorException ( SxmpErrorCode . UNSUPPORTED_OPERATION , "A session can only process requests" ) ; } // convert to a request Request req = ( Request ) operation ; // was an account included ? if ( req . getAccount ( ) == null ) { throw new SxmpErrorException ( SxmpErrorCode . MISSING_REQUIRED_ELEMENT , "A request must include account credentials" ) ; } // authenticate the request if ( ! processor . authenticate ( req . getAccount ( ) ) ) { throw new SxmpErrorException ( SxmpErrorCode . 
AUTHENTICATION_FAILURE , "Authentication failure" ) ; } // handle request type if ( operation instanceof SubmitRequest ) { return processor . submit ( req . getAccount ( ) , ( SubmitRequest ) operation ) ; } else if ( operation instanceof DeliverRequest ) { return processor . deliver ( req . getAccount ( ) , ( DeliverRequest ) operation ) ; } else if ( operation instanceof DeliveryReportRequest ) { return processor . deliveryReport ( req . getAccount ( ) , ( DeliveryReportRequest ) operation ) ; } else { // if we got here , then a request we don ' t support occurred throw new SxmpErrorException ( SxmpErrorCode . UNSUPPORTED_OPERATION , "Unsupported operation request type" ) ; } } catch ( SxmpErrorException e ) { // because this is a mostly normal error in the course of processing a message // we don ' t want to print the full stacktrace - - we just want to print the message logger . warn ( e . getMessage ( ) ) ; // we ' ll actually return a generic ErrorResponse back return new ErrorResponse ( operation . getType ( ) , e . getErrorCode ( ) . getIntValue ( ) , e . getErrorMessage ( ) ) ; } catch ( Throwable t ) { logger . error ( "Major uncaught throwable while processing request, generating an ErrorResponse" , t ) ; // we ' ll actually return a generic ErrorResponse back return new ErrorResponse ( operation . getType ( ) , SxmpErrorCode . GENERIC . getIntValue ( ) , "Generic error while processing request" ) ; }
public class EnglishGrammaticalStructure { /** * Destructively modifies this < code > Collection & lt ; TypedDependency & gt ; < / code > * by collapsing several types of transitive pairs of dependencies . * < dl > * < dt > prepositional object dependencies : pobj < / dt > * < dd > * < code > prep ( cat , in ) < / code > and < code > pobj ( in , hat ) < / code > are collapsed to * < code > prep _ in ( cat , hat ) < / code > < / dd > * < dt > prepositional complement dependencies : pcomp < / dt > * < dd > * < code > prep ( heard , of ) < / code > and < code > pcomp ( of , attacking ) < / code > are * collapsed to < code > prepc _ of ( heard , attacking ) < / code > < / dd > * < dt > conjunct dependencies < / dt > * < dd > * < code > cc ( investors , and ) < / code > and * < code > conj ( investors , regulators ) < / code > are collapsed to * < code > conj _ and ( investors , regulators ) < / code > < / dd > * < dt > possessive dependencies : possessive < / dt > * < dd > * < code > possessive ( Montezuma , ' s ) < / code > will be erased . This is like a collapsing , but * due to the flatness of NPs , two dependencies are not actually composed . < / dd > * < dt > For relative clauses , it will collapse referent < / dt > * < dd > * < code > ref ( man , that ) < / code > and < code > dobj ( love , that ) < / code > are collapsed * to < code > dobj ( love , man ) < / code > < / dd > * < / dl > */ @ Override protected void collapseDependencies ( List < TypedDependency > list , boolean CCprocess ) { } }
// Normalise the raw dependency list before any collapsing takes place.
if (DEBUG) {
    printListSorted("collapseDependencies: CCproc: " + CCprocess, list);
}
correctDependencies(list);
if (DEBUG) {
    printListSorted("After correctDependencies:", list);
}
// Remove duplicated conjunct relations first.
eraseMultiConj(list);
if (DEBUG) {
    printListSorted("After collapse multi conj:", list);
}
// Multi-word preposition collapsing: two-word, flat, two-word (bis), three-word.
// NOTE(review): the order of these passes matters -- each consumes output of the previous.
collapse2WP(list);
if (DEBUG) {
    printListSorted("After collapse2WP:", list);
}
collapseFlatMWP(list);
if (DEBUG) {
    printListSorted("After collapseFlatMWP:", list);
}
collapse2WPbis(list);
if (DEBUG) {
    printListSorted("After collapse2WPbis:", list);
}
collapse3WP(list);
if (DEBUG) {
    printListSorted("After collapse3WP:", list);
}
// Collapse prep/pobj, prep/pcomp and possessive pairs (see method javadoc).
collapsePrepAndPoss(list);
if (DEBUG) {
    printListSorted("After PrepAndPoss:", list);
}
// Fold cc/conj pairs into conj_<conjunction> relations.
collapseConj(list);
if (DEBUG) {
    printListSorted("After conj:", list);
}
// Resolve relative-clause referents (ref + dobj -> dobj on the head noun).
collapseReferent(list);
if (DEBUG) {
    printListSorted("After collapse referent:", list);
}
// Optionally propagate dependencies across coordinated heads.
if (CCprocess) {
    treatCC(list);
    if (DEBUG) {
        printListSorted("After treatCC:", list);
    }
}
// Drop dependencies marked for removal, then restore canonical order.
removeDep(list);
if (DEBUG) {
    printListSorted("After remove dep:", list);
}
Collections.sort(list);
if (DEBUG) {
    printListSorted("After all collapse:", list);
}
public class JKFormatUtil { /** * Gets the number formatter . * @ param pattern the pattern * @ return the number formatter */ public static Format getNumberFormatter ( final String pattern ) { } }
Format format = JKFormatUtil . formatMap . get ( pattern ) ; if ( format == null ) { format = new DecimalFormat ( pattern ) ; JKFormatUtil . formatMap . put ( pattern , format ) ; } return format ;
public class ItemAPI { /** * Returns the difference in fields values between the two revisions . * @ param itemId * The id of the item * @ param revisionFrom * The from revision * @ param revisionTo * The to revision * @ return The difference between the two revision */ public List < ItemFieldDifference > getItemRevisionDifference ( int itemId , int revisionFrom , int revisionTo ) { } }
return getResourceFactory ( ) . getApiResource ( "/item/" + itemId + "/revision/" + revisionFrom + "/" + revisionTo ) . get ( new GenericType < List < ItemFieldDifference > > ( ) { } ) ;
public class TrainingsImpl { /** * Create a project . * @ param name Name of the project * @ param createProjectOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Project object */ public Observable < ServiceResponse < Project > > createProjectWithServiceResponseAsync ( String name , CreateProjectOptionalParameter createProjectOptionalParameter ) { } }
// Validate the required inputs up front.
if (name == null) {
    throw new IllegalArgumentException("Parameter name is required and cannot be null.");
}
if (this.client.apiKey() == null) {
    throw new IllegalArgumentException("Parameter this.client.apiKey() is required and cannot be null.");
}
// Unpack the optional parameters; each defaults to null when the holder is absent.
String description = null;
UUID domainId = null;
String classificationType = null;
if (createProjectOptionalParameter != null) {
    description = createProjectOptionalParameter.description();
    domainId = createProjectOptionalParameter.domainId();
    classificationType = createProjectOptionalParameter.classificationType();
}
// Delegate to the fully-expanded overload.
return createProjectWithServiceResponseAsync(name, description, domainId, classificationType);
public class PreviewAgentsResult { /** * The resulting list of agents . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAgentPreviews ( java . util . Collection ) } or { @ link # withAgentPreviews ( java . util . Collection ) } if you want * to override the existing values . * @ param agentPreviews * The resulting list of agents . * @ return Returns a reference to this object so that method calls can be chained together . */ public PreviewAgentsResult withAgentPreviews ( AgentPreview ... agentPreviews ) { } }
// Lazily create the backing list sized for the incoming values, then append
// them all. Values are appended, not replaced -- see the method javadoc.
if (this.agentPreviews == null) {
    setAgentPreviews(new java.util.ArrayList<AgentPreview>(agentPreviews.length));
}
java.util.Collections.addAll(this.agentPreviews, agentPreviews);
return this;
public class CampaignEventFilterMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CampaignEventFilter campaignEventFilter , ProtocolMarshaller protocolMarshaller ) { } }
// A null model object is a programming error on the caller's side.
if (campaignEventFilter == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each member through its static marshalling binding.
    protocolMarshaller.marshall(campaignEventFilter.getDimensions(), DIMENSIONS_BINDING);
    protocolMarshaller.marshall(campaignEventFilter.getFilterType(), FILTERTYPE_BINDING);
} catch (Exception e) {
    // Wrap any low-level failure in the SDK's client-side exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class AcpOrd { /** * < p > Lazy Getter for quOrGdChk . < / p > * @ return String * @ throws IOException - IO exception */ public final String lazyGetQuOrGdChk ( ) throws IOException { } }
// Load the SQL text from the classpath on first access, then serve the cached copy.
if (this.quOrGdChk == null) {
    this.quOrGdChk = loadString("/webstore/ordGdChk.sql");
}
return this.quOrGdChk;
public class OrientedBox3f { /** * Set the second axis of the box . * The third axis is updated to be perpendicular to the two other axis . * @ param axis - the new values for the first axis . * @ param extent - the extent of the axis . */ @ Override public void setSecondAxis ( Vector3D axis , double extent ) { } }
setSecondAxis ( axis . getX ( ) , axis . getY ( ) , axis . getZ ( ) , extent ) ;
public class DatabaseHashMap { /** * close the connection */ void closeConnection ( Connection con ) { } }
// Entry trace: record which connection is about to be closed.
if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
    LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[CLOSE_CONNECTION], "closing " + con);
}
try {
    con.close();
} catch (Throwable t) {
    // A failed close is captured via FFDC and logged, but never propagated --
    // the context/transaction cleanup below must still run.
    com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.session.store.db.DatabaseHashMap.closeConnection", "1056", con);
    LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CLOSE_CONNECTION], "CommonMessage.exception", t);
}
endDBContext(); // PK06395 / d321615
// Resume any transaction that was suspended while the session DB work ran.
resumeTransaction();
// Exit trace.
if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
    LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[CLOSE_CONNECTION], "closed " + con);
}
public class Css { /** * Create a Css Selector Transform */ public static CssSel sel ( String selector , String value ) { } }
return j . sel ( selector , value ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EEnum getIfcLightFixtureTypeEnum ( ) { } }
// Lazily resolve the classifier from the registered IFC4 package;
// the classifier index (1013) is fixed by the generated model.
if (ifcLightFixtureTypeEnumEEnum == null) {
    ifcLightFixtureTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1013);
}
return ifcLightFixtureTypeEnumEEnum;
public class Timestamp { /** * Creates a new timestamp from given seconds and nanoseconds . * @ param seconds Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z . Must be * from from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive . * @ param nanos Non - negative fractions of a second at nanosecond resolution . Negative second * values with fractions must still have non - negative nanos values that count forward in time . * Must be from 0 to 999,999,999 inclusive . * @ return new { @ code Timestamp } with specified fields . * @ throws IllegalArgumentException if the arguments are out of range . * @ since 0.5 */ public static Timestamp create ( long seconds , int nanos ) { } }
// Range-check seconds (lower bound first, then upper bound)...
if (seconds < -MAX_SECONDS) {
    throw new IllegalArgumentException("'seconds' is less than minimum (" + -MAX_SECONDS + "): " + seconds);
}
if (seconds > MAX_SECONDS) {
    throw new IllegalArgumentException("'seconds' is greater than maximum (" + MAX_SECONDS + "): " + seconds);
}
// ...then nanos, which must lie in [0, MAX_NANOS].
if (nanos < 0) {
    throw new IllegalArgumentException("'nanos' is less than zero: " + nanos);
}
if (nanos > MAX_NANOS) {
    throw new IllegalArgumentException("'nanos' is greater than maximum (" + MAX_NANOS + "): " + nanos);
}
// Arguments validated; delegate to the AutoValue-generated implementation.
return new AutoValue_Timestamp(seconds, nanos);
public class JDBCResultSet { /** * # ifdef JAVA4 */ public void updateBlob ( int columnIndex , java . sql . Blob x ) throws SQLException { } }
// Reject the driver's own client-side Blob wrapper as an update value,
// then stage the update on the underlying prepared statement.
if (!(x instanceof JDBCBlobClient)) {
    startUpdate(columnIndex);
    preparedStatement.setBlobParameter(columnIndex, x);
} else {
    throw Util.sqlException(ErrorCode.JDBC_INVALID_ARGUMENT, "invalid Blob");
}
public class AbstractItem { /** * Updates an Item by its XML definition . * @ param source source of the Item ' s new definition . * The source should be either a < code > StreamSource < / code > or a < code > SAXSource < / code > , other * sources may not be handled . * @ since 1.473 */ public void updateByXml ( Source source ) throws IOException { } }
// Caller must be allowed to reconfigure this item.
checkPermission(CONFIGURE);
XmlFile configXmlFile = getConfigFile();
// Write the new definition to a temporary file first, so the real config.xml
// is only replaced once all of the steps below succeed.
final AtomicFileWriter out = new AtomicFileWriter(configXmlFile.getFile());
try {
    try {
        // Copy the source XML through the hardened transformer.
        XMLUtils.safeTransform(source, new StreamResult(out));
        out.close();
    } catch (TransformerException | SAXException e) {
        throw new IOException("Failed to persist config.xml", e);
    }
    // try to reflect the changes by reloading
    Object o = new XmlFile(Items.XSTREAM, out.getTemporaryFile()).unmarshalNullingOut(this);
    if (o != this) {
        // ensure that we've got the same job type. extending this code to support updating
        // to different job type requires destroying & creating a new job type
        throw new IOException("Expecting " + this.getClass() + " but got " + o.getClass() + " instead");
    }
    // Re-run onLoad as if the item had just been read from disk.
    Items.whileUpdatingByXml(new NotReallyRoleSensitiveCallable<Void, IOException>() {
        @Override
        public Void call() throws IOException {
            onLoad(getParent(), getRootDir().getName());
            return null;
        }
    });
    Jenkins.getInstance().rebuildDependencyGraphAsync();
    // if everything went well, commit this new version
    out.commit();
    SaveableListener.fireOnChange(this, getConfigFile());
} finally {
    out.abort(); // don't leave anything behind
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcMaterialConstituentSet ( ) { } }
// Lazily resolve the classifier from the registered IFC4 package;
// the classifier index (359) is fixed by the generated model.
if (ifcMaterialConstituentSetEClass == null) {
    ifcMaterialConstituentSetEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(359);
}
return ifcMaterialConstituentSetEClass;
public class ShapeModifiersProcessor { /** * Override name of the enums , marshall / unmarshall location of the * members in the given shape model . */ private void postprocess_ModifyMemberProperty ( ShapeModel shapeModel , String memberName , ShapeModifier_ModifyModel modifyModel ) { } }
// Rename the generated enum constant when the customization asks for it.
if (modifyModel.getEmitEnumName() != null) {
    EnumModel enumModel = shapeModel.findEnumModelByValue(memberName);
    if (enumModel == null) {
        throw new IllegalStateException(String.format("Cannot find enum [%s] in the intermediate model when processing " + "customization config shapeModifiers.%s", memberName, memberName));
    }
    enumModel.setName(modifyModel.getEmitEnumName());
}
// Override the wire location used when marshalling the member.
// BUGFIX: fail fast with a descriptive error when the member is missing,
// instead of the bare NullPointerException the old code produced.
if (modifyModel.getMarshallLocationName() != null) {
    MemberModel memberModel = shapeModel.findMemberModelByC2jName(memberName);
    if (memberModel == null) {
        throw new IllegalStateException(String.format("Cannot find member [%s] in the intermediate model when processing " + "customization config shapeModifiers.%s", memberName, memberName));
    }
    memberModel.getHttp().setMarshallLocationName(modifyModel.getMarshallLocationName());
}
// Override the wire location used when unmarshalling the member.
if (modifyModel.getUnmarshallLocationName() != null) {
    MemberModel memberModel = shapeModel.findMemberModelByC2jName(memberName);
    if (memberModel == null) {
        throw new IllegalStateException(String.format("Cannot find member [%s] in the intermediate model when processing " + "customization config shapeModifiers.%s", memberName, memberName));
    }
    memberModel.getHttp().setUnmarshallLocationName(modifyModel.getUnmarshallLocationName());
}
public class AutomationExecution { /** * The key - value map of execution parameters , which were supplied when calling StartAutomationExecution . * @ param parameters * The key - value map of execution parameters , which were supplied when calling StartAutomationExecution . * @ return Returns a reference to this object so that method calls can be chained together . */ public AutomationExecution withParameters ( java . util . Map < String , java . util . List < String > > parameters ) { } }
// Store the parameter map, then return this instance to allow call chaining.
this.setParameters(parameters);
return this;
public class SqlgStartupManager { /** * get the build version * @ return the build version , or null if unknown */ String getBuildVersion ( ) { } }
if ( this . buildVersion == null ) { Properties prop = new Properties ( ) ; try { // try system URL u = ClassLoader . getSystemResource ( SQLG_APPLICATION_PROPERTIES ) ; if ( u == null ) { // try own class loader u = getClass ( ) . getClassLoader ( ) . getResource ( SQLG_APPLICATION_PROPERTIES ) ; } if ( u != null ) { try ( InputStream is = u . openStream ( ) ) { prop . load ( is ) ; } this . buildVersion = prop . getProperty ( APPLICATION_VERSION ) ; } } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } return this . buildVersion ;
public class WdrVideoUrlParser { /** * Erzeugt eine Map aus der Auflösungsbreite und der Video - URL * @ param m3u8Content Inhalt der m3u8 - Datei * @ return */ private Map < Integer , String > getResolutionUrlMapFromM3u8 ( String m3u8Content ) { } }
Map < Integer , String > resolutionUrlMap = new HashMap < > ( ) ; // Split nach # , um für jede Auflösung eine eigenen String zu erhalten String [ ] parts = m3u8Content . split ( "#" ) ; for ( String part : parts ) { String resolution = getSubstring ( part , M3U8_RESOLUTION_BEGIN , M3U8_RESOLUTION_END ) ; String url = getSubstring ( part , M3U8_URL_BEGIN , M3U8_URL_END ) ; if ( ! resolution . isEmpty ( ) && ! url . isEmpty ( ) ) { url = M3U8_URL_BEGIN + url + M3U8_URL_END ; int resolutionValue = Integer . parseInt ( resolution ) ; resolutionUrlMap . put ( resolutionValue , url ) ; } } return resolutionUrlMap ;
public class QueuedExecutions { /** * Wraps BoundedQueue Take method to have a corresponding update in queuedFlowMap lookup table */ public Pair < ExecutionReference , ExecutableFlow > fetchHead ( ) throws InterruptedException { } }
// Take the head of the queue (blocking), then keep the lookup map in sync.
final Pair<ExecutionReference, ExecutableFlow> head = this.queuedFlowList.take();
if (head != null && head.getFirst() != null) {
    this.queuedFlowMap.remove(head.getFirst().getExecId());
}
return head;
public class MRCompactorJobPropCreator { /** * Create MR job properties for a { @ link Dataset } . */ protected Optional < Dataset > createJobProps ( Dataset dataset ) throws IOException { } }
// Recompaction from output paths: nothing to do unless late data has arrived.
if (this.recompactFromOutputPaths && (!latePathsFound(dataset))) {
    LOG.info(String.format("Skipping recompaction for %s since there is no late data in %s", new Object[] { dataset.inputPaths(), dataset.inputLatePaths() }));
    return Optional.absent();
}
// Start from a copy of the compactor-wide state and layer dataset-specific flags on top.
State jobProps = new State();
jobProps.addAll(this.state);
jobProps.setProp(MRCompactor.COMPACTION_ENABLE_SUCCESS_FILE, false);
jobProps.setProp(MRCompactor.COMPACTION_INPUT_DEDUPLICATED, this.inputDeduplicated);
jobProps.setProp(MRCompactor.COMPACTION_OUTPUT_DEDUPLICATED, this.outputDeduplicated);
// Deduplicate only when the input is not already deduplicated but the output must be.
jobProps.setProp(MRCompactor.COMPACTION_SHOULD_DEDUPLICATE, !this.inputDeduplicated && this.outputDeduplicated);
if (this.recompactFromOutputPaths || !MRCompactor.datasetAlreadyCompacted(this.fs, dataset, renameSourceDirEnabled)) {
    if (renameSourceDirEnabled) {
        // Rename-based tracking: compact only directories that have not been renamed yet.
        Set<Path> newUnrenamedDirs = MRCompactor.getDeepestLevelUnrenamedDirsWithFileExistence(this.fs, dataset.inputPaths());
        if (getAllFilePathsRecursively(newUnrenamedDirs).isEmpty()) {
            // No unprocessed files at all -- skip this dataset.
            return Optional.absent();
        }
        LOG.info("[{}] has unprocessed directories for first time compaction: {}", dataset.getDatasetName(), newUnrenamedDirs);
        dataset.overwriteInputPaths(newUnrenamedDirs);
        dataset.setRenamePaths(newUnrenamedDirs);
    } else {
        addInputLateFilesForFirstTimeCompaction(jobProps, dataset);
    }
    LOG.info(String.format("Created MR job properties for input %s and output %s.", dataset.inputPaths(), dataset.outputPath()));
    dataset.setJobProps(jobProps);
    return Optional.of(dataset);
} else {
    // Already compacted: let the recompaction path decide whether a job is warranted.
    return obtainDatasetWithJobProps(jobProps, dataset);
}
public class ExcelFunctions { /** * Returns only the day of the month of a date ( 1 to 31) */ public static int day ( EvaluationContext ctx , Object date ) { } }
// Coerce the argument to a date/date-time and read its day-of-month field (1-31).
return Conversions.toDateOrDateTime(date, ctx).get(ChronoField.DAY_OF_MONTH);
public class DateTime { /** * 计算相差时长 * @ param date 对比的日期 * @ param unit 单位 { @ link DateUnit } * @ param formatLevel 格式化级别 * @ return 相差时长 */ public String between ( Date date , DateUnit unit , BetweenFormater . Level formatLevel ) { } }
return new DateBetween ( this , date ) . toString ( formatLevel ) ;
public class TargetBulkUpdateWindowLayout { /** * Reset the values in popup . */ public void resetComponents ( ) { } }
// Clear the form inputs.
dsNamecomboBox.clear();
descTextArea.clear();
targetBulkTokenTags.getTokenField().clear();
targetBulkTokenTags.populateContainer();
// Reset and hide the upload progress indicators, both locally and in the shared UI state.
progressBar.setValue(0F);
progressBar.setVisible(false);
managementUIState.getTargetTableFilters().getBulkUpload().setProgressBarCurrentValue(0F);
targetsCountLabel.setVisible(false);
public class ResponseLaunchTemplateData { /** * The elastic inference accelerator for the instance . * @ return The elastic inference accelerator for the instance . */ public java . util . List < LaunchTemplateElasticInferenceAcceleratorResponse > getElasticInferenceAccelerators ( ) { } }
// Lazily materialise the backing list so callers never observe null.
if (this.elasticInferenceAccelerators == null) {
    this.elasticInferenceAccelerators = new com.amazonaws.internal.SdkInternalList<LaunchTemplateElasticInferenceAcceleratorResponse>();
}
return this.elasticInferenceAccelerators;
public class AggregateDirContextProcessor { /** * @ see org . springframework . ldap . core . DirContextProcessor # preProcess ( javax . naming . directory . DirContext ) */ public void preProcess ( DirContext ctx ) throws NamingException { } }
// Give every registered processor a chance to prepare the context, in order.
for (final DirContextProcessor current : dirContextProcessors) {
    current.preProcess(ctx);
}
public class JavaClassProcessor { /** * This is inconsistent with the behavior of Class . getSuperClass ( ) */ private Optional < String > getSuperClassName ( String superName , boolean isInterface ) { } }
// Interfaces report no superclass here (unlike Class.getSuperclass(),
// which returns Object for them -- see the javadoc note).
if (superName == null || isInterface) {
    return Optional.<String>absent();
}
return Optional.of(createTypeName(superName));
public class UserAPI { /** * 黑名单管理获取公众号的黑名单列表 < br > * 该接口每次调用最多可拉取 10000 个OpenID , 当列表数较多时 , 可以通过多次拉取的方式来满足需求 。 * @ since 2.8.1 * @ param access _ tokenaccess _ token * @ param begin _ openid当 begin _ openid 为空时 , 默认从开头拉取 。 * @ return result */ public static GetblacklistResult tagsMembersGetblacklist ( String access_token , String begin_openid ) { } }
// An empty begin_openid tells the API to start from the head of the black list.
final String openid = begin_openid == null ? "" : begin_openid;
final String json = String.format("{\"begin_openid\":\"%s\"}", openid);
// POST the JSON payload to the black-list endpoint with the access token attached.
final HttpUriRequest httpUriRequest = RequestBuilder.post()
        .setHeader(jsonHeader)
        .setUri(BASE_URI + "/cgi-bin/tags/members/getblacklist")
        .addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token))
        .setEntity(new StringEntity(json, Charset.forName("utf-8")))
        .build();
return LocalHttpClient.executeJsonResult(httpUriRequest, GetblacklistResult.class);
public class PullRequestMergedStateChangedEventMetadataMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PullRequestMergedStateChangedEventMetadata pullRequestMergedStateChangedEventMetadata , ProtocolMarshaller protocolMarshaller ) { } }
// A null model object is a programming error on the caller's side.
if (pullRequestMergedStateChangedEventMetadata == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each member through its static marshalling binding.
    protocolMarshaller.marshall(pullRequestMergedStateChangedEventMetadata.getRepositoryName(), REPOSITORYNAME_BINDING);
    protocolMarshaller.marshall(pullRequestMergedStateChangedEventMetadata.getDestinationReference(), DESTINATIONREFERENCE_BINDING);
    protocolMarshaller.marshall(pullRequestMergedStateChangedEventMetadata.getMergeMetadata(), MERGEMETADATA_BINDING);
} catch (Exception e) {
    // Wrap any low-level failure in the SDK's client-side exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class UserPasswordHandler { /** * Called when a new blank record is required for the table / query . * @ param bDisplayOption If true , display any changes . */ public void doNewRecord ( boolean bDisplayOption ) { } }
// Locate the screen record that holds the password form fields.
Record recUserInfo = this.getOwner();
RecordOwner recordOwner = recUserInfo.getRecordOwner();
Record recUserScreenRecord = (Record) recordOwner.getScreenRecord();
// Blank all three password fields (current + both "new password" entries).
recUserScreenRecord.getField(UserScreenRecord.CURRENT_PASSWORD).setData(null);
recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_1).setData(null, DBConstants.DISPLAY, DBConstants.INIT_MOVE);
recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_2).setData(null, DBConstants.DISPLAY, DBConstants.INIT_MOVE);
// Clearing must not leave the fields flagged as modified.
recUserScreenRecord.getField(UserScreenRecord.CURRENT_PASSWORD).setModified(false);
recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_1).setModified(false);
recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_2).setModified(false);
// Reset the status line and let the base class finish the new-record handling.
recUserScreenRecord.getField(UserScreenRecord.STATUS_LINE).setString(DBConstants.BLANK);
super.doNewRecord(bDisplayOption);
public class CompositeRecordReader { /** * Report progress as the minimum of all child RR progress . */ public float getProgress ( ) throws IOException { } }
// The composite is only as far along as its slowest child reader.
float progress = 1.0f;
for (RecordReader<K, ? extends Writable> child : kids) {
    progress = Math.min(progress, child.getProgress());
}
return progress;
public class CurrentGpsInfo { /** * Method to add a new { @ link GGASentence } . * @ param gaa the sentence to add . */ public void addGGA ( GGASentence gga ) { } }
try {
    // Only take data from sentences that report a valid fix.
    if (gga.isValid()) {
        gpsFixQuality = gga.getFixQuality();
        position = gga.getPosition();
        altitude = gga.getAltitude();
        // Keep the first time seen; later sentences do not overwrite it.
        if (time == null) {
            time = gga.getTime();
        }
    }
} catch (Exception e) {
    // ignore it, this should be handled in the isValid,
    // if an exception is thrown, we can't deal with it here.
}
public class DynamicReportBuilder { /** * Defines the text to show when the data source is empty . < br > * By default the title and column headers are shown * @ param text * @ param style : the style of the text * @ return */ public DynamicReportBuilder setWhenNoData ( String text , Style style ) { } }
// Configure the "no data" section (style + message text) and switch the
// report's when-no-data behaviour to show that section.
this.report.setWhenNoDataStyle(style);
this.report.setWhenNoDataText(text);
this.report.setWhenNoDataType(DJConstants.WHEN_NO_DATA_TYPE_NO_DATA_SECTION);
// Return the builder for chaining.
return this;
public class Meter { /** * Marks the number of events . * @ param n the number of events */ public void mark ( final long n ) { } }
// Roll the rate windows forward if a tick interval has elapsed, then record
// the n new events in the counter and the 1/5/15-minute throughput trackers.
tickIfNecessary();
count.addAndGet(n);
m1Thp.update(n);
m5Thp.update(n);
m15Thp.update(n);
public class JdepsFilter { /** * Tests if the given source includes classes specified in - include option * This method can be used to determine if the given source should eagerly * be processed . */ public boolean matches ( Archive source ) { } }
// Without an -include pattern the decision falls back to the target filter.
if (includePattern == null) {
    return hasTargetFilter();
}
// With a pattern, scan the archive's class entries (dot-form, skipping the
// module descriptor) for any name that matches.
return source.reader().entries().stream()
        .map(entry -> entry.replace('/', '.'))
        .filter(entry -> !entry.equals("module-info.class"))
        .anyMatch(this::matches);
public class UniversalIdStrMessage { /** * Create a new { @ link UniversalIdStrMessage } object with specified content . * @ param content * @ return */ public static UniversalIdStrMessage newInstance ( byte [ ] content ) { } }
// Build a default message and attach the given payload.
final UniversalIdStrMessage message = newInstance();
message.setContent(content);
return message;
public class MCF1RGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setCharRot ( Integer newCharRot ) { } }
// Standard generated EMF setter: remember the old value and fire a SET
// notification so registered adapters can observe the change.
Integer oldCharRot = charRot;
charRot = newCharRot;
if (eNotificationRequired())
    eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.MCF1RG__CHAR_ROT, oldCharRot, charRot));
public class DefaultEventbus { /** * - - - SEND EVENT TO ALL LISTENERS IN THE SPECIFIED GROUP - - - */ @ Override public void broadcast ( String name , Tree payload , Groups groups , boolean local ) { } }
// Resolve the target endpoints, using the per-(event, groups) cache when possible.
String key = getCacheKey(name, groups);
ListenerEndpoint[] endpoints;
if (local) {
    endpoints = localBroadcasterCache.get(key);
} else {
    endpoints = broadcasterCache.get(key);
}
if (endpoints == null) {
    // Cache miss: collect every matching endpoint under the read lock.
    HashSet<ListenerEndpoint> list = new HashSet<>();
    readLock.lock();
    try {
        for (Map.Entry<String, HashMap<String, Strategy<ListenerEndpoint>>> entry : listeners.entrySet()) {
            if (Matcher.matches(name, entry.getKey())) {
                for (Map.Entry<String, Strategy<ListenerEndpoint>> test : entry.getValue().entrySet()) {
                    if (groups != null) {
                        // Restrict to endpoints whose group was requested.
                        final String testGroup = test.getKey();
                        for (String group : groups.groups()) {
                            if (group.equals(testGroup)) {
                                for (ListenerEndpoint endpoint : test.getValue().getAllEndpoints()) {
                                    if (local) {
                                        if (endpoint.isLocal()) {
                                            list.add(endpoint);
                                        }
                                    } else {
                                        list.add(endpoint);
                                    }
                                }
                            }
                        }
                    } else {
                        if (local) {
                            // BUGFIX: the inner guard previously re-tested the (always
                            // true here) "local" flag instead of endpoint.isLocal(),
                            // so local-only broadcasts were delivered to remote
                            // endpoints as well. Mirror the groups != null branch.
                            for (ListenerEndpoint endpoint : test.getValue().getAllEndpoints()) {
                                if (endpoint.isLocal()) {
                                    list.add(endpoint);
                                }
                            }
                        } else {
                            list.addAll(test.getValue().getAllEndpoints());
                        }
                    }
                }
            }
        }
    } finally {
        readLock.unlock();
    }
    endpoints = new ListenerEndpoint[list.size()];
    list.toArray(endpoints);
    // Populate the proper cache for the next broadcast of this event.
    if (local) {
        localBroadcasterCache.put(key, endpoints);
    } else {
        broadcasterCache.put(key, endpoints);
    }
}
if (endpoints.length == 0) {
    return;
}
if (endpoints.length == 1) {
    // Single target: invoke directly; listener failures are logged, not propagated.
    try {
        endpoints[0].on(name, payload, groups, true);
    } catch (Exception cause) {
        logger.error("Unable to invoke event listener!", cause);
    }
    return;
}
// Multiple targets: deliver to every local endpoint, but at most once per remote node.
HashSet<String> nodeSet = new HashSet<>(endpoints.length * 2);
for (ListenerEndpoint endpoint : endpoints) {
    if (endpoint.isLocal() || nodeSet.add(endpoint.getNodeID())) {
        try {
            endpoint.on(name, payload, groups, true);
        } catch (Exception cause) {
            logger.error("Unable to invoke event listener!", cause);
        }
    }
}
public class EndPointMgrImpl { /** * { @ inheritDoc } */ @ Override public EndPointInfo defineEndPoint ( String name , String host , int port ) { } }
try {
    EndPointInfoImpl ep;
    // Serialize all endpoint registration/updates on the endpoints map.
    synchronized (this.endpoints) {
        // if the endpoint with the same name already exists,
        // update it
        if (this.endpoints.containsKey(name)) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                // NOTE(review): trace message lacks a space after the name -- cosmetic only.
                Tr.event(tc, "The new endpoint " + name + "already exists. Update the properties of the registered service");
            }
            ep = updateEndpointMBean(name, host, port);
        } else {
            // create and register the end point
            ep = new EndPointInfoImpl(name, host, port);
            registerEndpointMBean(name, ep);
            this.endpoints.put(name, ep);
        }
    }
    return ep;
} catch (NotCompliantMBeanException ex) {
    // This should never happen
    throw new IllegalStateException("Encountered a situation that should never occur. The EndPointInfo resulted in NotCompliantMBeanException", ex);
}
public class MasterSlaveSchema { /** * Renew disabled data source names . * @ param disabledStateChangedEvent disabled state changed event */ @ Subscribe public synchronized void renew ( final DisabledStateChangedEvent disabledStateChangedEvent ) { } }
// Only react to events addressed to this schema; then push the new
// enabled/disabled state of the data source into the master-slave rule.
final OrchestrationShardingSchema schema = disabledStateChangedEvent.getShardingSchema();
if (getName().equals(schema.getSchemaName())) {
    ((OrchestrationMasterSlaveRule) masterSlaveRule).updateDisabledDataSourceNames(schema.getDataSourceName(), disabledStateChangedEvent.isDisabled());
}
public class PostgreSqlQueryGenerator { /** * Returns whether this attribute is stored in the entity table or another table such as a * junction table or referenced entity table . * @ param attr attribute * @ return whether this attribute is stored in another table than the entity table */ private static boolean isPersistedInOtherTable ( Attribute attr ) { } }
// Multi-value references live in a junction table.
if (isMultipleReferenceType(attr)) {
    return true;
}
// A bidirectional (mappedBy) one-to-many is stored on the referenced entity's side.
return attr.getDataType() == ONE_TO_MANY && attr.isMappedBy();
public class VerificationConditionGenerator { /** * Generate the logically inverted expression corresponding to a given * comparator . For example , inverting " < = " gives " > " , inverting " = = " gives " ! = " , * etc . * @ param test * - - - the binary comparator being inverted . * @ return */ public Expr invertCondition ( Expr expr , WyilFile . Expr elem ) { } }
// Binary comparators and logical connectives invert structurally.
if (expr instanceof Expr.Operator) {
    Expr.Operator binTest = (Expr.Operator) expr;
    switch (binTest.getOpcode()) {
    case WyalFile.EXPR_eq:
        return new Expr.NotEqual(binTest.getAll());
    case WyalFile.EXPR_neq:
        return new Expr.Equal(binTest.getAll());
    case WyalFile.EXPR_gteq:
        return new Expr.LessThan(binTest.getAll());
    case WyalFile.EXPR_gt:
        return new Expr.LessThanOrEqual(binTest.getAll());
    case WyalFile.EXPR_lteq:
        return new Expr.GreaterThan(binTest.getAll());
    case WyalFile.EXPR_lt:
        return new Expr.GreaterThanOrEqual(binTest.getAll());
    case WyalFile.EXPR_and: {
        // De Morgan: !(a && b) == !a || !b
        Expr[] operands = invertConditions(binTest.getAll(), elem);
        return new Expr.LogicalOr(operands);
    }
    case WyalFile.EXPR_or: {
        // De Morgan: !(a || b) == !a && !b
        Expr[] operands = invertConditions(binTest.getAll(), elem);
        return new Expr.LogicalAnd(operands);
    }
    }
} else if (expr instanceof Expr.Is) {
    // Invert a type test by negating the tested type.
    Expr.Is ei = (Expr.Is) expr;
    WyalFile.Type type = ei.getTestType();
    return new Expr.Is(ei.getTestExpr(), new WyalFile.Type.Negation(type));
}
// Otherwise, compare against false
// FIXME: this is just wierd and needs to be fixed.
return new Expr.LogicalNot(expr);
public class VisualizationUtils { /** * Plots the MSD curve for trajectory t * @ param t Trajectory to calculate the msd curve * @ param lagMin Minimum timelag ( e . g . 1,2,3 . . ) lagMin * timelag = elapsed time in seconds * @ param lagMax Maximum timelag ( e . g . 1,2,3 . . ) lagMax * timelag = elapsed time in seconds * @ param msdeval Evaluates the mean squared displacment */ public static Chart getMSDLineChart ( Trajectory t , int lagMin , int lagMax , AbstractMeanSquaredDisplacmentEvaluator msdeval ) { } }
double [ ] xData = new double [ lagMax - lagMin + 1 ] ; double [ ] yData = new double [ lagMax - lagMin + 1 ] ; msdeval . setTrajectory ( t ) ; msdeval . setTimelag ( lagMin ) ; for ( int i = lagMin ; i < lagMax + 1 ; i ++ ) { msdeval . setTimelag ( i ) ; double msdhelp = msdeval . evaluate ( ) [ 0 ] ; xData [ i - lagMin ] = i ; yData [ i - lagMin ] = msdhelp ; } // Create Chart Chart chart = QuickChart . getChart ( "MSD Line" , "LAG" , "MSD" , "MSD" , xData , yData ) ; // Show it // new SwingWrapper ( chart ) . displayChart ( ) ; return chart ;
public class AlertWindow { /** * Display the alert window . */ public void show ( ) { } }
if ( isShowing ) { Log . w ( "AlertWindow" , "AlertWindow is already displayed." ) ; } else { isShowing = true ; mWindowManager . addView ( mContentView , mParams ) ; }
public class Filters { /** * { @ link Filter } that exclude all { @ link ArchivePath } s that match a given Regular Expression { @ link Pattern } . * @ param regexp * The expression to exclude * @ return A Regular Expression based exclude { @ link Filter } */ public static Filter < ArchivePath > exclude ( final String regexp ) { } }
return getFilterInstance ( IMPL_CLASS_NAME_EXCLUDE_REGEXP_PATHS , new Class < ? > [ ] { String . class } , new Object [ ] { regexp } ) ;
public class MetadataAwareClassVisitor {
    /**
     * An order-sensitive invocation of {@link ClassVisitor#visitOuterClass(String, String, String)}.
     *
     * @param owner The outer class's internal name.
     * @param name The outer method's name or {@code null} if it does not exist.
     * @param descriptor The outer method's descriptor or {@code null} if it does not exist.
     */
    protected void onVisitOuterClass(String owner, String name, String descriptor) {
        // Delegates straight to the wrapped visitor; presumably a hook point for
        // subclasses to intercept before delegation -- confirm against class docs.
        super.visitOuterClass(owner, name, descriptor);
    }
}
public class Version { /** * Serialization only looks at major and minor , not micro or below . */ public static String decodeVersionForSerialization ( short version ) { } }
int major = ( version & MAJOR_MASK ) >> MAJOR_SHIFT ; int minor = ( version & MINOR_MASK ) >> MINOR_SHIFT ; return major + "." + minor ;
public class CrystalCell { /** * Converts a set of points so that the reference point falls in the unit cell . * This is useful to transform a whole chain at once , allowing some of the * atoms to be outside the unit cell , but forcing the centroid to be within it . * @ param points A set of points to transform ( in orthonormal coordinates ) * @ param reference The reference point , which is unmodified but which would * be in the unit cell were it to have been transformed . It is safe to * use a member of the points array here . */ public void transfToOriginCell ( Tuple3d [ ] points , Tuple3d reference ) { } }
reference = new Point3d ( reference ) ; // clone transfToCrystal ( reference ) ; int x = ( int ) Math . floor ( reference . x ) ; int y = ( int ) Math . floor ( reference . y ) ; int z = ( int ) Math . floor ( reference . z ) ; for ( Tuple3d point : points ) { transfToCrystal ( point ) ; point . x -= x ; point . y -= y ; point . z -= z ; transfToOrthonormal ( point ) ; }
public class Criteria {
    /**
     * Adds GreaterOrEqual Than (>=) criteria,
     * customer_id >= person_id
     *
     * @param attribute The field name to be used
     * @param value The field name to compare with
     */
    public void addGreaterOrEqualThanField(String attribute, Object value) {
        // ">= field" is expressed as "not less than field".
        // PAW
        // addSelectionCriteria ( FieldCriteria . buildNotLessCriteria ( attribute , value , getAlias ( ) ) ) ;
        addSelectionCriteria(FieldCriteria.buildNotLessCriteria(attribute, value, getUserAlias(attribute)));
    }
}
public class StringSerializer { /** * Checks whether mime types is supported by this serializer implementation */ @ Override public boolean canRead ( @ Nonnull MediaType mimeType , Type resultType ) { } }
MediaType type = mimeType . withoutParameters ( ) ; return ( type . is ( MediaType . ANY_TEXT_TYPE ) || MediaType . APPLICATION_XML_UTF_8 . withoutParameters ( ) . is ( type ) || MediaType . JSON_UTF_8 . withoutParameters ( ) . is ( type ) ) && String . class . equals ( TypeToken . of ( resultType ) . getRawType ( ) ) ;
public class ExampleTemplateMatching {
    /**
     * Computes the template match intensity image and displays the results. Brighter intensity indicates
     * a better match to the template.
     *
     * @param image image being searched
     * @param template template being searched for
     * @param mask mask applied when scoring the template
     */
    public static void showMatchIntensity(GrayF32 image, GrayF32 template, GrayF32 mask) {
        // create algorithm for computing intensity image
        TemplateMatchingIntensity<GrayF32> matchIntensity =
                FactoryTemplateMatching.createIntensity(TemplateScoreType.SUM_DIFF_SQ, GrayF32.class);
        // apply the template to the image
        matchIntensity.setInputImage(image);
        matchIntensity.process(template, mask);
        // get the results
        GrayF32 intensity = matchIntensity.getIntensity();
        // adjust the intensity image so that white indicates a good match and black a poor match
        // the scale is kept linear to highlight how ambiguous the solution is
        float min = ImageStatistics.min(intensity);
        float max = ImageStatistics.max(intensity);
        float range = max - min;
        // linearly remap [min, max] onto [0, 255]
        PixelMath.plus(intensity, -min, intensity);
        PixelMath.divide(intensity, range, intensity);
        PixelMath.multiply(intensity, 255.0f, intensity);
        // render the normalized intensity as a grayscale image and display it
        BufferedImage output = new BufferedImage(image.width, image.height, BufferedImage.TYPE_INT_BGR);
        VisualizeImageData.grayMagnitude(intensity, output, -1);
        ShowImages.showWindow(output, "Match Intensity", true);
    }
}
public class ArgumentImpl { /** * return a value matching to key * @ param intKey * @ return value matching key * @ throws PageException */ @ Override public Object getE ( int intKey ) throws PageException { } }
Iterator it = valueIterator ( ) ; // getMap ( ) . keySet ( ) . iterator ( ) ; int count = 0 ; Object o ; while ( it . hasNext ( ) ) { o = it . next ( ) ; if ( ( ++ count ) == intKey ) { return o ; // super . get ( o . toString ( ) ) ; } } throw new ExpressionException ( "invalid index [" + intKey + "] for argument scope" ) ;
public class ComputeInstanceMetadataResolverUtils { /** * Resolve a value as a string from the metadata json . * @ param json The json * @ param key The key * @ return An optional value */ public static Optional < String > stringValue ( JsonNode json , String key ) { } }
return Optional . ofNullable ( json . findValue ( key ) ) . map ( JsonNode :: asText ) ;
public class RobotUtil {
    /**
     * Captures a screenshot of the given screen region and writes it to a file.
     *
     * @param screenRect rectangular region of the screen to capture
     * @param outFile file to write the capture to
     * @return the file that was written
     */
    public static File captureScreen(Rectangle screenRect, File outFile) {
        // Capture via the single-argument overload, then persist the image.
        ImgUtil.write(captureScreen(screenRect), outFile);
        return outFile;
    }
}
public class BlobContainersInner {
    /**
     * Creates a new container under the specified account as described by request body. The container resource
     * includes metadata and properties for that container. It does not include a list of the blobs contained by
     * the container.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *        must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param containerName The name of the blob container within the specified storage account. Blob container names
     *        must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only.
     *        Every dash (-) character must be immediately preceded and followed by a letter or number.
     * @param publicAccess Specifies whether data in the container may be accessed publicly and the level of access.
     *        Possible values include: 'Container', 'Blob', 'None'
     * @param metadata A name-value pair to associate with the container as metadata.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the BlobContainerInner object
     */
    public Observable<BlobContainerInner> createAsync(String resourceGroupName, String accountName,
            String containerName, PublicAccess publicAccess, Map<String, String> metadata) {
        // Delegate to the ServiceResponse variant and unwrap the body for callers.
        return createWithServiceResponseAsync(resourceGroupName, accountName, containerName, publicAccess, metadata)
                .map(new Func1<ServiceResponse<BlobContainerInner>, BlobContainerInner>() {
                    @Override
                    public BlobContainerInner call(ServiceResponse<BlobContainerInner> response) {
                        return response.body();
                    }
                });
    }
}
public class SBTCompileMojo { /** * { @ inheritDoc } */ @ Override protected void internalExecute ( ) throws MojoExecutionException , MojoFailureException { } }
if ( skipMain ) { getLog ( ) . info ( "Not compiling main sources" ) ; return ; } super . internalExecute ( ) ; if ( outputDirectory . isDirectory ( ) ) { projectArtifact . setFile ( outputDirectory ) ; }
public class XmlTag { /** * Creates a compact string representation for the log . * @ param data the XmlTag to log * @ return string representation for log */ public static String toLog ( XmlTag data ) { } }
if ( data . channels == null ) { return data . getName ( ) + "(" + data . getOwner ( ) + ")" ; } else { return data . getName ( ) + "(" + data . getOwner ( ) + ")" + ( data . channels ) ; }
public class SubmitterLinkNameHtmlRenderer {
    /**
     * {@inheritDoc}
     */
    @Override
    public String getNameHtml() {
        final SubmitterLink submitterLink = submitterLinkRenderer.getGedObject();
        // An unset link renders as nothing rather than a broken anchor.
        if (!submitterLink.isSet()) {
            return "";
        }
        // Resolve the linked submitter by its id string.
        final Submitter submitter = (Submitter) submitterLink.find(submitterLink.getToString());
        // Render the submitter's name through a simple name renderer sharing this
        // renderer's factory and context.
        final GedRenderer<? extends GedObject> renderer =
            new SimpleNameRenderer(submitter.getName(),
                submitterLinkRenderer.getRendererFactory(),
                submitterLinkRenderer.getRenderingContext());
        final String nameHtml = renderer.getNameHtml();
        // Build an anchor to the submitter page: rendered name followed by the raw id in brackets.
        return "<a class=\"name\" href=\"submitter?db=" + submitterLink.getDbName()
            + "&amp;id=" + submitterLink.getToString() + "\">" + nameHtml
            + " [" + submitterLink.getToString() + "]" + "</a>";
    }
}
import java.io.*;
import java.lang.*;
import java.util.*;
import java.math.*;

class TilingOptions {
    /**
     * Function to calculate the number of ways the tiling problem can be solved.
     *
     * A 2 x N board tiled with 2 x 1 bricks satisfies T(n) = T(n-1) + T(n-2):
     * place one brick vertically (leaving 2 x (n-1)) or two horizontally
     * (leaving 2 x (n-2)).
     *
     * Example:
     * >>> calculateTilingOptions(4) -> 5
     * >>> calculateTilingOptions(3) -> 3
     * >>> calculateTilingOptions(5) -> 8
     *
     * Args:
     *   n: Number of tiles.
     * Returns:
     *   The number of ways 2 X N area can be filled using 2 X 1 bricks.
     */
    public static int calculateTilingOptions(int n) {
        // Bug fix: the empty board has exactly one tiling (place nothing), not zero.
        // With T(0) = 0 the recurrence undercounted every board, e.g. T(2) came out
        // as 1 even though a 2x2 area has 2 tilings. Negative widths have no tilings.
        if (n < 0) {
            return 0;
        }
        if (n <= 1) {
            return 1;
        }
        // Iterative evaluation replaces the original naive double recursion,
        // which was exponential in n.
        int prev = 1; // T(i - 2), starting at T(0)
        int curr = 1; // T(i - 1), starting at T(1)
        for (int i = 2; i <= n; i++) {
            int next = prev + curr;
            prev = curr;
            curr = next;
        }
        return curr;
    }
}
public class Quaternionf { /** * Set this quaternion to represent scaling , which results in a transformed vector to change * its length by the given < code > factor < / code > . * @ param factor * the scaling factor * @ return this */ public Quaternionf scaling ( float factor ) { } }
float sqrt = ( float ) Math . sqrt ( factor ) ; this . x = 0.0f ; this . y = 0.0f ; this . z = 0.0f ; this . w = sqrt ; return this ;
public class RowService { /** * Commits the pending changes . This operation is performed asynchronously . */ public final void commit ( ) { } }
if ( indexQueue == null ) { luceneIndex . commit ( ) ; } else { indexQueue . submitSynchronous ( new Runnable ( ) { @ Override public void run ( ) { luceneIndex . commit ( ) ; } } ) ; }
public class Predicates { /** * Returns a predicate that returns to true if the persistent attributes included with the { @ link HandlerInput } * contain the expected attribute value . * @ param key key of the attribute to evaluate * @ param value value of the attribute to evaluate * @ return true if the persistent attributes included with the { @ link HandlerInput } contain the expected * attribute value */ public static Predicate < HandlerInput > persistentAttribute ( String key , Object value ) { } }
return i -> i . getAttributesManager ( ) . getPersistentAttributes ( ) . containsKey ( key ) && value . equals ( i . getAttributesManager ( ) . getPersistentAttributes ( ) . get ( key ) ) ;
public class NamingRegisterRequestCodec {
    /**
     * Decodes the bytes to a name assignment.
     *
     * @param buf the byte array
     * @return a naming registration request
     * @throws org.apache.reef.io.network.naming.exception.NamingRuntimeException
     */
    @Override
    public NamingRegisterRequest decode(final byte[] buf) {
        // Deserialize the Avro record from the raw bytes.
        final AvroNamingRegisterRequest avroNamingRegisterRequest =
            AvroUtils.fromBytes(buf, AvroNamingRegisterRequest.class);
        // Rebuild the (identifier, socket address) tuple; toString() converts the
        // Avro CharSequence fields to plain Strings before use.
        return new NamingRegisterRequest(
            new NameAssignmentTuple(
                factory.getNewInstance(avroNamingRegisterRequest.getId().toString()),
                new InetSocketAddress(
                    avroNamingRegisterRequest.getHost().toString(),
                    avroNamingRegisterRequest.getPort())));
    }
}
public class systemsession { /** * Use this API to fetch systemsession resources of given names . */ public static systemsession [ ] get ( nitro_service service , Long sid [ ] ) throws Exception { } }
if ( sid != null && sid . length > 0 ) { systemsession response [ ] = new systemsession [ sid . length ] ; systemsession obj [ ] = new systemsession [ sid . length ] ; for ( int i = 0 ; i < sid . length ; i ++ ) { obj [ i ] = new systemsession ( ) ; obj [ i ] . set_sid ( sid [ i ] ) ; response [ i ] = ( systemsession ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ;
public class CollisionFormulaConfig {
    /**
     * Remove the formula node.
     *
     * @param root The root node (must not be <code>null</code>).
     * @param formula The formula name to remove (must not be <code>null</code>).
     * @throws LionEngineException If invalid argument.
     */
    public static void remove(Xml root, String formula) {
        Check.notNull(root);
        Check.notNull(formula);
        // Removes every formula child whose name attribute matches. Assumes
        // getChildren() returns a snapshot so removal during iteration is
        // safe -- TODO confirm against the Xml API.
        for (final Xml node : root.getChildren(NODE_FORMULA)) {
            if (node.readString(ATT_NAME).equals(formula)) {
                root.removeChild(node);
            }
        }
    }
}
public class JDBCResultSet {
    /**
     * <!-- start generic documentation -->
     * Moves the cursor to the front of
     * this <code>ResultSet</code> object, just before the
     * first row. This method has no effect if the result set contains no rows.
     * <!-- end generic documentation -->
     *
     * @exception SQLException if a database access error
     *            occurs, this method is called on a closed result set or the
     *            result set type is <code>TYPE_FORWARD_ONLY</code>
     * @exception SQLFeatureNotSupportedException if the JDBC driver does not support
     *            this method
     * @since JDK 1.2 (JDK 1.1.x developers: read the overview for
     *        JDBCResultSet)
     */
    public void beforeFirst() throws SQLException {
        // Order matters: validate the result set state before touching the navigator.
        checkClosed();
        checkNotForwardOnly();
        // Repositioning is illegal while on the insert row or while a row update is pending.
        if (isOnInsertRow || isRowUpdated) {
            throw Util.sqlExceptionSQL(ErrorCode.X_24513);
        }
        navigator.beforeFirst();
    }
}