signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BaseBigtableInstanceAdminClient { /** * Gets information about a cluster . * < p > Sample code : * < pre > < code > * try ( BaseBigtableInstanceAdminClient baseBigtableInstanceAdminClient = BaseBigtableInstanceAdminClient . create ( ) ) { * ClusterName name = ClusterName . of ( " [ PROJECT ] " , " [ INSTANCE ] " , " [ CLUSTER ] " ) ; * Cluster response = baseBigtableInstanceAdminClient . getCluster ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The unique name of the requested cluster . Values are of the form * ` projects / & lt ; project & gt ; / instances / & lt ; instance & gt ; / clusters / & lt ; cluster & gt ; ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Cluster getCluster ( String name ) { } }
GetClusterRequest request = GetClusterRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getCluster ( request ) ;
public class CPSpecificationOptionPersistenceImpl { /** * Returns the first cp specification option in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp specification option * @ throws NoSuchCPSpecificationOptionException if a matching cp specification option could not be found */ @ Override public CPSpecificationOption findByUuid_C_First ( String uuid , long companyId , OrderByComparator < CPSpecificationOption > orderByComparator ) throws NoSuchCPSpecificationOptionException { } }
CPSpecificationOption cpSpecificationOption = fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; if ( cpSpecificationOption != null ) { return cpSpecificationOption ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchCPSpecificationOptionException ( msg . toString ( ) ) ;
public class HCHead { /** * Append some JavaScript code * @ param aJS * The JS to be added . May not be < code > null < / code > . * @ return this */ @ Nonnull public final HCHead addJS ( @ Nonnull final IHCNode aJS ) { } }
ValueEnforcer . notNull ( aJS , "JS" ) ; if ( ! HCJSNodeDetector . isJSNode ( aJS ) ) throw new IllegalArgumentException ( aJS + " is not a valid JS node!" ) ; m_aJS . add ( aJS ) ; return this ;
public class OsmMapShapeConverter { /** * Convert a { @ link GeometryCollection } to a list of Map shapes and add to * the map * @ param map * @ param geometryCollection * @ return */ public List < OsmDroidMapShape > addToMap ( MapView map , GeometryCollection < Geometry > geometryCollection ) { } }
List < OsmDroidMapShape > shapes = new ArrayList < OsmDroidMapShape > ( ) ; for ( Geometry geometry : geometryCollection . getGeometries ( ) ) { OsmDroidMapShape shape = addToMap ( map , geometry ) ; shapes . add ( shape ) ; } return shapes ;
public class ConfigImpl { /** * is file a directory or not , touch if not exist * @ param directory * @ return true if existing directory or has created new one */ protected boolean isDirectory ( Resource directory ) { } }
if ( directory . exists ( ) ) return directory . isDirectory ( ) ; try { directory . createDirectory ( true ) ; return true ; } catch ( IOException e ) { e . printStackTrace ( getErrWriter ( ) ) ; } return false ;
public class AbstractCache {

    /**
     * The complete cache is read and then replaces the current stored values in the cache.
     * The cache must be read in this order:
     * <ul>
     * <li>create new cache maps</li>
     * <li>read the cache contents into the new maps</li>
     * <li>replace the old cache maps with the new maps</li>
     * </ul>
     * This order is important, otherwise
     * <ul>
     * <li>if the cache is initialized for the first time, the cache would not be
     * <code>null</code> and would return wrong values</li>
     * <li>existing, already-read values could not be read while a reload is in progress</li>
     * </ul>
     *
     * @throws CacheReloadException if the cache could not be read (the exception is also
     *             written to the error log)
     */
    private void readCache() throws CacheReloadException {
        // Build the replacement maps first and populate them fully before publishing:
        // readers keep seeing the old (consistent) maps during the reload.
        final Map<Long, T> newCache4Id = getNewCache4Id();
        final Map<String, T> newCache4Name = getNewCache4Name();
        final Map<UUID, T> newCache4UUID = getNewCache4UUID();
        try {
            readCache(newCache4Id, newCache4Name, newCache4UUID);
        } catch (final CacheReloadException e) {
            // log and rethrow unchanged — callers rely on CacheReloadException
            AbstractCache.LOG.error("Read Cache for " + getClass() + " failed", e);
            throw e;
        // CHECKSTYLE:OFF
        } catch (final Exception e) {
        // CHECKSTYLE:ON
            // wrap any unexpected failure so the declared contract is preserved
            AbstractCache.LOG.error("Unexpected error while reading Cache for " + getClass(), e);
            throw new CacheReloadException("Unexpected error while reading Cache " + "for "
                + getClass(), e);
        }
        // Publish the new maps by replacing the references. The fields are assumed to be
        // volatile (see comment below from the original author), which makes each single
        // reference swap safely visible to other threads.
        // it is thread safe because of volatile
        this.cache4Id = newCache4Id;
        this.cache4Name = newCache4Name;
        this.cache4UUID = newCache4UUID;
    }
}
public class SearchRequestEntity { /** * Get query language . * @ return query language * @ throws UnsupportedQueryException { @ link UnsupportedQueryException } */ public String getQueryLanguage ( ) throws UnsupportedQueryException { } }
if ( body . getChild ( 0 ) . getName ( ) . getNamespaceURI ( ) . equals ( "DAV:" ) && body . getChild ( 0 ) . getName ( ) . getLocalPart ( ) . equals ( "sql" ) ) { return "sql" ; } else if ( body . getChild ( 0 ) . getName ( ) . getNamespaceURI ( ) . equals ( "DAV:" ) && body . getChild ( 0 ) . getName ( ) . getLocalPart ( ) . equals ( "xpath" ) ) { return "xpath" ; } throw new UnsupportedOperationException ( ) ;
public class SingleDbJDBCConnection {

    /**
     * {@inheritDoc}
     *
     * <p>Runs the "find workspace properties on value storage" query for this container.
     * The {@link java.sql.PreparedStatement} is created lazily on first use and cached in
     * the {@code findWorkspacePropertiesOnValueStorage} field, so subsequent calls on the
     * same connection reuse it and only rebind the container-name parameter.
     */
    protected ResultSet findWorkspacePropertiesOnValueStorage() throws SQLException {
        // lazy statement initialization — prepared once per connection
        if (findWorkspacePropertiesOnValueStorage == null) {
            findWorkspacePropertiesOnValueStorage =
                dbConnection.prepareStatement(FIND_WORKSPACE_PROPERTIES_ON_VALUE_STORAGE);
        }
        // parameter 1: the container (workspace) name
        findWorkspacePropertiesOnValueStorage.setString(1, containerConfig.containerName);
        return findWorkspacePropertiesOnValueStorage.executeQuery();
    }
}
public class IndexChangeAdapters { /** * Create an { @ link IndexChangeAdapter } implementation that handles node type information . * @ param propertyName a symbolic name of the property that will be sent to the { @ link ProvidedIndex } when the adapter * notices that there are either primary type of mixin type changes . * @ param context the execution context ; may not be null * @ param matcher the node type matcher used to determine which nodes should be included in the index ; may not be null * @ param workspaceName the name of the workspace ; may not be null * @ param index the local index that should be used ; may not be null * @ return the new { @ link IndexChangeAdapter } ; never null */ public static IndexChangeAdapter forNodeTypes ( String propertyName , ExecutionContext context , NodeTypePredicate matcher , String workspaceName , ProvidedIndex < ? > index ) { } }
return new NodeTypesChangeAdapter ( propertyName , context , matcher , workspaceName , index ) ;
public class HystrixInvocationHandler { /** * If the method param of InvocationHandler . invoke is not accessible , i . e in a package - private * interface , the fallback call in hystrix command will fail cause of access restrictions . But * methods in dispatch are copied methods . So setting access to dispatch method doesn ' t take * effect to the method in InvocationHandler . invoke . Use map to store a copy of method to invoke * the fallback to bypass this and reducing the count of reflection calls . * @ return cached methods map for fallback invoking */ static Map < Method , Method > toFallbackMethod ( Map < Method , MethodHandler > dispatch ) { } }
Map < Method , Method > result = new LinkedHashMap < Method , Method > ( ) ; for ( Method method : dispatch . keySet ( ) ) { method . setAccessible ( true ) ; result . put ( method , method ) ; } return result ;
public class DefaultGrailsControllerClass {

    /**
     * Register a new {@link grails.web.UrlConverter} with the controller, re-keying every
     * action (and the default action name) through the converter.
     *
     * @param urlConverter The {@link grails.web.UrlConverter} to register
     */
    @Override
    public void registerUrlConverter(UrlConverter urlConverter) {
        // Iterate over a snapshot of the key set: the loop body mutates 'actions'
        // (remove + put), which would otherwise fail with ConcurrentModificationException.
        for (String actionName : new ArrayList<String>(actions.keySet())) {
            // map the converted URI back to the original view/action name
            actionUriToViewName.put(urlConverter.toUrlElement(actionName), actionName);
            // re-key the action entry under its converted URL element
            actions.put(urlConverter.toUrlElement(actionName), actions.remove(actionName));
        }
        // the default action name must be converted the same way as the map keys
        defaultActionName = urlConverter.toUrlElement(defaultActionName);
    }
}
public class SmoothieMap {

    /**
     * Performs the given action for each entry in this map until all entries have been
     * processed or the action throws an exception. Actions are performed in the order of
     * {@linkplain #entrySet() entry set} iteration. Exceptions thrown by the action are
     * relayed to the caller.
     *
     * @param action The action to be performed for each entry
     * @throws NullPointerException if the specified action is null
     * @throws ConcurrentModificationException if any structural modification of the map
     *             (new entry insertion or an entry removal) is detected during iteration
     * @see #forEachWhile(BiPredicate)
     */
    @Override
    public final void forEach(BiConsumer<? super K, ? super V> action) {
        Objects.requireNonNull(action);
        // Capture the modification count up front; compared after the full pass to detect
        // concurrent structural modification.
        int mc = this.modCount;
        Segment<K, V> segment;
        // Walk all segments; nextSegmentIndex() returns a negative index when there are no
        // more segments, terminating the loop.
        for (long segmentIndex = 0; segmentIndex >= 0;
                segmentIndex = nextSegmentIndex(segmentIndex, segment)) {
            // assignment inside the call expression: 'segment' is needed by
            // nextSegmentIndex() in the loop update
            (segment = segment(segmentIndex)).forEach(action);
        }
        if (mc != modCount)
            throw new ConcurrentModificationException();
    }
}
public class CommerceAccountOrganizationRelPersistenceImpl { /** * Caches the commerce account organization rel in the entity cache if it is enabled . * @ param commerceAccountOrganizationRel the commerce account organization rel */ @ Override public void cacheResult ( CommerceAccountOrganizationRel commerceAccountOrganizationRel ) { } }
entityCache . putResult ( CommerceAccountOrganizationRelModelImpl . ENTITY_CACHE_ENABLED , CommerceAccountOrganizationRelImpl . class , commerceAccountOrganizationRel . getPrimaryKey ( ) , commerceAccountOrganizationRel ) ; commerceAccountOrganizationRel . resetOriginalValues ( ) ;
public class BuildDatabase { /** * Sets the Duplicate IDs for all the SpecTopics in the Database . */ public void setDatabaseDuplicateIds ( ) { } }
// Set the spec topic duplicate ids based on topic title for ( final Entry < Integer , List < ITopicNode > > topicTitleEntry : topics . entrySet ( ) ) { final List < ITopicNode > topics = topicTitleEntry . getValue ( ) ; if ( topics . size ( ) > 1 ) { for ( int i = 1 ; i < topics . size ( ) ; i ++ ) { topics . get ( i ) . setDuplicateId ( Integer . toString ( i ) ) ; } } }
public class DateTimeApiParameterMapper {

    /**
     * Converts a java.time {@link TemporalAccessor} into a JDBC-compatible value.
     *
     * <p>Date/time types map to {@code java.sql.Timestamp}/{@code Date}/{@code Time};
     * partial types ({@code Year}, {@code YearMonth}, {@code MonthDay}, {@code Month},
     * {@code DayOfWeek}, {@code Era}) map to numeric values. The order of the
     * {@code instanceof} checks matters: more specific types must be tested before the
     * generic {@code ChronoLocalDate}/{@code isCastTarget} fallbacks. Unrecognized values
     * are returned unchanged.
     *
     * {@inheritDoc}
     *
     * @see jp.co.future.uroborosql.parameter.mapper.BindParameterMapper#toJdbc(java.lang.Object,
     *      java.sql.Connection, jp.co.future.uroborosql.parameter.mapper.BindParameterMapperManager)
     */
    @Override
    public Object toJdbc(final TemporalAccessor original, final Connection connection,
            final BindParameterMapperManager parameterMapperManager) {
        /* Convert to java.sql date/time types */
        if (original instanceof LocalDateTime) {
            return java.sql.Timestamp.valueOf((LocalDateTime) original);
        }
        if (original instanceof OffsetDateTime) {
            return new java.sql.Timestamp(((OffsetDateTime) original).toInstant().toEpochMilli());
        }
        if (original instanceof ZonedDateTime) {
            return new java.sql.Timestamp(((ZonedDateTime) original).toInstant().toEpochMilli());
        }
        if (original instanceof LocalDate) {
            return java.sql.Date.valueOf((LocalDate) original);
        }
        if (original instanceof LocalTime) {
            return new java.sql.Time(toTime(original));
        }
        if (original instanceof OffsetTime) {
            Calendar calendar = Calendar.getInstance();
            int year = calendar.get(Calendar.YEAR);
            // anchor the offset time to Jan 1 of the current year to get an instant...
            long thisYearDate = ((OffsetTime) original)
                .atDate(LocalDate.of(year, Month.JANUARY, 1)).toInstant().toEpochMilli();
            calendar.setTimeInMillis(thisYearDate);
            // ...then shift back to 1970 (the epoch year) so only the time-of-day remains
            calendar.add(Calendar.YEAR, 1970 - year);
            return new java.sql.Time(calendar.getTimeInMillis());
        }
        /* Convert to numeric values */
        if (original instanceof Year) {
            return ((Year) original).getValue();
        }
        if (original instanceof YearMonth) {
            // encoded as yyyyMM, e.g. 2024-03 -> 202403
            YearMonth yearMonth = (YearMonth) original;
            return yearMonth.getYear() * 100 + yearMonth.getMonthValue();
        }
        if (original instanceof MonthDay) {
            // encoded as MMdd, e.g. --03-15 -> 315
            MonthDay monthDay = (MonthDay) original;
            return monthDay.getMonthValue() * 100 + monthDay.getDayOfMonth();
        }
        if (original instanceof Month) {
            return ((Month) original).getValue();
        }
        if (original instanceof DayOfWeek) {
            return ((DayOfWeek) original).getValue();
        }
        if (original instanceof Era) {
            return ((Era) original).getValue();
        }
        // Conversion of ChronoLocalDate implementations (e.g. JapaneseDate) to java.sql.Date
        if (original instanceof ChronoLocalDate) {
            return new java.sql.Date(((ChronoLocalDate) original).atTime(LocalTime.MIDNIGHT)
                .atZone(ZoneId.systemDefault()).toInstant().toEpochMilli());
        }
        // Other types
        if (original instanceof Instant) {
            return new java.sql.Timestamp(((Instant) original).toEpochMilli());
        }
        if (isCastTarget(original)) {
            // pick Timestamp/Date/Time depending on which fields the value supports
            boolean incDate = Arrays.stream(DATE_TARGET).anyMatch(original::isSupported);
            boolean incTime = Arrays.stream(TIME_TARGET).anyMatch(original::isSupported);
            if (incDate && incTime) {
                return new java.sql.Timestamp(toTime(original));
            } else if (incDate) {
                return new java.sql.Date(toTime(original));
            } else {
                return new java.sql.Time(toTime(original));
            }
        }
        // unrecognized temporal type — pass through unchanged
        return original;
    }
}
public class LPredicateBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T > LPredicate < T > predicateFrom ( Consumer < LPredicateBuilder < T > > buildingFunction ) { } }
LPredicateBuilder builder = new LPredicateBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class LogFileEntry {

    /**
     * Writes the next {@code numberOfBytes} bytes from the stream to the outputWriter,
     * decoding them as UTF-8 characters.
     *
     * @param stream the stream positioned at the start of the UTF-8 encoded span
     * @param outputWriter the writer receiving the decoded characters
     * @param numberOfBytes the exact number of bytes to consume from the stream
     * @throws IOException if reading fails, including {@link java.io.EOFException} when the
     *             stream ends before {@code numberOfBytes} bytes could be read
     */
    private void write(final DataInputStream stream, final Writer outputWriter,
            final int numberOfBytes) throws IOException {
        // FIX: read the complete span before decoding. The previous implementation decoded
        // each 64K chunk independently, which corrupts any multi-byte UTF-8 sequence that
        // straddles a chunk boundary, and it silently truncated the output when the stream
        // ended early. readFully() also handles short reads and fails loudly on EOF.
        // NOTE(review): this buffers numberOfBytes bytes at once instead of 64K; assumed
        // acceptable for log-entry sizes — confirm against callers.
        final byte[] buf = new byte[numberOfBytes];
        stream.readFully(buf, 0, numberOfBytes);
        outputWriter.write(new String(buf, java.nio.charset.StandardCharsets.UTF_8));
    }
}
public class JobHistoryService {

    /**
     * Returns the {@link Flow} instance matching the application ID and run ID.
     *
     * @param cluster the cluster identifier
     * @param user the user running the jobs
     * @param appId the application description
     * @param runId the specific run ID for the flow
     * @param populateTasks whether or not to populate the task details for each job
     * @return the matching flow, or {@code null} if none was found
     * @throws IOException on HBase scan failure
     */
    public Flow getFlow(String cluster, String user, String appId, long runId,
            boolean populateTasks) throws IOException {
        Flow flow = null;
        // Row keys are "cluster!user!app!encodedRunId!" — the trailing EMPTY_BYTES adds
        // the final separator so the prefix matches only this exact run.
        byte[] startRow = ByteUtil.join(Constants.SEP_BYTES, Bytes.toBytes(cluster),
            Bytes.toBytes(user), Bytes.toBytes(appId),
            Bytes.toBytes(FlowKey.encodeRunId(runId)), Constants.EMPTY_BYTES);
        LOG.info("Reading job_history rows start at " + Bytes.toStringBinary(startRow));
        Scan scan = new Scan();
        // start scanning history at cluster!user!app!run!
        scan.setStartRow(startRow);
        // require that all results match this flow prefix; WhileMatchFilter stops the scan
        // at the first non-matching row instead of scanning to the end of the table
        scan.setFilter(new WhileMatchFilter(new PrefixFilter(startRow)));
        // at most one flow can match the full run prefix
        List<Flow> flows = createFromResults(scan, populateTasks, 1);
        if (flows.size() > 0) {
            flow = flows.get(0);
        }
        return flow;
    }
}
public class ScriptDataContextUtil { /** * @ return a data context for executing a script plugin or provider , which contains two datasets : * plugin : { vardir : [ dir ] , tmpdir : [ dir ] } * and * rundeck : { base : [ basedir ] } * @ param framework framework */ public static DataContext createScriptDataContext ( final Framework framework ) { } }
BaseDataContext data = new BaseDataContext ( ) ; final File vardir = new File ( Constants . getBaseVar ( framework . getBaseDir ( ) . getAbsolutePath ( ) ) ) ; final File tmpdir = new File ( vardir , "tmp" ) ; data . group ( "plugin" ) . put ( "vardir" , vardir . getAbsolutePath ( ) ) ; data . group ( "plugin" ) . put ( "tmpdir" , tmpdir . getAbsolutePath ( ) ) ; data . put ( "rundeck" , "base" , framework . getBaseDir ( ) . getAbsolutePath ( ) ) ; return data ;
public class HostsResource { /** * Returns various status information about the hosts . * @ param hosts The hosts . * @ param statusFilter An optional status filter . * @ return The response . */ @ POST @ Path ( "/statuses" ) @ Produces ( APPLICATION_JSON ) @ Timed @ ExceptionMetered public Map < String , HostStatus > hostStatuses ( final List < String > hosts , @ QueryParam ( "status" ) @ DefaultValue ( "" ) final String statusFilter ) { } }
final Map < String , HostStatus > statuses = Maps . newHashMap ( ) ; for ( final String host : hosts ) { final HostStatus status = model . getHostStatus ( host ) ; if ( status != null ) { if ( isNullOrEmpty ( statusFilter ) || statusFilter . equals ( status . getStatus ( ) . toString ( ) ) ) { statuses . put ( host , status ) ; } } } return statuses ;
public class BoneCPConnectionProvider {

    /**
     * Obtains a connection from the pool and applies the configured transaction isolation
     * and auto-commit settings before handing it out. If configuring the connection fails,
     * the connection is closed (returned to the pool) and the original failure is rethrown.
     *
     * {@inheritDoc}
     *
     * @see org.hibernate.engine.jdbc.connections.spi.ConnectionProvider#getConnection()
     */
    public Connection getConnection() throws SQLException {
        Connection connection = this.pool.getConnection();
        try {
            // set the Transaction Isolation if defined; only touch the connection when the
            // current value differs, to avoid an unnecessary round trip
            if ((this.isolation != null)
                && (connection.getTransactionIsolation() != this.isolation.intValue())) {
                connection.setTransactionIsolation(this.isolation.intValue());
            }
            // toggle autoCommit to the configured value if it differs
            if (connection.getAutoCommit() != this.autocommit) {
                connection.setAutoCommit(this.autocommit);
            }
            return connection;
        } catch (SQLException e) {
            // Configuration failed: close the connection so it is not leaked, then rethrow
            // the ORIGINAL exception. A failure while closing is only logged — the root
            // cause (e) is what the caller needs to see.
            try {
                connection.close();
            } catch (Exception e2) {
                logger.warn(
                    "Setting connection properties failed and closing this connection failed again",
                    e);
            }
            throw e;
        }
    }
}
public class HttpUtil {

    /**
     * Determine if a uri is in asterisk-form according to
     * <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>:
     * the path is exactly {@code "*"} and every other URI component is absent.
     */
    public static boolean isAsteriskForm(URI uri) {
        if (!"*".equals(uri.getPath())) {
            return false;
        }
        // all remaining components must be absent
        return uri.getScheme() == null
            && uri.getSchemeSpecificPart() == null
            && uri.getHost() == null
            && uri.getAuthority() == null
            && uri.getQuery() == null
            && uri.getFragment() == null;
    }
}
public class ObjectWriter {

    /**
     * Reads up to {@code len} bytes into {@code b}, looping until the requested count is
     * reached or the stream is exhausted. (Should switch to IOUtils.read() when we update
     * to the latest version of commons-io.)
     *
     * @param in source stream
     * @param b destination buffer
     * @param off offset in {@code b} where writing starts
     * @param len number of bytes requested
     * @return {@code len} when fully read; the partial count on early end of stream; or
     *         {@code -1} if the stream was already at end of stream
     * @throws IOException if reading fails
     */
    private static int readFully(InputStream in, byte[] b, int off, int len)
        throws IOException {
        int total = 0;
        while (total < len) {
            final int got = in.read(b, off + total, len - total);
            if (got < 0) {
                // end of stream: -1 only if nothing at all was read
                return (total == 0) ? -1 : total;
            }
            total += got;
        }
        return total;
    }
}
public class IfcBSplineSurfaceWithKnotsImpl {

    /**
     * Returns the V-multiplicities list via the generated EMF reflective accessor.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<Long> getVMultiplicities() {
        // generated code — eGet resolves the structural feature reflectively;
        // the cast is safe per the generated model metadata
        return (EList<Long>) eGet(
            Ifc4Package.Literals.IFC_BSPLINE_SURFACE_WITH_KNOTS__VMULTIPLICITIES, true);
    }
}
public class NodeManager { /** * This method rebuilds members related to a ResourceRequestInfo instance , * which were not directly persisted themselves . * @ param resourceRequestInfo The ResourceRequestInfo instance to be restored */ public void restoreResourceRequestInfo ( ResourceRequestInfo resourceRequestInfo ) { } }
List < RequestedNode > requestedNodes = null ; List < String > hosts = resourceRequestInfo . getHosts ( ) ; if ( hosts != null && hosts . size ( ) > 0 ) { requestedNodes = new ArrayList < RequestedNode > ( hosts . size ( ) ) ; for ( String host : hosts ) { requestedNodes . add ( resolve ( host , resourceRequestInfo . getType ( ) ) ) ; } } resourceRequestInfo . nodes = requestedNodes ;
public class Edge { /** * Insert the given suffix at the supplied active point . * @ param suffix The suffix to insert . * @ param activePoint The active point to insert it at . * @ return */ void insert ( Suffix < T , S > suffix , ActivePoint < T , S > activePoint ) { } }
Object item = suffix . getEndItem ( ) ; Object nextItem = getItemAt ( activePoint . getLength ( ) ) ; if ( item . equals ( nextItem ) ) { activePoint . incrementLength ( ) ; } else { split ( suffix , activePoint ) ; suffix . decrement ( ) ; activePoint . updateAfterInsert ( suffix ) ; if ( suffix . isEmpty ( ) ) { } else tree . insert ( suffix ) ; }
public class U {

    /**
     * Documented, #keys
     *
     * @param object the map whose keys are requested
     * @return the map's key set (a live view backed by the map)
     */
    public static <K, V> Set<K> keys(final Map<K, V> object) {
        final Set<K> keySet = object.keySet();
        return keySet;
    }
}
public class NodeWrapper { /** * { @ inheritDoc } */ @ Override public int getFingerprint ( ) { } }
int retVal ; final int nameCount = getNameCode ( ) ; if ( nameCount == - 1 ) { retVal = - 1 ; } else { retVal = nameCount & 0xfffff ; } return retVal ;
public class CmsFlexController {

    /**
     * Checks if the request has the "If-Modified-Since" header set, and if so, if the
     * header date value is equal to the provided last modification date.<p>
     *
     * @param req the request to read the "If-Modified-Since" date header from
     * @param dateLastModified the date to compare the header with
     * @return <code>true</code> if the header is set and equals the provided date
     */
    public static boolean isNotModifiedSince(HttpServletRequest req, long dateLastModified) {
        try {
            final long headerDate = req.getDateHeader(CmsRequestUtil.HEADER_IF_MODIFIED_SINCE);
            // header present when > -1; compare at whole-second resolution since
            // HTTP dates carry no milliseconds
            return (headerDate > -1) && (((dateLastModified / 1000) * 1000) == headerDate);
        } catch (Exception ex) {
            // some clients (e.g. User-Agent: BlackBerry7290/4.1.0 Profile/MIDP-2.0
            // Configuration/CLDC-1.1 VendorID/111) send an invalid "If-Modified-Since"
            // header (e.g. in german locale) which breaks with
            // http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html — this must be caught,
            // because the subsequent request for the 500 error handler would run into the
            // same exception.
            LOG.warn(Messages.get().getBundle().key(
                Messages.ERR_HEADER_IFMODIFIEDSINCE_FORMAT_3,
                new Object[] {
                    CmsRequestUtil.HEADER_IF_MODIFIED_SINCE,
                    req.getHeader(CmsRequestUtil.HEADER_USER_AGENT),
                    req.getHeader(CmsRequestUtil.HEADER_IF_MODIFIED_SINCE)}));
        }
        return false;
    }
}
public class MutateInBuilder { /** * Prepend multiple values at once in an existing array , pushing all values in the collection ' s iteration order to * the front / start of the array . * First value becomes the first element of the array , second value the second , etc . . . All existing values * are shifted right in the array , by the number of inserted elements . * Each item in the collection is inserted as an individual element of the array , but a bit of overhead * is saved compared to individual { @ link # arrayPrepend ( String , Object , boolean ) } ( String , Object ) } by grouping * mutations in a single packet . * For example given an array [ A , B , C ] , prepending the values X and Y yields [ X , Y , A , B , C ] * and not [ [ X , Y ] , A , B , C ] . * @ param path the path of the array . * @ param values the collection of values to insert at the front of the array as individual elements . * @ param optionsBuilder { @ link SubdocOptionsBuilder } * @ param < T > the type of data in the collection ( must be JSON serializable ) . */ public < T > MutateInBuilder arrayPrependAll ( String path , Collection < T > values , SubdocOptionsBuilder optionsBuilder ) { } }
asyncBuilder . arrayPrependAll ( path , values , optionsBuilder ) ; return this ;
public class GenKUmsAllCamt05200107 {

    /**
     * Creates a single CAMT transaction entry from an HBCI4Java transaction.
     *
     * @param line a transaction from HBCI4Java.
     * @return the CAMT transaction entry.
     * @throws Exception on conversion failure.
     */
    private ReportEntry9 createLine(UmsLine line) throws Exception {
        ReportEntry9 entry = new ReportEntry9();
        EntryDetails8 detail = new EntryDetails8();
        entry.getNtryDtls().add(detail);
        EntryTransaction9 tx = new EntryTransaction9();
        detail.getTxDtls().add(tx);

        // Determine whether this is a debit or a credit entry (positive amount = credit)
        boolean haben = line.value != null
            && line.value.getBigDecimalValue().compareTo(BigDecimal.ZERO) > 0;
        entry.setCdtDbtInd(haben ? CreditDebitCode.CRDT : CreditDebitCode.DBIT);

        // Booking id
        {
            TransactionReferences3 ref = new TransactionReferences3();
            tx.setRefs(ref);
            ProprietaryReference1 prt = new ProprietaryReference1();
            prt.setRef(line.id);
            ref.getPrtry().add(prt);
        }

        // Counterpart account: IBAN + name. For a credit the counterpart is the debtor,
        // for a debit it is the creditor.
        if (line.other != null) {
            TransactionParties4 other = new TransactionParties4();
            tx.setRltdPties(other);
            CashAccount24 acc = new CashAccount24();
            if (haben) other.setDbtrAcct(acc);
            else other.setCdtrAcct(acc);
            AccountIdentification4Choice id = new AccountIdentification4Choice();
            acc.setId(id);
            id.setIBAN(line.other.iban);
            Party35Choice party = new Party35Choice();
            PartyIdentification125 pi = new PartyIdentification125();
            pi.setNm(line.other.name);
            party.setPty(pi);
            if (haben) other.setDbtr(party);
            else other.setCdtr(party);
        }

        // Counterpart account: BIC
        if (line.other != null) {
            TransactionAgents4 banks = new TransactionAgents4();
            tx.setRltdAgts(banks);
            BranchAndFinancialInstitutionIdentification5 bank =
                new BranchAndFinancialInstitutionIdentification5();
            if (haben) banks.setDbtrAgt(bank);
            else banks.setCdtrAgt(bank);
            FinancialInstitutionIdentification8 bic = new FinancialInstitutionIdentification8();
            bank.setFinInstnId(bic);
            bic.setBICFI(line.other.bic);
        }

        // Purpose lines ("Verwendungszweck")
        if (line.usage != null && line.usage.size() > 0) {
            RemittanceInformation15 usages = new RemittanceInformation15();
            usages.getUstrd().addAll(line.usage);
            tx.setRmtInf(usages);
        }

        // Amount
        if (line.value != null) {
            ActiveOrHistoricCurrencyAndAmount amt = new ActiveOrHistoricCurrencyAndAmount();
            entry.setAmt(amt);
            BigDecimal val = line.value.getBigDecimalValue();
            // No negative values here — the direction is carried by the DBIT/CRDT indicator
            amt.setValue(val.abs());
            amt.setCcy(line.value.getCurr());
        }

        // Reversal flag ("Storno")
        if (line.isStorno) entry.setRvslInd(Boolean.TRUE);

        // Booking and value date — each falls back to the other if missing
        Date bdate = line.bdate;
        Date valuta = line.valuta;
        if (bdate == null) bdate = valuta;
        if (valuta == null) valuta = bdate;
        if (bdate != null) {
            DateAndDateTime2Choice d = new DateAndDateTime2Choice();
            d.setDt(this.createCalendar(bdate.getTime()));
            entry.setBookgDt(d);
        }
        if (valuta != null) {
            DateAndDateTime2Choice d = new DateAndDateTime2Choice();
            d.setDt(this.createCalendar(valuta.getTime()));
            entry.setValDt(d);
        }

        // Posting text and customer reference
        entry.setAddtlNtryInf(line.text);
        entry.setAcctSvcrRef(line.customerref);

        // Primanota, transaction (GV) code and GV code supplement, joined "+"-separated
        StringBuilder sb = new StringBuilder();
        if (line.gvcode != null && line.gvcode.length() > 0) {
            sb.append(line.gvcode);
            sb.append("+");
        }
        if (line.primanota != null && line.primanota.length() > 0) {
            sb.append(line.primanota);
            sb.append("+");
        }
        if (line.addkey != null && line.addkey.length() > 0) {
            sb.append(line.addkey);
        }
        String s = sb.toString();
        if (s.length() > 0) {
            BankTransactionCodeStructure4 b = new BankTransactionCodeStructure4();
            tx.setBkTxCd(b);
            ProprietaryBankTransactionCodeStructure1 pb =
                new ProprietaryBankTransactionCodeStructure1();
            pb.setCd(s);
            b.setPrtry(pb);
        }

        // Purpose code
        if (line.purposecode != null) {
            Purpose2Choice c = new Purpose2Choice();
            c.setCd(line.purposecode);
            tx.setPurp(c);
        }
        return entry;
    }
}
public class AsyncCallObject { void read_attribute_reply ( int timeout ) { } }
DevError [ ] errors = null ; DeviceAttribute [ ] argout = null ; try { if ( timeout == NO_TIMEOUT ) argout = dev . read_attribute_reply ( id ) ; else argout = dev . read_attribute_reply ( id , timeout ) ; } catch ( AsynReplyNotArrived e ) { errors = e . errors ; } catch ( DevFailed e ) { errors = e . errors ; } cb . attr_read ( new AttrReadEvent ( dev , names , argout , errors ) ) ;
public class JsonDataProviderImpl { /** * Parses the JSON file as a 2D Object array for TestNg dataprovider usage . < br > * < pre > * < i > Array Of Objects mapped to a user defined type : < / i > * " name " : " Optimus Prime " , * " password " : 123456, * " accountNumber " : 99999, * " amount " : 50000, * " areaCode " : [ { " areaCode " : " area1 " } , * { " areaCode " : " area2 " } ] , * " bank " : { * " name " : " Bank1 " , * " type " : " Savings " , * " address " : { * " street " : " 1234 Some St " * " phoneNumber " : " 11111 " , * " preintTest " : 10 * " name " : " Megatron " , * " password " : 123456, * " accountNumber " : 99999, * " amount " : 80000, * " areaCode " : [ { " areaCode " : " area3 " } , * { " areaCode " : " area4 " } ] , * " bank " : { * " name " : " Bank2 " , * " type " : " Current " , * " address " : { * " street " : " 1234 any St " * " phoneNumber " : " 11111 " , * " preintTest " : 100 * < i > Test Method Signature < / i > * { @ code public void readJsonArray ( TestData testData ) } * < / pre > */ @ Override public Object [ ] [ ] getAllData ( ) { } }
logger . entering ( resource ) ; Class < ? > arrayType ; Object [ ] [ ] dataToBeReturned = null ; JsonReader reader = new JsonReader ( getReader ( resource ) ) ; try { // The type specified must be converted to array type for the parser // to deal with array of JSON objects arrayType = Array . newInstance ( resource . getCls ( ) , 0 ) . getClass ( ) ; logger . log ( Level . FINE , "The Json Data is mapped as" , arrayType ) ; dataToBeReturned = mapJsonData ( reader , arrayType ) ; } catch ( Exception e ) { throw new DataProviderException ( "Error while parsing Json Data" , e ) ; } finally { IOUtils . closeQuietly ( reader ) ; } logger . exiting ( ( Object [ ] ) dataToBeReturned ) ; return dataToBeReturned ;
public class UserHandlerImpl { /** * Notifying listeners after user deletion . * @ param user * the user which is used in delete operation * @ throws Exception * if any listener failed to handle the event */ private void postDelete ( User user ) throws Exception { } }
for ( UserEventListener listener : listeners ) { listener . postDelete ( user ) ; }
public class JmxTransformer { /** * Are we a file and a JSON or YAML file ? */ private boolean isProcessConfigFile ( File file ) { } }
if ( this . configuration . getProcessConfigDirOrFile ( ) . isFile ( ) ) { return file . equals ( this . configuration . getProcessConfigDirOrFile ( ) ) ; } // If the file doesn ' t exist anymore , treat it as a regular file ( to handle file deletion events ) if ( file . exists ( ) && ! file . isFile ( ) ) { return false ; } final String fileName = file . getName ( ) ; return ! fileName . startsWith ( "." ) && ( fileName . endsWith ( ".json" ) || fileName . endsWith ( ".yml" ) || fileName . endsWith ( ".yaml" ) ) ;
public class TarHeader { /** * This method , like getNameBytes ( ) , is intended to place a name into a TarHeader ' s buffer . However , this method is * sophisticated enough to recognize long names ( name . length ( ) > NAMELEN ) . In these cases , the method will break the * name into a prefix and suffix and place the name in the header in ' ustar ' format . It is up to the TarEntry to * manage the " entry header format " . This method assumes the name is valid for the type of archive being generated . * @ param outbuf * The buffer containing the entry header to modify . * @ param newName * The new name to place into the header buffer . * @ return The current offset in the tar header ( always TarHeader . NAMELEN ) . * @ throws InvalidHeaderException * If the name will not fit in the header . */ public static int getFileNameBytes ( String newName , byte [ ] outbuf ) throws InvalidHeaderException { } }
if ( newName . length ( ) > 100 ) { // Locate a pathname " break " prior to the maximum name length . . . int index = newName . indexOf ( "/" , newName . length ( ) - 100 ) ; if ( index == - 1 ) { throw new InvalidHeaderException ( "file name is greater than 100 characters, " + newName ) ; } // Get the " suffix subpath " of the name . String name = newName . substring ( index + 1 ) ; // Get the " prefix subpath " , or " prefix " , of the name . String prefix = newName . substring ( 0 , index ) ; if ( prefix . length ( ) > TarHeader . PREFIXLEN ) { throw new InvalidHeaderException ( "file prefix is greater than 155 characters" ) ; } TarHeader . getNameBytes ( new StringBuffer ( name ) , outbuf , TarHeader . NAMEOFFSET , TarHeader . NAMELEN ) ; TarHeader . getNameBytes ( new StringBuffer ( prefix ) , outbuf , TarHeader . PREFIXOFFSET , TarHeader . PREFIXLEN ) ; } else { TarHeader . getNameBytes ( new StringBuffer ( newName ) , outbuf , TarHeader . NAMEOFFSET , TarHeader . NAMELEN ) ; } // The offset , regardless of the format , is now the end of the // original name field . return TarHeader . NAMELEN ;
public class DefaultStackTraceFilterer { /** * Whether the given class name is an internal class and should be filtered * @ param className The class name * @ return true if is internal */ protected boolean isApplicationClass ( String className ) { } }
for ( String packageName : packagesToFilter ) { if ( className . startsWith ( packageName ) ) return false ; } return true ;
public class SysUtil { /** * Return an list of ABIs we supported on this device ordered according to preference . Use a * separate inner class to isolate the version - dependent call where it won ' t cause the whole * class to fail preverification . * @ return Ordered array of supported ABIs */ public static String [ ] getSupportedAbis ( ) { } }
if ( Build . VERSION . SDK_INT < Build . VERSION_CODES . LOLLIPOP ) { return new String [ ] { Build . CPU_ABI , Build . CPU_ABI2 } ; } else { return LollipopSysdeps . getSupportedAbis ( ) ; }
public class DrawerView { /** * Adds a profile to the drawer view * @ param profile Profile to add */ public DrawerView addProfile ( DrawerProfile profile ) { } }
if ( profile . getId ( ) <= 0 ) { profile . setId ( System . nanoTime ( ) * 100 + Math . round ( Math . random ( ) * 100 ) ) ; } for ( DrawerProfile oldProfile : mProfileAdapter . getItems ( ) ) { if ( oldProfile . getId ( ) == profile . getId ( ) ) { mProfileAdapter . remove ( oldProfile ) ; break ; } } profile . attachTo ( this ) ; mProfileAdapter . add ( profile ) ; if ( mProfileAdapter . getCount ( ) == 1 ) { selectProfile ( profile ) ; } updateProfile ( ) ; return this ;
public class CloudFoundryEntityImpl { /** * If custom behaviour is required by sub - classes , consider overriding { @ link # doStop ( ) } . */ @ Override public final void stop ( ) { } }
if ( DynamicTasks . getTaskQueuingContext ( ) != null ) { doStop ( ) ; } else { Task < ? > task = Tasks . builder ( ) . name ( "stop" ) . body ( new Runnable ( ) { public void run ( ) { doStop ( ) ; } } ) . build ( ) ; Entities . submit ( this , task ) . getUnchecked ( ) ; }
public class DocumentElement { /** * getter for width - gets * @ generated * @ return value of the feature */ public float getWidth ( ) { } }
// JCas-generated feature getter: first guard that the "width" feature exists in
// the type system, then read the float directly from the low-level CAS.
if ( DocumentElement_Type . featOkTst && ( ( DocumentElement_Type ) jcasType ) . casFeat_width == null ) jcasType . jcas . throwFeatMissing ( "width" , "ch.epfl.bbp.uima.types.DocumentElement" ) ; return jcasType . ll_cas . ll_getFloatValue ( addr , ( ( DocumentElement_Type ) jcasType ) . casFeatCode_width ) ;
public class HttpResponseMessageImpl { /** * Set the status code of the response message . An input code that does * not match an existing defined StatusCode will create a new " Undefined " * code where the getByteArray ( ) API will return the input code as a * byte [ ] . * @ param code */ @ Override public void setStatusCode ( int code ) { } }
StatusCodes val = null ; try { val = StatusCodes . getByOrdinal ( code ) ; } catch ( IndexOutOfBoundsException e ) { // no FFDC required // nothing to do , just make the undefined value below } // this could be null because the ordinal lookup returned an empty // status code , or because it was out of bounds if ( null == val ) { val = StatusCodes . makeUndefinedValue ( code ) ; } setStatusCode ( val ) ;
public class LoggingScopeFactory { /** * Get a new instance of LoggingScope with msg and params through new . * @ param msg * @ param params * @ return */ public LoggingScope getNewLoggingScope ( final String msg , final Object [ ] params ) { } }
// Each call builds a fresh scope bound to this factory's logger and configured log level.
return new LoggingScopeImpl ( LOG , logLevel , msg , params ) ;
public class ObjectGraphDump { /** * For some types , we don ' t care about their internals , so just summarise the size . * @ param node the node to summarise . */ private void summariseNode ( final ObjectGraphNode node ) { } }
int size = node . getSize ( ) ; node . removeAll ( ) ; node . setSize ( size ) ;
public class GeometryDeserializer { /** * Parses the JSON as a MultiPolygon geometry * @ param coords the coordinates of a multipolygon which is just a list of coordinates of polygons . * @ param crsId * @ return an instance of multipolygon * @ throws IOException if the given json does not correspond to a multipolygon or can be parsed as such */ private MultiPolygon asMultiPolygon ( List < List < List < List > > > coords , CrsId crsId ) throws IOException { } }
if ( coords == null || coords . isEmpty ( ) ) { throw new IOException ( "A multipolygon should have at least one polyon." ) ; } Polygon [ ] polygons = new Polygon [ coords . size ( ) ] ; for ( int i = 0 ; i < coords . size ( ) ; i ++ ) { polygons [ i ] = asPolygon ( coords . get ( i ) , crsId ) ; } return new MultiPolygon ( polygons ) ;
public class SRTOutputStream { /** * This method was created in VisualAge . */ public void close ( ) throws java . io . IOException { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { // 306998.15 logger . logp ( Level . FINE , CLASS_NAME , "close" , "Closing" ) ; } if ( _observer != null ) { _observer . alertClose ( ) ; } super . close ( ) ;
public class Op { /** * Creates an array with the specified elements and an < i > operation expression < / i > on it . * @ param elements the elements of the array being created * @ return an operator , ready for chaining */ public static < T > Level0ArrayOperator < Boolean [ ] , Boolean > onArrayFor ( final Boolean ... elements ) { } }
// Boolean-typed variant: normalises the varargs into a required (non-null)
// Object array and delegates to the typed onArrayOf factory.
return onArrayOf ( Types . BOOLEAN , VarArgsUtil . asRequiredObjectArray ( elements ) ) ;
public class CobolZonedDecimalType { /** * { @ inheritDoc } */ public boolean isValidInternal ( Class < T > javaClass , CobolContext cobolContext , byte [ ] hostData , int start ) { } }
// Validates zoned-decimal host bytes: every non-sign byte must carry a digit in
// its low nibble; the sign is then checked either as a separate leading/trailing
// byte or as an overpunched high nibble, against the COBOL context's sign values.
int end = start + getBytesLen ( ) ; int [ ] nibbles = new int [ 2 ] ; // check all bytes excluding sign // all right hand size nibbles must contain a digit for ( int i = start + ( signLeading ? 1 : 0 ) ; i < end - ( signLeading ? 0 : 1 ) ; i ++ ) { setNibbles ( nibbles , hostData [ i ] ) ; if ( ! isDigit ( nibbles [ 1 ] ) ) { return false ; } } // check the sign if ( signSeparate ) { int separateSign = ( signLeading ? hostData [ start ] : hostData [ end - 1 ] ) & 0xFF ; if ( separateSign != cobolContext . getHostPlusSign ( ) && separateSign != cobolContext . getHostMinusSign ( ) ) { return false ; } } else { setNibbles ( nibbles , signLeading ? hostData [ start ] : hostData [ end - 1 ] ) ; if ( ! isDigit ( nibbles [ 1 ] ) ) { return false ; } if ( isSigned ( ) ) { if ( nibbles [ 0 ] != cobolContext . getPositiveSignNibbleValue ( ) && nibbles [ 0 ] != cobolContext . getNegativeSignNibbleValue ( ) ) { return false ; } } else { if ( nibbles [ 0 ] != cobolContext . getUnspecifiedSignNibbleValue ( ) ) { return false ; } } } return true ;
public class CreateProcedure { /** * Parse and validate the substring containing ALLOW and PARTITION * clauses for CREATE PROCEDURE . * @ param clauses the substring to parse * @ param descriptor procedure descriptor populated with role names from ALLOW clause * @ return parsed and validated partition data or null if there was no PARTITION clause * @ throws VoltCompilerException */ protected ProcedurePartitionData parseCreateProcedureClauses ( ProcedureDescriptor descriptor , String clauses ) throws VoltCompilerException { } }
// Iterates the regex matches over the clause string: an ALLOW match (group 1)
// adds de-duplicated, lower-cased role names to the descriptor; a PARTITION
// match fills the partition data from groups 2-7 and may occur at most once.
// Nothing to do if there were no clauses .
// Null means there ' s no partition data to return .
// There ' s also no roles to add .
if ( clauses == null || clauses . isEmpty ( ) ) { return null ; } ProcedurePartitionData data = null ; Matcher matcher = SQLParser . matchAnyCreateProcedureStatementClause ( clauses ) ; int start = 0 ; while ( matcher . find ( start ) ) { start = matcher . end ( ) ; if ( matcher . group ( 1 ) != null ) { // Add roles if it ' s an ALLOW clause . More that one ALLOW clause is okay . for ( String roleName : StringUtils . split ( matcher . group ( 1 ) , ',' ) ) { // Don ' t put the same role in the list more than once . String roleNameFixed = roleName . trim ( ) . toLowerCase ( ) ; if ( ! descriptor . m_authGroups . contains ( roleNameFixed ) ) { descriptor . m_authGroups . add ( roleNameFixed ) ; } } } else { // Add partition info if it ' s a PARTITION clause . Only one is allowed . if ( data != null ) { throw m_compiler . new VoltCompilerException ( "Only one PARTITION clause is allowed for CREATE PROCEDURE." ) ; } data = new ProcedurePartitionData ( matcher . group ( 2 ) , matcher . group ( 3 ) , matcher . group ( 4 ) , matcher . group ( 5 ) , matcher . group ( 6 ) , matcher . group ( 7 ) ) ; } } return data ;
public class AnalysisRunnerJobDelegate { /** * Runs the job * @ return */ public AnalysisResultFuture run ( ) { } }
// Wires up the job-scoped injection manager, lifecycle helper and row-processing
// publishers, registers a completion listener, notifies the analysis listener of
// job begin, validates single-table input, then schedules row processing and
// returns a future over the result queue. Unexpected runtime errors are reported
// to the listener before being rethrown.
try { // the injection manager is job scoped final InjectionManager injectionManager = _configuration . getEnvironment ( ) . getInjectionManagerFactory ( ) . getInjectionManager ( _configuration , _job ) ; final LifeCycleHelper rowProcessingLifeCycleHelper = new LifeCycleHelper ( injectionManager , _includeNonDistributedTasks ) ; final RowProcessingPublishers publishers = new RowProcessingPublishers ( _job , _analysisListener , _errorAware , _taskRunner , rowProcessingLifeCycleHelper ) ; final AnalysisJobMetrics analysisJobMetrics = publishers . getAnalysisJobMetrics ( ) ; // A task listener that will register either succesfull executions // or unexpected errors ( which will be delegated to the // errorListener ) final JobCompletionTaskListener jobCompletionTaskListener = new JobCompletionTaskListener ( analysisJobMetrics , _analysisListener , 1 ) ; _analysisListener . jobBegin ( _job , analysisJobMetrics ) ; validateSingleTableInput ( _job ) ; // at this point we are done validating the job , it will run . scheduleRowProcessing ( publishers , rowProcessingLifeCycleHelper , jobCompletionTaskListener , analysisJobMetrics ) ; return new AnalysisResultFutureImpl ( _resultQueue , jobCompletionTaskListener , _errorAware ) ; } catch ( final RuntimeException e ) { _analysisListener . errorUnknown ( _job , e ) ; throw e ; }
public class ReportingApi { /** * Get statistics * Get the statistics for the specified subscription IDs . * @ param ids The IDs of the subscriptions . ( required ) * @ return InlineResponse2001 * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public InlineResponse2001 peekMultiple ( String ids ) throws ApiException { } }
ApiResponse < InlineResponse2001 > resp = peekMultipleWithHttpInfo ( ids ) ; return resp . getData ( ) ;
public class MessageTransportInfo { /** * Set up the key areas . */ public void setupKeys ( ) { } }
KeyAreaInfo keyArea = null ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , ID_KEY ) ; keyArea . addKeyField ( ID , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , MESSAGE_PROCESS_INFO_ID_KEY ) ; keyArea . addKeyField ( MESSAGE_PROCESS_INFO_ID , Constants . ASCENDING ) ; keyArea . addKeyField ( MESSAGE_TRANSPORT_ID , Constants . ASCENDING ) ; keyArea . addKeyField ( MESSAGE_VERSION_ID , Constants . ASCENDING ) ;
public class ThirdPartyAudienceSegment { /** * Gets the licenseType value for this ThirdPartyAudienceSegment . * @ return licenseType * Specifies the license type of the external segment . This attribute * is read - only . */ public com . google . api . ads . admanager . axis . v201805 . ThirdPartyAudienceSegmentLicenseType getLicenseType ( ) { } }
// Plain read-only accessor for the licenseType field.
return licenseType ;
public class UCharacter { /** * < p > Returns the titlecase version of the argument string . * < p > Position for titlecasing is determined by the argument break * iterator , hence the user can customize his break iterator for * a specialized titlecasing . In this case only the forward iteration * needs to be implemented . * If the break iterator passed in is null , the default Unicode algorithm * will be used to determine the titlecase positions . * < p > Only positions returned by the break iterator will be title cased , * character in between the positions will all be in lower case . * < p > Casing is dependent on the argument locale and context - sensitive * @ param locale which string is to be converted in * @ param str source string to be performed on * @ param titleIter break iterator to determine the positions in which * the character should be title cased . * @ param options bit set to modify the titlecasing operation * @ return lowercase version of the argument string * @ see # TITLECASE _ NO _ LOWERCASE * @ see # TITLECASE _ NO _ BREAK _ ADJUSTMENT */ public static String toTitleCase ( ULocale locale , String str , BreakIterator titleIter , int options ) { } }
if ( titleIter == null ) { if ( locale == null ) { locale = ULocale . getDefault ( ) ; } titleIter = BreakIterator . getWordInstance ( locale ) ; } titleIter . setText ( str ) ; return toTitleCase ( getCaseLocale ( locale ) , options , titleIter , str ) ;
public class StandardFieldsDialog { /** * Tells whether or not the tab with the given label is scrollable . * < strong > Note : < / strong > The scrollable state returned by this method only applies to tabs that were set to be ( or not ) * scrollable through the method { @ link # setTabScrollable ( String , boolean ) } , not to " panels " added directly to a tab with * { @ link # setCustomTabPanel ( int , JComponent ) } . * @ param tabLabel the label of the tab to check . * @ return { @ code true } if the tab is scrollable , { @ code false } otherwise . * @ since 2.7.0 */ protected boolean isTabScrollable ( String tabLabel ) { } }
JPanel tabPanel = this . tabNameMap . get ( tabLabel ) ; if ( tabPanel == null ) { return false ; } return isTabScrollable ( tabPanel ) ;
public class PairedEndFastqReader { /** * Stream the specified interleaved paired end reads . Per the interleaved format , all reads must be sorted and paired . * @ param reader reader , must not be null * @ param listener paired end listener , must not be null * @ throws IOException if an I / O error occurs * @ deprecated by { @ link # streamInterleaved ( Readable , PairedEndListener ) } , will be removed in version 2.0 */ public static void streamInterleaved ( final Reader reader , final PairedEndListener listener ) throws IOException { } }
// Deprecated adapter: simply forwards to the Readable-based overload.
streamInterleaved ( ( Readable ) reader , listener ) ;
public class ELBinder { /** * Bind an ELInterceptor to the given annotation with the given * { @ link ELBinder . ExecutionPolicy } . * @ param annotationClass EL annotation to intercept * @ param policy determine when the EL is evaluated * @ return ELBinder */ public ELBinder bindELAnnotation ( Class < ? extends Annotation > annotationClass , ExecutionPolicy policy ) { } }
ELInterceptor interceptor = new ELInterceptor ( annotationClass , policy ) ; binder . requestInjection ( interceptor ) ; binder . bindInterceptor ( Matchers . any ( ) , handlerMethodMatcher ( annotationClass ) , interceptor ) ; return this ;
public class Simulator { /** * Start simulation * @ return the winner player index */ public Optional < Integer > start ( ) { } }
// Main match loop: notifies listeners, sends each bot its start data, then per
// half-turn pushes game updates, moves players (ending those the map reports
// dead), ticks the map, writes viewer output and notifies listeners. The loop
// exits when a single player is alive (that player wins) or when the configured
// turn limit is exceeded (all remaining players are disconnected).
LOGGER . info ( "Starting match with: {}" , Arrays . toString ( players ) ) ; try ( ViewerWriter viewerWriter = createViewerWriter ( ) ) { listeners . forEach ( simulatorListener -> simulatorListener . beforeGameStart ( players , gameMap ) ) ; Arrays . stream ( players ) . forEach ( p -> { GameStartApiResponse startData = getStartData ( p . getPlayerIndex ( ) ) ; p . getBot ( ) . onGameStarted ( new GameStartedApiResponseImpl ( startData ) ) ; } ) ; for ( ; ; ) { Map < Integer , PlayerStats > playersStats = calculatePlayersStats ( gameMap . getTiles ( ) ) ; Arrays . stream ( players ) . map ( player -> sendGameUpdate ( player , playersStats ) ) . map ( gameMap :: move ) . filter ( Optional :: isPresent ) . map ( Optional :: get ) . forEach ( this :: endPlayer ) ; gameMap . tick ( ) ; viewerWriter . write ( gameMap ) ; listeners . forEach ( simulatorListener -> simulatorListener . afterHalfTurn ( gameMap . getHalfTurnCounter ( ) , gameMap . getTiles ( ) ) ) ; boolean reachedMaxTurns = gameMap . getHalfTurnCounter ( ) > ( configuration . getMaxTurns ( ) * 2 ) ; Player [ ] alive = Arrays . stream ( this . players ) . filter ( Player :: isAlive ) . toArray ( Player [ ] :: new ) ; if ( alive . length == 1 ) { Player winner = alive [ 0 ] ; sendGameUpdate ( winner , calculatePlayersStats ( gameMap . getTiles ( ) ) ) ; return endGame ( winner ) ; } else if ( reachedMaxTurns ) { return disconnectPlayers ( this . players ) ; } } }
public class SecurityClient { public void setId ( String id ) { } }
if ( id == null || id . length ( ) == 0 ) throw new IllegalArgumentException ( "id required in setId" ) ; this . setId ( Integer . valueOf ( id ) ) ;
public class HibernateUtils { /** * Creates an instance of < code > clazz < / code > and populates fields fetched * from MongoDB document object . Field names are determined from * < code > columns < / code > * @ param documentObj * the document obj * @ param clazz * the clazz * @ param columns * the columns * @ return the object from document * @ throws IllegalAccessException * @ throws InstantiationException */ private static Object getObjectFromDocument ( Metamodel metamodel , Map < String , Object > documentObj , Set < Attribute > columns , Object obj ) throws InstantiationException , IllegalAccessException { } }
for ( Attribute column : columns ) { Object value = documentObj . get ( ( ( AbstractAttribute ) column ) . getJPAColumnName ( ) ) ; if ( ( ( MetamodelImpl ) metamodel ) . isEmbeddable ( ( ( AbstractAttribute ) column ) . getBindableJavaType ( ) ) ) { onViaEmbeddable ( column , obj , metamodel , ( Map < String , Object > ) value ) ; } else { setFieldValue ( obj , value , column ) ; } } return obj ;
public class RestrictionValidator { /** * Validates the number of digits present in the received { @ code value } . * @ param totalDigits The number of total digits . * @ param value The { @ link Double } to be validated . */ public static void validateTotalDigits ( int totalDigits , double value ) { } }
String doubleValue = String . valueOf ( value ) ; int numberOfDigits ; if ( value != ( ( int ) value ) ) { numberOfDigits = doubleValue . length ( ) - 1 ; } else { numberOfDigits = doubleValue . length ( ) ; } if ( numberOfDigits != totalDigits ) { throw new RestrictionViolationException ( "Violation of fractionDigits restriction, value should have a exactly " + totalDigits + " decimal places." ) ; }
public class EnvironmentClassLoader { /** * Adds self as a listener . */ private void initListeners ( ) { } }
ClassLoader parent = getParent ( ) ; for ( ; parent != null ; parent = parent . getParent ( ) ) { if ( parent instanceof EnvironmentClassLoader ) { EnvironmentClassLoader loader = ( EnvironmentClassLoader ) parent ; if ( _stopListener == null ) _stopListener = new WeakStopListener ( this ) ; loader . addListener ( _stopListener ) ; return ; } }
public class DateFuncSup { /** * set date to next years * @ param next count of next years to set to * @ return < code > this < / code > */ public DateFuncSup nextYear ( int next ) { } }
Calendar cal = Calendar . getInstance ( ) ; cal . setTime ( date ) ; cal . add ( Calendar . YEAR , next ) ; date . setTime ( cal . getTimeInMillis ( ) ) ; return this ;
public class DrawerBuilder { /** * get the adapter ( null safe ) * @ return the FastAdapter used with this drawer */ protected FastAdapter < IDrawerItem > getAdapter ( ) { } }
if ( mAdapter == null ) { mAdapter = FastAdapter . with ( Arrays . asList ( mHeaderAdapter , mItemAdapter , mFooterAdapter ) , Arrays . < IAdapterExtension < IDrawerItem > > asList ( mExpandableExtension ) ) ; mAdapter . withSelectable ( true ) ; mAdapter . withMultiSelect ( false ) ; mAdapter . withAllowDeselection ( false ) ; mAdapter . setHasStableIds ( mHasStableIds ) ; } return mAdapter ;
public class BdbStorageConfiguration { /** * Clean up the environment object for the given storage engine */ @ Override public void removeStorageEngine ( StorageEngine < ByteArray , byte [ ] , byte [ ] > engine ) { } }
// Only applicable with one Environment per store: under the lock, looks up the
// store's Environment, removes it from the unreserved set, deletes its on-disk
// BDB directory, untracks its stats (and JMX bean when enabled), then closes
// the Environment and drops it from the map. A shared Environment is never
// cleaned here.
String storeName = engine . getName ( ) ; BdbStorageEngine bdbEngine = ( BdbStorageEngine ) engine ; synchronized ( lock ) { // Only cleanup the environment if it is per store . We cannot // cleanup a shared ' Environment ' object if ( useOneEnvPerStore ) { Environment environment = this . environments . get ( storeName ) ; if ( environment == null ) { // Nothing to clean up . return ; } // Remove from the set of unreserved stores if needed . if ( this . unreservedStores . remove ( environment ) ) { logger . info ( "Removed environment for store name: " + storeName + " from unreserved stores" ) ; } else { logger . info ( "No environment found in unreserved stores for store name: " + storeName ) ; } // Try to delete the BDB directory associated File bdbDir = environment . getHome ( ) ; if ( bdbDir . exists ( ) && bdbDir . isDirectory ( ) ) { String bdbDirPath = bdbDir . getPath ( ) ; try { FileUtils . deleteDirectory ( bdbDir ) ; logger . info ( "Successfully deleted BDB directory : " + bdbDirPath + " for store name: " + storeName ) ; } catch ( IOException e ) { logger . error ( "Unable to delete BDB directory: " + bdbDirPath + " for store name: " + storeName ) ; } } // Remove the reference to BdbEnvironmentStats , which holds a // reference to the Environment BdbEnvironmentStats bdbEnvStats = bdbEngine . getBdbEnvironmentStats ( ) ; this . aggBdbStats . unTrackEnvironment ( bdbEnvStats ) ; // Unregister the JMX bean for Environment if ( voldemortConfig . isJmxEnabled ( ) ) { ObjectName name = JmxUtils . createObjectName ( JmxUtils . getPackageName ( bdbEnvStats . getClass ( ) ) , storeName ) ; // Un - register the environment stats mbean JmxUtils . unregisterMbean ( name ) ; } // Cleanup the environment environment . close ( ) ; this . environments . remove ( storeName ) ; logger . info ( "Successfully closed the environment for store name : " + storeName ) ; } }
public class LocalTranCoordImpl { /** * Removes the provided < CODE > resource < / CODE > from the list of resources * that need to be cleaned - up when the LTC completes . * This method should be called when the application completes the RMLT . * @ param resource The < CODE > OnePhaseXAResource < / CODE > to stop tracking * @ exception IllegalStateException * Thrown if the LocalTransactionCoordinator is not in a * valid state to execute the operation , for example if * a global transaction is active . */ public void delistFromCleanup ( OnePhaseXAResource resource ) throws IllegalStateException { } }
// Rejects the delist (with FFDC + error trace) when a global transaction is
// active or when no cleanup resources exist; otherwise, while Running or
// Suspended, removes the resource (failing if it was never enlisted). During
// completion the request is deliberately ignored rather than failed (see
// defect 156223 comment below).
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "delistFromCleanup" , resource ) ; if ( LocalTranCurrentImpl . globalTranExists ( ) ) { final IllegalStateException ise = new IllegalStateException ( "Cannot delist Resource from cleanup. A Global transaction is active." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.delistFromCleanup" , "525" , this ) ; Tr . error ( tc , "ERR_DELIST_TX_GLB_ACT" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "delistFromCleanup" , ise ) ; throw ise ; } if ( _cleanupResources == null ) { final IllegalStateException ise = new IllegalStateException ( "Cannot delist Resource. It is not enlisted for cleanup with this LocalTransactionCoordinator." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.delistFromCleanup" , "534" , this ) ; Tr . error ( tc , "ERR_DELIST_NOT_ENLISTED" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "delistFromCleanup" , ise ) ; throw ise ; } if ( ( _state == Running ) || ( _state == Suspended ) ) { int index = _cleanupResources . indexOf ( resource ) ; if ( index == - 1 ) { final IllegalStateException ise = new IllegalStateException ( "Cannot delist Resource. It is not enlisted for cleanup with this LocalTransactionCoordinator." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.delistFromCleanup" , "547" , this ) ; Tr . error ( tc , "ERR_DELIST_NOT_ENLISTED" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "delistFromCleanup" , ise ) ; throw ise ; } _cleanupResources . remove ( index ) ; } // Defect 156223 // If we are completing or completed then we can ' t allow the // delist but throwing an exception may cause problems in // the ConnectionManager so we simply do nothing . if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "delistFromCleanup" ) ;
public class nat64 { /** * Use this API to fetch all the nat64 resources that are configured on netscaler . */ public static nat64 [ ] get ( nitro_service service ) throws Exception { } }
nat64 obj = new nat64 ( ) ; nat64 [ ] response = ( nat64 [ ] ) obj . get_resources ( service ) ; return response ;
public class Utils { /** * Converts the day of the week from android . text . format . Time to java . util . Calendar */ public static int convertDayOfWeekFromTimeToCalendar ( int timeDayOfWeek ) { } }
switch ( timeDayOfWeek ) { case Time . MONDAY : return Calendar . MONDAY ; case Time . TUESDAY : return Calendar . TUESDAY ; case Time . WEDNESDAY : return Calendar . WEDNESDAY ; case Time . THURSDAY : return Calendar . THURSDAY ; case Time . FRIDAY : return Calendar . FRIDAY ; case Time . SATURDAY : return Calendar . SATURDAY ; case Time . SUNDAY : return Calendar . SUNDAY ; default : throw new IllegalArgumentException ( "Argument must be between Time.SUNDAY and " + "Time.SATURDAY" ) ; }
public class LessParser { /** * Read a quoted string . * @ param quote the quote character . * @ return the string with quotes */ private String readQuote ( char quote ) { } }
StringBuilder builder = cachesBuilder ; builder . setLength ( 0 ) ; readQuote ( quote , builder ) ; String str = builder . toString ( ) ; builder . setLength ( 0 ) ; return str ;
public class ComponentTagHandlerDelegate { /** * If the binding attribute was specified , use that in conjuction with our componentType String variable to call * createComponent on the Application , otherwise just pass the componentType String . < p / > If the binding was used , * then set the ValueExpression " binding " on the created UIComponent . * @ see Application # createComponent ( javax . faces . el . ValueBinding , javax . faces . context . FacesContext , java . lang . String ) * @ see Application # createComponent ( java . lang . String ) * @ param ctx * FaceletContext to use in creating a component * @ return */ protected UIComponent createComponent ( FaceletContext ctx ) { } }
// Three creation paths: (1) a component-builder delegate, when present (needed
// for Resource-based creation); (2) a "binding" ValueExpression - reset during
// restore view when writable, used with componentType/rendererType to create
// the component, then stored as the "binding" expression with PreDisposeView
// cleanup and dynamic-refresh marking when the component already holds a
// generated subtree; (3) plain creation from componentType/rendererType.
if ( _componentBuilderHandlerDelegate != null ) { // the call to Application . createComponent ( FacesContext , Resource ) // is delegated because we don ' t have here the required Resource instance return _componentBuilderHandlerDelegate . createComponent ( ctx ) ; } UIComponent c = null ; FacesContext faces = ctx . getFacesContext ( ) ; Application app = faces . getApplication ( ) ; if ( _delegate . getBinding ( ) != null ) { ValueExpression ve = _delegate . getBinding ( ) . getValueExpression ( ctx , Object . class ) ; if ( PhaseId . RESTORE_VIEW . equals ( faces . getCurrentPhaseId ( ) ) ) { if ( ! ve . isReadOnly ( faces . getELContext ( ) ) ) { try { // force reset it is an easy and cheap way to allow " binding " attribute to work on // view scope beans or flow scope beans ( using a transient variable ) ve . setValue ( faces . getELContext ( ) , null ) ; } catch ( Exception e ) { // ignore } } } if ( this . _rendererType == null ) { c = app . createComponent ( ve , faces , this . _componentType ) ; } else { c = app . createComponent ( ve , faces , this . _componentType , this . _rendererType ) ; } if ( c != null ) { c . setValueExpression ( "binding" , ve ) ; if ( ! ve . isReadOnly ( faces . getELContext ( ) ) ) { ComponentSupport . getViewRoot ( ctx , c ) . getAttributes ( ) . put ( "oam.CALL_PRE_DISPOSE_VIEW" , Boolean . TRUE ) ; c . subscribeToEvent ( PreDisposeViewEvent . class , new ClearBindingValueExpressionListener ( ) ) ; } if ( c . getChildCount ( ) > 0 || c . getFacetCount ( ) > 0 ) { // In this case , this component is used to hold a subtree that is generated // dynamically . In this case , the best is mark this component to be restored // fully , because this ensures the state is correctly preserved . Note this // is only necessary when the component has additional children or facets , // because those components requires an unique id provided by createUniqueId ( ) , // and this ensures stability of the generated ids . c . getAttributes ( ) . 
put ( DefaultFaceletsStateManagementStrategy . COMPONENT_ADDED_AFTER_BUILD_VIEW , ComponentState . REMOVE_ADD ) ; if ( FaceletViewDeclarationLanguageBase . isDynamicComponentNeedsRefresh ( ctx . getFacesContext ( ) ) ) { FaceletViewDeclarationLanguageBase . resetDynamicComponentNeedsRefreshFlag ( ctx . getFacesContext ( ) ) ; FaceletCompositionContext mctx = FaceletCompositionContext . getCurrentInstance ( ctx ) ; if ( mctx . isUsingPSSOnThisView ( ) ) { FaceletViewDeclarationLanguage . cleanTransientBuildOnRestore ( faces ) ; } else { FaceletViewDeclarationLanguageBase . activateDynamicComponentRefreshTransientBuild ( faces ) ; } // Mark top binding component to be dynamically refreshed . In that way , facelets algorithm // will be able to decide if the component children requires to be refreshed dynamically // or not . c . getAttributes ( ) . put ( FaceletDynamicComponentRefreshTransientBuildEvent . DYNAMIC_COMPONENT_BINDING_NEEDS_REFRESH , Boolean . TRUE ) ; } } } } else { // According to the , spec call the second alternative with null rendererType gives // the same result , but without the unnecesary call for FacesContext . getCurrentInstance ( ) . // Saves 1 call per component without rendererType ( f : viewParam , h : column , f : selectItem , . . . ) // and it does not have any side effects ( the spec javadoc mentions in a explicit way // that rendererType can be null ! ) . /* if ( this . _ rendererType = = null ) c = app . createComponent ( this . _ componentType ) ; else */ c = app . createComponent ( faces , this . _componentType , this . _rendererType ) ; } return c ;
public class BytecodeHelper { /** * / * public void dup ( ) { * mv . visitInsn ( DUP ) ; */ private static boolean hasGenerics ( Parameter [ ] param ) { } }
if ( param . length == 0 ) return false ; for ( Parameter parameter : param ) { ClassNode type = parameter . getType ( ) ; if ( hasGenerics ( type ) ) return true ; } return false ;
public class ByteArrayValueData { /** * { @ inheritDoc } */ protected boolean internalEquals ( ValueData another ) { } }
if ( another instanceof ByteArrayValueData ) { return Arrays . equals ( ( ( ByteArrayValueData ) another ) . value , value ) ; } return false ;
public class PathNormalizer { /** * Normalizes all the paths in a Set . * @ param paths * @ return */ public static final Set < String > normalizePaths ( Set < String > paths ) { } }
Set < String > ret = new HashSet < > ( ) ; for ( Iterator < String > it = paths . iterator ( ) ; it . hasNext ( ) ; ) { String path = normalizePath ( ( String ) it . next ( ) ) ; ret . add ( path ) ; } return ret ;
public class ParameterUtil { /** * Get parameter value from a string represenation * @ param parameterClass parameter class * @ param value string value representation * @ param sdf SimpleDateFormat used to parse Date from String * @ return parameter value from string representation * @ throws Exception if string value cannot be parse */ public static Object getParameterValueFromString ( String parameterClass , String value , SimpleDateFormat sdf ) throws Exception { } }
Object result = value ; if ( result == null ) { return result ; } try { if ( QueryParameter . INTEGER_VALUE . equals ( parameterClass ) ) { result = Integer . parseInt ( value ) ; } else if ( QueryParameter . BYTE_VALUE . equals ( parameterClass ) ) { result = Byte . parseByte ( value ) ; } else if ( QueryParameter . SHORT_VALUE . equals ( parameterClass ) ) { result = Short . parseShort ( value ) ; } else if ( QueryParameter . LONG_VALUE . equals ( parameterClass ) ) { result = Long . parseLong ( value ) ; } else if ( QueryParameter . FLOAT_VALUE . equals ( parameterClass ) ) { result = Float . parseFloat ( value ) ; } else if ( QueryParameter . DOUBLE_VALUE . equals ( parameterClass ) ) { result = Double . parseDouble ( value ) ; } else if ( QueryParameter . BOOLEAN_VALUE . equals ( parameterClass ) ) { result = Boolean . parseBoolean ( value ) ; } else if ( QueryParameter . BIGDECIMAL_VALUE . equals ( parameterClass ) ) { result = new BigDecimal ( value ) ; } else if ( QueryParameter . BIGINTEGER_VALUE . equals ( parameterClass ) ) { result = new BigInteger ( value ) ; } else if ( QueryParameter . DATE_VALUE . equals ( parameterClass ) ) { if ( sdf == null ) { sdf = new SimpleDateFormat ( ) ; try { // see StringUtil . getValueAsString // help us to have date drill down parameters ( if no pattern is present ) ! // day and time result = sdf . parse ( value ) ; } catch ( ParseException ex ) { // day without time result = DateFormat . getDateInstance ( ) . parse ( value ) ; } } else { // server request for url query parameters ( SimpleDateFormat is hardcoded ) result = sdf . parse ( value ) ; } } else if ( QueryParameter . TIME_VALUE . equals ( parameterClass ) ) { if ( sdf != null ) { result = sdf . parse ( value ) ; } else { result = Time . valueOf ( value ) ; } } else if ( QueryParameter . TIMESTAMP_VALUE . equals ( parameterClass ) ) { if ( sdf != null ) { result = sdf . parse ( value ) ; } else { result = Timestamp . 
valueOf ( value ) ; } } return result ; } catch ( NumberFormatException ex ) { throw new Exception ( "Cannot parse " + parameterClass + " value from text " + value ) ; } catch ( ParseException ex ) { throw new Exception ( "Cannot parse " + parameterClass + " value from text " + value ) ; }
public class ByteCodeWriter { /** * Writes a float */ public void writeFloat ( float v ) throws IOException { } }
int bits = Float . floatToIntBits ( v ) ; _os . write ( bits >> 24 ) ; _os . write ( bits >> 16 ) ; _os . write ( bits >> 8 ) ; _os . write ( bits ) ;
public class SimpleMapSerializer { /** * 简单 map 的反序列化过程 , 用来反序列化 bolt 的 header * { @ link SofaRpcSerialization # deserializeHeader ( com . alipay . remoting . rpc . RequestCommand ) } * @ param bytes bolt header * @ return 反序列化后的 Map 对象 * @ throws DeserializationException DeserializationException */ public Map < String , String > decode ( byte [ ] bytes ) throws DeserializationException { } }
Map < String , String > map = new HashMap < String , String > ( ) ; if ( bytes == null || bytes . length == 0 ) { return map ; } UnsafeByteArrayInputStream in = new UnsafeByteArrayInputStream ( bytes ) ; try { while ( in . available ( ) > 0 ) { String key = readString ( in ) ; String value = readString ( in ) ; map . put ( key , value ) ; } return map ; } catch ( IOException ex ) { throw new DeserializationException ( ex . getMessage ( ) , ex ) ; }
public class Transformation2D { /** * be zeroed . */ void transformWithoutShift ( Point2D [ ] pointsIn , int from , int count , Point2D [ ] pointsOut ) { } }
for ( int i = from , n = from + count ; i < n ; i ++ ) { Point2D p = pointsIn [ i ] ; double new_x = xx * p . x + xy * p . y ; double new_y = yx * p . x + yy * p . y ; pointsOut [ i ] . setCoords ( new_x , new_y ) ; }
public class FilterList { /** * Build the address tree from a string of data which contains valid * IPv4 and / or IPv6 addresses . The string array should contain all the * addresses . * @ param data * list of IPv4 and / or IPv6 address which are * to be used to create a new address tree . */ protected void buildData ( String [ ] data , boolean validateOnly ) { } }
if ( data == null ) { return ; } int length = data . length ; for ( int i = 0 ; i < length ; i ++ ) { addAddressToList ( data [ i ] , validateOnly ) ; }
public class SshPublicKeyFileFactory { /** * Decode an SSH2 encoded public key as specified in the SSH2 transport * protocol . This consists of a String identifier specifying the algorithm * of the public key and the remaining data is formatted depending upon the * public key type . The supported key types are as follows : * < pre > * ssh - rsa is encoded as * String " ssh - rsa " * BigInteger e * BigInteger n * ssh - dsa is encoded as * String " ssh - dsa " * BigInteger p * BigInteger q * BigItneger g * BigInteger y * < / pre > * @ param encoded * @ return SshPublicKey * @ throws IOException */ public static SshPublicKey decodeSSH2PublicKey ( byte [ ] encoded ) throws IOException { } }
ByteArrayReader bar = new ByteArrayReader ( encoded ) ; try { String algorithm = bar . readString ( ) ; try { SshPublicKey publickey = ( SshPublicKey ) ComponentManager . getInstance ( ) . supportedPublicKeys ( ) . getInstance ( algorithm ) ; publickey . init ( encoded , 0 , encoded . length ) ; return publickey ; } catch ( SshException ex ) { throw new SshIOException ( ex ) ; } } catch ( OutOfMemoryError ex2 ) { throw new IOException ( "An error occurred parsing a public key file! Is the file corrupt?" ) ; } finally { try { bar . close ( ) ; } catch ( IOException e ) { } }
public class DimensionFromConstraint { /** * Public for Unit test * @ param constraint Constraint value ex : ST _ COORDIM ( the _ geom ) = 3 * @ param columnName Column name ex : the _ geom * @ return The dimension constraint [ 2-3] */ public static int dimensionFromConstraint ( String constraint , String columnName ) { } }
Matcher matcher = Z_CONSTRAINT_PATTERN . matcher ( constraint ) ; if ( matcher . find ( ) ) { String extractedColumnName = matcher . group ( 1 ) . replace ( "\"" , "" ) . replace ( "`" , "" ) ; if ( extractedColumnName . equalsIgnoreCase ( columnName ) ) { int constraint_value = Integer . valueOf ( matcher . group ( 8 ) ) ; String sign = matcher . group ( 5 ) ; if ( "<>" . equals ( sign ) || "!=" . equals ( sign ) ) { constraint_value = constraint_value == 3 ? 2 : 3 ; } if ( "<" . equals ( sign ) ) { constraint_value = 2 ; } if ( ">" . equals ( sign ) ) { constraint_value = constraint_value == 2 ? 3 : 2 ; } return constraint_value ; } } return 2 ;
public class LongTermRetentionBackupsInner { /** * Lists all long term retention backups for a database . * @ param locationName The location of the database * @ param longTermRetentionServerName the String value * @ param longTermRetentionDatabaseName the String value * @ param onlyLatestPerDatabase Whether or not to only get the latest backup for each database . * @ param databaseState Whether to query against just live databases , just deleted databases , or all databases . Possible values include : ' All ' , ' Live ' , ' Deleted ' * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; LongTermRetentionBackupInner & gt ; object if successful . */ public PagedList < LongTermRetentionBackupInner > listByDatabase ( final String locationName , final String longTermRetentionServerName , final String longTermRetentionDatabaseName , final Boolean onlyLatestPerDatabase , final LongTermRetentionDatabaseState databaseState ) { } }
ServiceResponse < Page < LongTermRetentionBackupInner > > response = listByDatabaseSinglePageAsync ( locationName , longTermRetentionServerName , longTermRetentionDatabaseName , onlyLatestPerDatabase , databaseState ) . toBlocking ( ) . single ( ) ; return new PagedList < LongTermRetentionBackupInner > ( response . body ( ) ) { @ Override public Page < LongTermRetentionBackupInner > nextPage ( String nextPageLink ) { return listByDatabaseNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class FileTransferNegotiator { /** * Returns the file transfer negotiator related to a particular connection . * When this class is requested on a particular connection the file transfer * service is automatically enabled . * @ param connection The connection for which the transfer manager is desired * @ return The FileTransferNegotiator */ public static synchronized FileTransferNegotiator getInstanceFor ( final XMPPConnection connection ) { } }
FileTransferNegotiator fileTransferNegotiator = INSTANCES . get ( connection ) ; if ( fileTransferNegotiator == null ) { fileTransferNegotiator = new FileTransferNegotiator ( connection ) ; INSTANCES . put ( connection , fileTransferNegotiator ) ; } return fileTransferNegotiator ;
public class GeoPackageCoreImpl {
    /**
     * {@inheritDoc}
     *
     * Delegates to the three-argument {@code getBoundingBox} overload with the
     * third argument hard-coded to {@code false}.
     * NOTE(review): the meaning of that boolean is not visible from this file —
     * presumably it disables a manual/forced bounding-box computation; confirm
     * against the overload's declaration.
     */
    @Override
    public BoundingBox getBoundingBox(Projection projection, String table) {
        return getBoundingBox(projection, table, false);
    }
}
public class Repeater {
    /**
     * When using the repeater's pad tag, it is possible to require a minimum number of
     * items render in the repeater. This method pads out the number of items until it
     * reaches the {@link org.apache.beehive.netui.tags.databinding.repeater.pad.PadContext}'s
     * <code>minRepeat</code> property.
     *
     * No-op when there is no pad context or the minimum has already been met.
     */
    private final void doPadding() {
        if (_padContext != null && !_padContext.checkMinRepeat(_renderedItems)) {
            /* since padding is now running, un-set the current item so that the last
               item isn't accessible during any later data binding */
            _currentItem = null;
            // Append pad text, advancing the index, until the minimum repeat
            // count is satisfied; the loop counter starts at the number of
            // items already rendered.
            for (int i = _renderedItems; !_padContext.checkMinRepeat(i); i++) {
                _currentIndex++;
                addContent(_padContext.getPadText());
            }
        }
    }
}
public class PartialApplicator {
    /**
     * Returns a {@link java.util.function.Supplier} produced by applying all six
     * arguments to the supplied {@code Function6}; evaluation of the function is
     * deferred until the supplier's {@code get()} is invoked.
     *
     * (Doc fix: the previous javadoc described 5 applied arguments and omitted
     * {@code t6}; the code applies all six.)
     *
     * @param t1 Generic argument
     * @param t2 Generic argument
     * @param t3 Generic argument
     * @param t4 Generic argument
     * @param t5 Generic argument
     * @param t6 Generic argument
     * @param hexFunc Function that accepts 6 parameters
     * @param <T1> Generic argument type
     * @param <T2> Generic argument type
     * @param <T3> Generic argument type
     * @param <T4> Generic argument type
     * @param <T5> Generic argument type
     * @param <T6> Generic argument type
     * @param <R> Function generic return type
     * @return Supplier as a result of all 6 arguments being applied to the incoming Function6
     */
    public static <T1, T2, T3, T4, T5, T6, R> Supplier<R> partial6(final T1 t1, final T2 t2, final T3 t3, final T4 t4, final T5 t5, final T6 t6, final Function6<T1, T2, T3, T4, T5, T6, R> hexFunc) {
        return () -> hexFunc.apply(t1, t2, t3, t4, t5, t6);
    }
}
public class Ssh2Channel {
    /**
     * Sends a channel request. Many channels have extensions that are specific
     * to that particular channel type, an example of which is requesting a
     * pseudo terminal from an interactive session.
     *
     * Delegates to the four-argument overload with its final boolean set to
     * {@code true} (meaning not visible from this file — confirm against the
     * overload's declaration).
     *
     * @param requesttype
     *            the name of the request, for example "pty-req"
     * @param wantreply
     *            specifies whether the remote side should send a
     *            success/failure message
     * @param requestdata
     *            the request data
     * @return <code>true</code> if the request succeeded and wantreply=true,
     *         otherwise <code>false</code>
     * @throws SshException if the request cannot be sent
     *         (doc fix: the method declares {@code SshException}, not
     *         {@code IOException} as previously documented)
     */
    public boolean sendRequest(String requesttype, boolean wantreply, byte[] requestdata) throws SshException {
        return sendRequest(requesttype, wantreply, requestdata, true);
    }
}
public class JMessageClient {
    /**
     * Add sensitive words.
     *
     * Thin facade: delegates directly to the underlying sensitive-word client.
     *
     * @param words String Set of words to register as sensitive
     * @return No content
     * @throws APIConnectionException connect exception
     * @throws APIRequestException request exception
     */
    public ResponseWrapper addSensitiveWords(Set<String> words) throws APIConnectionException, APIRequestException {
        return _sensitiveWordClient.addSensitiveWords(words);
    }
}
public class PHPObjectMessage {
    /**
     * {@inheritDoc}
     *
     * Logs the number of arguments received, then reflectively invokes the
     * accessor named {@code "get" + name} on the wrapped object and returns
     * its result.
     *
     * NOTE(review): the {@code arg0} values themselves are ignored — only
     * their count is logged and none are forwarded to {@code invoke}; confirm
     * whether that is intentional.
     */
    @Override
    public Object send(String... arg0) throws Exception {
        LOGGER.info("Send called with " + arg0.length);
        return object.invoke("get" + name);
    }
}
public class TIFFImageMetadata {
    /**
     * Builds the "Chroma" node of the standard (javax_imageio_1.0) metadata
     * tree from the TIFF IFD: maps PhotometricInterpretation to a
     * ColorSpaceType name, reports NumChannels and BlackIsZero, and — when a
     * ColorMap tag is present — emits the palette entries.
     *
     * See: http://download.java.net/media/jai-imageio/javadoc/1.1/com/sun/media/imageio/plugins/tiff/package-summary.html
     */
    @Override
    protected IIOMetadataNode getStandardChromaNode() {
        IIOMetadataNode chroma = new IIOMetadataNode("Chroma");
        // Handle ColorSpaceType (RGB/CMYK/YCbCr etc)...
        Entry photometricTag = ifd.getEntryById(TIFF.TAG_PHOTOMETRIC_INTERPRETATION);
        int photometricValue = getValueAsInt(photometricTag); // No default for this tag!
        int numChannelsValue = getSamplesPerPixelWithFallback();
        IIOMetadataNode colorSpaceType = new IIOMetadataNode("ColorSpaceType");
        chroma.appendChild(colorSpaceType);
        switch (photometricValue) {
            case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO:
            case TIFFBaseline.PHOTOMETRIC_BLACK_IS_ZERO:
            case TIFFBaseline.PHOTOMETRIC_MASK:
                // It's really a transparency mask/alpha channel, but...
                colorSpaceType.setAttribute("name", "GRAY");
                break;
            case TIFFBaseline.PHOTOMETRIC_RGB:
            case TIFFBaseline.PHOTOMETRIC_PALETTE:
                // Palette images expand to RGB via the color map below.
                colorSpaceType.setAttribute("name", "RGB");
                break;
            case TIFFExtension.PHOTOMETRIC_YCBCR:
                colorSpaceType.setAttribute("name", "YCbCr");
                break;
            case TIFFExtension.PHOTOMETRIC_CIELAB:
            case TIFFExtension.PHOTOMETRIC_ICCLAB:
            case TIFFExtension.PHOTOMETRIC_ITULAB:
                colorSpaceType.setAttribute("name", "Lab");
                break;
            case TIFFExtension.PHOTOMETRIC_SEPARATED:
                // TODO: May be CMYK, or something else... Consult InkSet and NumberOfInks!
                if (numChannelsValue == 3) {
                    colorSpaceType.setAttribute("name", "CMY");
                }
                else {
                    colorSpaceType.setAttribute("name", "CMYK");
                }
                break;
            case TIFFCustom.PHOTOMETRIC_LOGL:
            case TIFFCustom.PHOTOMETRIC_LOGLUV:
                colorSpaceType.setAttribute("name", "Luv");
                break;
            case TIFFCustom.PHOTOMETRIC_CFA:
            case TIFFCustom.PHOTOMETRIC_LINEAR_RAW: // ...or is this RGB?
                colorSpaceType.setAttribute("name", "3CLR");
                break;
            default:
                // Unknown interpretation: synthesize an "nCLR" name from the
                // channel count, per the generic standard-metadata convention.
                colorSpaceType.setAttribute("name", Integer.toHexString(numChannelsValue) + "CLR");
                break;
        }
        // NumChannels
        IIOMetadataNode numChannels = new IIOMetadataNode("NumChannels");
        chroma.appendChild(numChannels);
        if (photometricValue == TIFFBaseline.PHOTOMETRIC_PALETTE) {
            // Palette images logically have 3 channels (R, G, B) after lookup.
            numChannels.setAttribute("value", "3");
        }
        else {
            numChannels.setAttribute("value", Integer.toString(numChannelsValue));
        }
        // BlackIsZero (defaults to TRUE)
        IIOMetadataNode blackIsZero = new IIOMetadataNode("BlackIsZero");
        chroma.appendChild(blackIsZero);
        switch (photometricValue) {
            case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO:
                blackIsZero.setAttribute("value", "FALSE");
                break;
            default:
                // all other interpretations keep the TRUE default
                break;
        }
        Entry colorMapTag = ifd.getEntryById(TIFF.TAG_COLOR_MAP);
        if (colorMapTag != null) {
            int[] colorMapValues = (int[]) colorMapTag.getValue();
            IIOMetadataNode palette = new IIOMetadataNode("Palette");
            chroma.appendChild(palette);
            // TIFF color maps store all reds, then all greens, then all blues.
            int count = colorMapValues.length / 3;
            for (int i = 0; i < count; i++) {
                IIOMetadataNode paletteEntry = new IIOMetadataNode("PaletteEntry");
                paletteEntry.setAttribute("index", Integer.toString(i));
                // TODO: See TIFFImageReader createIndexColorModel, to detect 8 bit colorMap
                paletteEntry.setAttribute("red", Integer.toString((colorMapValues[i] >> 8) & 0xff));
                paletteEntry.setAttribute("green", Integer.toString((colorMapValues[i + count] >> 8) & 0xff));
                paletteEntry.setAttribute("blue", Integer.toString((colorMapValues[i + count * 2] >> 8) & 0xff));
                palette.appendChild(paletteEntry);
            }
        }
        return chroma;
    }
}
public class WorkUnitDao {
    /**
     * Builds the UNION-ALL work-unit polling query.
     *
     * Note: The (a) sub-query is written deliberately before the sub-queries (b) and (c).
     * (b) and (c) return null values for woit_ref_num and waiting_since. PostgreSQL is only able to
     * determine the type of this fields if there is a preceding sub-query (that is (a) in our case)
     * that returns a non-null value for that field.
     * If (b) and (c) were preceding (a), than PostgreSQL would use VARCHAR as a fallback type for the
     * null value fields. This would make the UNION of the sub-queries fail due to a type mismatch.
     *
     * The query carries one positional parameter: the due-date cutoff used by
     * sub-query (d). Whitespace runs are collapsed to single spaces before
     * returning.
     */
    private String getSql(String clusterName) {
        String clusterCondition = getClusterCondition(clusterName);
        String sql = "" // ( a ) completing signals, tasks, human tasks
            + "SELECT woin.ref_num AS woin_ref_num, "
            + " '" + WorkType.COMPLETE_WORK_ITEM.name() + "' AS type, "
            + " woit.ref_num AS woit_ref_num, "
            + " woit.date_updated AS waiting_since "
            + " FROM " + getSchema() + "work_items woit "
            + " JOIN " + getSchema() + "workflow_instances woin "
            + " ON woin.ref_num = woit.woin_ref_num "
            + " WHERE woin.status = '" + WorkflowInstanceStatus.EXECUTING.name() + "' "
            + " AND woin.locked = 'N' " + clusterCondition
            + " AND woit.status = '" + WorkItemStatus.EXECUTED.name() + "' "
            + " AND woit.due_date IS NULL "
            + "UNION ALL "
            // ( b ) starting workflows
            + "SELECT woin.ref_num AS woin_ref_num, "
            + " '" + WorkType.START_WORKFLOW.name() + "' AS type, "
            + " null AS woit_ref_num, "
            + " null AS waiting_since "
            + " FROM " + getSchema() + "workflow_instances woin "
            + " WHERE woin.status = '" + WorkflowInstanceStatus.NEW.name() + "' "
            + " AND woin.locked = 'N' " + clusterCondition
            + " UNION ALL "
            // ( c ) aborting workflows
            + "SELECT woin.ref_num AS woin_ref_num, "
            + " '" + WorkType.ABORT_WORKFLOW.name() + "' AS type, "
            + " null AS woit_ref_num, "
            + " null AS waiting_since "
            + " FROM " + getSchema() + "workflow_instances woin "
            + " WHERE woin.status = '" + WorkflowInstanceStatus.ABORT.name() + "' "
            + " AND woin.locked = 'N' " + clusterCondition
            + " UNION ALL "
            // ( d ) completing timers (the single "?" bind parameter is the cutoff)
            + "SELECT woin.ref_num AS woin_ref_num, "
            + " '" + WorkType.COMPLETE_WORK_ITEM.name() + "' AS type, "
            + " woit.ref_num AS woit_ref_num, "
            + " woit.due_date AS waiting_since "
            + " FROM " + getSchema() + "work_items woit "
            + " JOIN " + getSchema() + "workflow_instances woin "
            + " ON woin.ref_num = woit.woin_ref_num "
            + " WHERE woin.status = '" + WorkflowInstanceStatus.EXECUTING.name() + "' "
            + " AND woin.locked = 'N' " + clusterCondition
            + " AND woit.status = '" + WorkItemStatus.NEW.name() + "' "
            + " AND woit.due_date < ? "
            + "UNION ALL "
            // ( e ) executing tasks
            + "SELECT woin.ref_num AS woin_ref_num, "
            + " '" + WorkType.EXECUTE_TASK.name() + "' AS type, "
            + " woit.ref_num AS woit_ref_num, "
            + " COALESCE(woit.date_updated, woit.date_created) AS waiting_since "
            + " FROM " + getSchema() + "work_items woit "
            + " JOIN " + getSchema() + "workflow_instances woin "
            + " ON woin.ref_num = woit.woin_ref_num "
            + " WHERE woin.status = '" + WorkflowInstanceStatus.EXECUTING.name() + "' "
            + " AND woin.locked = 'N' " + clusterCondition
            + " AND woit.status = '" + WorkItemStatus.NEW.name() + "' "
            + " AND NOT woit.bean IS NULL "
            + " ORDER BY woin_ref_num ASC, waiting_since ASC NULLS FIRST, woit_ref_num ASC";
        // Collapse whitespace so logged/compared SQL is a single line.
        return sql.replaceAll("\\s+", " ");
    }
}
public class Simulator { /** * Returns the actors to broadcast trace events to . */ private List < Routee > makeRoutes ( ) { } }
return Registry . policies ( settings ) . stream ( ) . map ( policy -> { ActorRef actorRef = context ( ) . actorOf ( Props . create ( PolicyActor . class , policy ) ) ; context ( ) . watch ( actorRef ) ; return new ActorRefRoutee ( actorRef ) ; } ) . collect ( toList ( ) ) ;
public class ShellConsole {
    /**
     * Provides a specialized {@link ShellConsole} to handle line editing,
     * history and completion. Relies on the JLine library
     * (see <a href="http://jline.sourceforge.net">http://jline.sourceforge.net</a>).
     *
     * Tries JLine v2 first, then JLine v1; returns {@code null} when neither
     * is on the classpath (or reflection fails), signalling the caller to fall
     * back to a plain console.
     *
     * @param scope the scripting scope passed through to the JLine wrapper
     * @param cs    the charset for console I/O
     * @return a JLine-backed console, or {@code null} if JLine is unavailable
     */
    public static ShellConsole getConsole(Scriptable scope, Charset cs) {
        // We don't want a compile-time dependency on the JLine jar, so use
        // reflection to load and reference the JLine classes.
        ClassLoader classLoader = ShellConsole.class.getClassLoader();
        if (classLoader == null) {
            // If the attempt to get a class specific class loader above failed
            // then fallback to the system class loader.
            classLoader = ClassLoader.getSystemClassLoader();
        }
        if (classLoader == null) {
            // If for some reason we still don't have a handle to a class
            // loader then give up (avoid a NullPointerException).
            return null;
        }
        try {
            // first try to load JLine v2...
            Class<?> readerClass = Kit.classOrNull(classLoader, "jline.console.ConsoleReader");
            if (readerClass != null) {
                return getJLineShellConsoleV2(classLoader, readerClass, scope, cs);
            }
            // ...if that fails, try to load JLine v1
            readerClass = Kit.classOrNull(classLoader, "jline.ConsoleReader");
            if (readerClass != null) {
                return getJLineShellConsoleV1(classLoader, readerClass, scope, cs);
            }
        } catch (NoSuchMethodException | IllegalAccessException
                | InstantiationException | InvocationTargetException e) {
            // Fix: the four duplicated empty catch blocks are collapsed into a
            // single multi-catch. Any reflective failure means JLine is absent
            // or incompatible — deliberately fall through and return null.
        }
        return null;
    }
}
public class HessianServlet {
    /**
     * Initialize the service, including the service object.
     *
     * Resolution order for the home implementation: an already-set field, the
     * "home-class" init parameter, the legacy "service-class" init parameter,
     * or this servlet subclass itself. The home API, object implementation and
     * object API are resolved similarly, and skeletons are built last.
     *
     * @param config servlet configuration supplying the init parameters
     * @throws ServletException if configuration is invalid or instantiation fails
     */
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        try {
            // --- resolve the home implementation ---
            if (_homeImpl != null) {
                // already injected programmatically; nothing to do
            }
            else if (getInitParameter("home-class") != null) {
                String className = getInitParameter("home-class");
                Class<?> homeClass = loadClass(className);
                _homeImpl = homeClass.newInstance();
                init(_homeImpl);
            }
            else if (getInitParameter("service-class") != null) {
                // legacy alias for "home-class"
                String className = getInitParameter("service-class");
                Class<?> homeClass = loadClass(className);
                _homeImpl = homeClass.newInstance();
                init(_homeImpl);
            }
            else {
                // no class configured: the servlet subclass itself is the service
                if (getClass().equals(HessianServlet.class))
                    throw new ServletException("server must extend HessianServlet");
                _homeImpl = this;
            }
            // --- resolve the home API interface ---
            if (_homeAPI != null) {
            }
            else if (getInitParameter("home-api") != null) {
                String className = getInitParameter("home-api");
                _homeAPI = loadClass(className);
            }
            else if (getInitParameter("api-class") != null) {
                String className = getInitParameter("api-class");
                _homeAPI = loadClass(className);
            }
            else if (_homeImpl != null) {
                _homeAPI = findRemoteAPI(_homeImpl.getClass());
                if (_homeAPI == null)
                    _homeAPI = _homeImpl.getClass();
                // NOTE(review): this unconditional overwrite makes the
                // findRemoteAPI() lookup above dead code — the implementation
                // class always wins. Confirm whether that is intentional
                // before "fixing" it; removing it would change which API the
                // skeleton exposes.
                _homeAPI = _homeImpl.getClass();
            }
            // --- resolve the optional per-object implementation and API ---
            if (_objectImpl != null) {
            }
            else if (getInitParameter("object-class") != null) {
                String className = getInitParameter("object-class");
                Class<?> objectClass = loadClass(className);
                _objectImpl = objectClass.newInstance();
                init(_objectImpl);
            }
            if (_objectAPI != null) {
            }
            else if (getInitParameter("object-api") != null) {
                String className = getInitParameter("object-api");
                _objectAPI = loadClass(className);
            }
            else if (_objectImpl != null)
                _objectAPI = _objectImpl.getClass();
            // --- build the skeletons ---
            _homeSkeleton = new HessianSkeleton(_homeImpl, _homeAPI);
            if (_objectAPI != null)
                _homeSkeleton.setObjectClass(_objectAPI);
            if (_objectImpl != null) {
                _objectSkeleton = new HessianSkeleton(_objectImpl, _objectAPI);
                _objectSkeleton.setHomeClass(_homeAPI);
            }
            else
                _objectSkeleton = _homeSkeleton;
            // "debug" init parameter is read but currently has no effect here
            if ("true".equals(getInitParameter("debug"))) {
            }
            if ("false".equals(getInitParameter("send-collection-type")))
                setSendCollectionType(false);
        } catch (ServletException e) {
            throw e;
        } catch (Exception e) {
            throw new ServletException(e);
        }
    }
}
public class RouteVersionFilter { /** * Filters route matches by specified version . * @ param < T > The target type * @ param < R > The return type * @ param request The HTTP request * @ return A filtered list of route matches */ @ Override public < T , R > Predicate < UriRouteMatch < T , R > > filter ( HttpRequest < ? > request ) { } }
ArgumentUtils . requireNonNull ( "request" , request ) ; if ( resolvingStrategies == null || resolvingStrategies . isEmpty ( ) ) { return ( match ) -> true ; } Optional < String > defaultVersion = defaultVersionProvider == null ? Optional . empty ( ) : Optional . of ( defaultVersionProvider . resolveDefaultVersion ( ) ) ; Optional < String > version = resolvingStrategies . stream ( ) . map ( strategy -> strategy . resolve ( request ) . orElse ( null ) ) . filter ( Objects :: nonNull ) . findFirst ( ) ; return ( match ) -> { Optional < String > routeVersion = getVersion ( match ) ; if ( routeVersion . isPresent ( ) ) { String resolvedVersion = version . orElse ( defaultVersion . orElse ( null ) ) ; // no version found and no default version configured if ( resolvedVersion == null ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Route specifies a version {} and no version information resolved for request to URI {}" , routeVersion . get ( ) , request . getUri ( ) ) ; } return true ; } else { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Route specifies a version {} and the version {} was resolved for request to URI {}" , routeVersion . get ( ) , resolvedVersion , request . getUri ( ) ) ; } return resolvedVersion . equals ( routeVersion . get ( ) ) ; } } else { // route is not versioned but request is if ( version . isPresent ( ) ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Route does not specify a version but the version {} was resolved for request to URI {}" , version . get ( ) , request . getUri ( ) ) ; } return false ; } else { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Route does not specify a version and no version was resolved for request to URI {}" , request . getUri ( ) ) ; } return true ; } }
public class DataFrames { /** * Convert a datavec schema to a * struct type in spark * @ param schema the schema to convert * @ return the datavec struct type */ public static StructType fromSchema ( Schema schema ) { } }
StructField [ ] structFields = new StructField [ schema . numColumns ( ) ] ; for ( int i = 0 ; i < structFields . length ; i ++ ) { switch ( schema . getColumnTypes ( ) . get ( i ) ) { case Double : structFields [ i ] = new StructField ( schema . getName ( i ) , DataTypes . DoubleType , false , Metadata . empty ( ) ) ; break ; case Integer : structFields [ i ] = new StructField ( schema . getName ( i ) , DataTypes . IntegerType , false , Metadata . empty ( ) ) ; break ; case Long : structFields [ i ] = new StructField ( schema . getName ( i ) , DataTypes . LongType , false , Metadata . empty ( ) ) ; break ; case Float : structFields [ i ] = new StructField ( schema . getName ( i ) , DataTypes . FloatType , false , Metadata . empty ( ) ) ; break ; default : throw new IllegalStateException ( "This api should not be used with strings , binary data or ndarrays. This is only for columnar data" ) ; } } return new StructType ( structFields ) ;
public class DeleteHandler { /** * Delete all traits from the specified vertex . * @ param instanceVertex * @ throws AtlasException */ private void deleteAllTraits ( AtlasVertex instanceVertex ) throws AtlasException { } }
List < String > traitNames = GraphHelper . getTraitNames ( instanceVertex ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Deleting traits {} for {}" , traitNames , string ( instanceVertex ) ) ; } String typeName = GraphHelper . getTypeName ( instanceVertex ) ; for ( String traitNameToBeDeleted : traitNames ) { String relationshipLabel = GraphHelper . getTraitLabel ( typeName , traitNameToBeDeleted ) ; deleteEdgeReference ( instanceVertex , relationshipLabel , DataTypes . TypeCategory . TRAIT , false ) ; }
public class RebootManager {
    /**
     * Notify all PendingShutdownObservers of the pending shutdown!
     *
     * @param level index of the current warning; used to derive how many
     *              warnings remain after this one
     */
    protected void notifyObservers(int level) {
        // number of warnings still to come after this one
        final int warningsLeft = getWarnings().length - level - 1;
        // milliseconds until the scheduled reboot (negative once it is overdue)
        final long msLeft = _nextReboot - System.currentTimeMillis();
        // locals are final so the anonymous ObserverOp can capture them
        _observers.apply(new ObserverList.ObserverOp<PendingShutdownObserver>() {
            public boolean apply(PendingShutdownObserver observer) {
                observer.shutdownPlanned(warningsLeft, msLeft);
                return true; // keep iterating: notify every observer
            }
        });
    }
}
public class AWSCodeCommitClient { /** * Updates the status of a pull request . * @ param updatePullRequestStatusRequest * @ return Result of the UpdatePullRequestStatus operation returned by the service . * @ throws PullRequestDoesNotExistException * The pull request ID could not be found . Make sure that you have specified the correct repository name and * pull request ID , and then try again . * @ throws InvalidPullRequestIdException * The pull request ID is not valid . Make sure that you have provided the full ID and that the pull request * is in the specified repository , and then try again . * @ throws PullRequestIdRequiredException * A pull request ID is required , but none was provided . * @ throws InvalidPullRequestStatusUpdateException * The pull request status update is not valid . The only valid update is from < code > OPEN < / code > to * < code > CLOSED < / code > . * @ throws InvalidPullRequestStatusException * The pull request status is not valid . The only valid values are < code > OPEN < / code > and < code > CLOSED < / code > * @ throws PullRequestStatusRequiredException * A pull request status is required , but none was provided . * @ throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed . * @ throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed . * @ throws EncryptionKeyDisabledException * The encryption key is disabled . * @ throws EncryptionKeyNotFoundException * No encryption key was found . * @ throws EncryptionKeyUnavailableException * The encryption key is not available . * @ sample AWSCodeCommit . UpdatePullRequestStatus * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codecommit - 2015-04-13 / UpdatePullRequestStatus " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdatePullRequestStatusResult updatePullRequestStatus ( UpdatePullRequestStatusRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdatePullRequestStatus ( request ) ;
public class ModelMapper { /** * Transform client / server model to a database model * @ param comment - client / server model to transform * @ return the database model */ public DbComment getDbComment ( final Comment comment ) { } }
final DbComment dbComment = new DbComment ( ) ; dbComment . setEntityId ( comment . getEntityId ( ) ) ; dbComment . setEntityType ( comment . getEntityType ( ) ) ; dbComment . setAction ( comment . getAction ( ) ) ; dbComment . setDbCommentText ( comment . getCommentText ( ) ) ; dbComment . setDbCommentedBy ( comment . getCommentedBy ( ) ) ; dbComment . setDbCreatedDateTime ( comment . getCreatedDateTime ( ) ) ; return dbComment ;
public class RoundHelper {
    /**
     * Round using the {@link RoundingMode#HALF_EVEN} mode and exponential
     * representation.
     *
     * Thin wrapper: delegates to the general {@code getRounded} with the
     * rounding mode and decimal type pre-selected.
     *
     * @param dValue
     *        The value to be rounded
     * @param nScale
     *        The precision scale
     * @return the rounded value
     */
    public static double getRoundedEvenExp(final double dValue, @Nonnegative final int nScale) {
        return getRounded(dValue, nScale, RoundingMode.HALF_EVEN, EDecimalType.EXP);
    }
}