signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class LongColumn { /** * Returns a new numeric column initialized with the given name and size . The values in the column are
* integers beginning at startsWith and continuing through size ( exclusive ) , monotonically increasing by 1
* TODO consider a generic fill function including steps or random samples from various distributions */
public static LongColumn indexColumn ( final String columnName , final int size , final int startsWith ) { } } | final LongColumn indexColumn = LongColumn . create ( columnName , size ) ; for ( int i = 0 ; i < size ; i ++ ) { indexColumn . append ( i + startsWith ) ; } return indexColumn ; |
public class Ix { /** * Removes those elements via Iterator . remove ( ) from this sequence that match the
* given predicate .
* The result ' s iterator ( ) forwards the calls to remove ( ) to this ' Iterator .
* @ param predicate the function called with the current element and returns true
* if that particular element should be removed .
* @ return the new Ix instance
* @ throws NullPointerException if predicate is null
* @ since 1.0
* @ see # retain ( IxPredicate ) */
public final Ix < T > remove ( IxPredicate < ? super T > predicate ) { } } | return new IxRemove < T > ( this , nullCheck ( predicate , "predicate is null" ) ) ; |
public class RelaxNGSchemaFactory { /** * Non - public methods */
@ Override protected XMLValidationSchema loadSchema ( InputSource src , Object sysRef ) throws XMLStreamException { } } | /* 26 - Oct - 2007 , TSa : Are sax parser factories safe to share ?
* If not , should just create new instances for each
* parsed schema . */
/* Another thing ; should we use a controller to get notified about
* errors in parsing ? */
SAXParserFactory saxFactory = getSaxFactory ( ) ; MyGrammarController ctrl = new MyGrammarController ( ) ; TREXGrammar grammar = RELAXNGReader . parse ( src , saxFactory , ctrl ) ; if ( grammar == null ) { String msg = "Failed to load RelaxNG schema from '" + sysRef + "'" ; String emsg = ctrl . mErrorMsg ; if ( emsg != null ) { msg = msg + ": " + emsg ; } throw new XMLStreamException ( msg ) ; } return new RelaxNGSchema ( grammar ) ; |
public class TruncatedNormalDistribution { /** * { @ inheritDoc }
* @ since 3.2 */
@ Override public double inverseCumulativeProbability ( final double p ) throws OutOfRangeException { } } | if ( p < 0.0 || p > 1.0 ) { throw new OutOfRangeException ( p , 0 , 1 ) ; } if ( means != null ) throw new IllegalStateException ( "Unable to sample from more than one mean" ) ; return mean + standardDeviation * SQRT2 * Erf . erfInv ( 2 * p - 1 ) ; |
public class ObjectLiteral { /** * Adds an element to the list , and sets its parent to this node .
* @ param element the property node to append to the end of the list
* @ throws IllegalArgumentException } if element is { @ code null } */
public void addElement ( ObjectProperty element ) { } } | assertNotNull ( element ) ; if ( elements == null ) { elements = new ArrayList < ObjectProperty > ( ) ; } elements . add ( element ) ; element . setParent ( this ) ; |
public class AbstractSerializer { /** * * * * Map * * * */
@ SuppressWarnings ( { } } | "rawtypes" , "unchecked" } ) protected ISynchronizationPoint < ? extends Exception > serializeMapValue ( SerializationContext context , Map < ? , ? > map , TypeDefinition typeDef , String path , List < SerializationRule > rules ) { TypeDefinition type = new TypeDefinition ( MapEntry . class , typeDef . getParameters ( ) ) ; type = new TypeDefinition ( ArrayList . class , type ) ; ArrayList < MapEntry > entries = new ArrayList < > ( map . size ( ) ) ; for ( Map . Entry e : map . entrySet ( ) ) { MapEntry me = new MapEntry ( ) ; me . key = e . getKey ( ) ; me . value = e . getValue ( ) ; entries . add ( me ) ; } return serializeValue ( context , entries , type , path , rules ) ; |
public class AggressiveInlineAliases { /** * Adds properties of ` name ` to the worklist if the following conditions hold :
* < ol >
* < li > 1 . The given property of ` name ` either meets condition ( a ) or is unsafely collapsible ( as
* defined by { @ link Name # canCollapse ( ) }
* < li > 2 . ` name ` meets condition ( b )
* < / ol >
* This only adds direct properties of a name , not all its descendants . For example , this adds
* ` a . b ` given ` a ` , but not ` a . b . c ` . */
private void maybeAddPropertiesToWorklist ( Name name , Deque < Name > workList ) { } } | if ( ! ( name . isObjectLiteral ( ) || name . isFunction ( ) || name . isClass ( ) ) ) { // Don ' t add properties for things like ` Foo ` in
// const Foo = someMysteriousFunctionCall ( ) ;
// Since ` Foo ` is not declared as an object , class , or function literal , assume its value
// may be aliased somewhere and its properties do not meet condition ( a ) .
return ; } if ( isUnsafelyReassigned ( name ) ) { // Don ' t add properties if this was assigned multiple times , except for ' safe ' reassignments :
// var ns = ns | | { } ;
// This is equivalent to condition ( b )
return ; } if ( name . props == null ) { return ; } if ( name . getAliasingGets ( ) == 0 ) { // All of { @ code name } ' s children meet condition ( a ) , so they can be
// added to the worklist .
workList . addAll ( name . props ) ; } else { // The children do NOT meet condition ( a ) but we may try to add them anyway .
// This is because CollapseProperties will unsafely collapse properties on constructors and
// enums , so we want to be more aggressive about inlining references to their children .
for ( Name property : name . props ) { // Only add properties that would be unsafely collapsed by CollapseProperties
if ( property . canCollapse ( ) ) { workList . add ( property ) ; } } } |
public class PoolablePreparedStatement { /** * Method execute .
* @ return boolean
* @ throws SQLException
* @ see java . sql . PreparedStatement # execute ( ) */
@ Override public boolean execute ( ) throws SQLException { } } | boolean isOk = false ; try { boolean result = internalStmt . execute ( ) ; isOk = true ; return result ; } finally { poolableConn . updateLastSQLExecutionTime ( isOk ) ; } |
public class SimpleHostConnectionPool {
    /**
     * Try to open a new connection asynchronously. We don't actually return a
     * connection here. Instead, the connection will be added to idle queue when
     * it's ready.
     *
     * Counter protocol (order-sensitive — do not reorder):
     * activeCount is optimistically incremented and compensated in the outer
     * finally when no connection object was created; pendingConnections is
     * incremented once the active slot is claimed and compensated in the inner
     * finally on the same condition. The async callbacks decrement
     * pendingConnections (and activeCount on failure) once the open resolves.
     *
     * @return true if an async open was initiated; false if the pool is at its
     *         connection/pending limits or creation was throttled
     */
    private boolean tryOpenAsync() {
        Connection<CL> connection = null;
        // Try to open a new connection, as long as we haven't reached the max
        if (activeCount.get() < config.getMaxConnsPerHost()) {
            try {
                // Claim an active slot; re-check after increment to close the race
                // between get() above and other threads incrementing concurrently.
                if (activeCount.incrementAndGet() <= config.getMaxConnsPerHost()) {
                    // Don't try to open too many connections at the same time.
                    if (pendingConnections.incrementAndGet() > config.getMaxPendingConnectionsPerHost()) {
                        pendingConnections.decrementAndGet();
                    } else {
                        try {
                            connectAttempt.incrementAndGet();
                            connection = factory.createConnection(this);
                            connection.openAsync(new Connection.AsyncOpenCallback<CL>() {
                                @Override
                                public void success(Connection<CL> connection) {
                                    openConnections.incrementAndGet();
                                    pendingConnections.decrementAndGet();
                                    availableConnections.add(connection);
                                    // Sanity check in case the connection
                                    // pool was closed
                                    if (isShutdown()) {
                                        discardIdleConnections();
                                    }
                                }

                                @Override
                                public void failure(Connection<CL> conn, ConnectionException e) {
                                    failedOpenConnections.incrementAndGet();
                                    pendingConnections.decrementAndGet();
                                    activeCount.decrementAndGet();
                                    if (e instanceof IsDeadConnectionException) {
                                        noteError(e);
                                    }
                                }
                            });
                            return true;
                        } catch (ThrottledException e) {
                            // Trying to open way too many connections here;
                            // intentionally swallowed — the finally blocks roll
                            // back both counters and we fall through to false.
                        } finally {
                            // connection == null means createConnection/openAsync
                            // never took ownership: release the pending slot.
                            if (connection == null)
                                pendingConnections.decrementAndGet();
                        }
                    }
                }
            } finally {
                // Release the optimistically-claimed active slot on every path
                // where no connection object was created (limit hit or throttled).
                if (connection == null) {
                    activeCount.decrementAndGet();
                }
            }
        }
        return false;
    }
}
public class AdGroupServiceLocator {
    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     *
     * NOTE(review): this is Axis-generated code; the broad catch of Throwable and
     * raw Class parameter follow the generator's conventions and are left as-is.
     *
     * @param serviceEndpointInterface the service endpoint interface to obtain a stub for
     * @return the stub bound to this locator's configured endpoint address
     * @throws javax.xml.rpc.ServiceException if stub creation fails or no stub
     *         implementation exists for the interface
     */
    public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        try {
            if (com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
                // Build the SOAP stub against the configured port address and bind
                // its WSDD service name before returning it.
                com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceSoapBindingStub _stub =
                        new com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceSoapBindingStub(
                                new java.net.URL(AdGroupServiceInterfacePort_address), this);
                _stub.setPortName(getAdGroupServiceInterfacePortWSDDServiceName());
                return _stub;
            }
        } catch (java.lang.Throwable t) {
            // Wrap any failure (bad URL, stub construction) in the JAX-RPC exception type.
            throw new javax.xml.rpc.ServiceException(t);
        }
        throw new javax.xml.rpc.ServiceException(
                "There is no stub implementation for the interface: "
                        + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
    }
}
public class MemoryConsumer { /** * Allocate a memory block with at least ` required ` bytes .
* @ throws SparkOutOfMemoryError */
protected MemoryBlock allocatePage ( long required ) { } } | MemoryBlock page = taskMemoryManager . allocatePage ( Math . max ( pageSize , required ) , this ) ; if ( page == null || page . size ( ) < required ) { throwOom ( page , required ) ; } used += page . size ( ) ; return page ; |
public class GlobalOperationClient { /** * Retrieves the specified Operations resource . Gets a list of operations by making a list ( )
* request .
* < p > Sample code :
* < pre > < code >
* try ( GlobalOperationClient globalOperationClient = GlobalOperationClient . create ( ) ) {
* ProjectGlobalOperationName operation = ProjectGlobalOperationName . of ( " [ PROJECT ] " , " [ OPERATION ] " ) ;
* Operation response = globalOperationClient . getGlobalOperation ( operation . toString ( ) ) ;
* < / code > < / pre >
* @ param operation Name of the Operations resource to return .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation getGlobalOperation ( String operation ) { } } | GetGlobalOperationHttpRequest request = GetGlobalOperationHttpRequest . newBuilder ( ) . setOperation ( operation ) . build ( ) ; return getGlobalOperation ( request ) ; |
public class VictimsRecord { /** * Processes a given { @ link Metadata } object for for Manifest keys to
* determine vendor , version and name .
* @ param md */
private void setFromMetadata ( Metadata md ) { } } | // TODO : add pom . properties support ?
String vendorkey = Attributes . Name . IMPLEMENTATION_VENDOR . toString ( ) ; String versionkey = Attributes . Name . IMPLEMENTATION_VERSION . toString ( ) ; String namekey = Attributes . Name . IMPLEMENTATION_TITLE . toString ( ) ; if ( this . vendor . equals ( UNKNOWN ) && md . containsKey ( vendorkey ) ) { this . vendor = md . get ( vendorkey ) ; } if ( this . version . equals ( UNKNOWN ) && md . containsKey ( versionkey ) ) { this . version = md . get ( versionkey ) ; } if ( this . name . equals ( UNKNOWN ) && md . containsKey ( namekey ) ) { this . name = md . get ( namekey ) ; } |
public class RuleScene { /** * / * - - - - - [ Antialiasing ] - - - - - */
public void paintChildren ( ) { } } | Graphics2D g = getGraphics ( ) ; Object anti = g . getRenderingHint ( RenderingHints . KEY_ANTIALIASING ) ; Object textAnti = g . getRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING ) ; g . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; g . setRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING , RenderingHints . VALUE_TEXT_ANTIALIAS_ON ) ; super . paintChildren ( ) ; g . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , anti ) ; g . setRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING , textAnti ) ; |
public class rnat { /** * Use this API to update rnat resources . */
public static base_responses update ( nitro_service client , rnat resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { rnat updateresources [ ] = new rnat [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new rnat ( ) ; updateresources [ i ] . network = resources [ i ] . network ; updateresources [ i ] . netmask = resources [ i ] . netmask ; updateresources [ i ] . natip = resources [ i ] . natip ; updateresources [ i ] . td = resources [ i ] . td ; updateresources [ i ] . aclname = resources [ i ] . aclname ; updateresources [ i ] . redirectport = resources [ i ] . redirectport ; updateresources [ i ] . natip2 = resources [ i ] . natip2 ; } result = update_bulk_request ( client , updateresources ) ; } return result ; |
public class Mail { /** * set the value bcc Indicates addresses to copy the e - mail message to , without listing them in the
* message header . " bcc " stands for " blind carbon copy . "
* @ param strBcc value to set
* @ throws ApplicationException */
public void setBcc ( Object bcc ) throws ApplicationException { } } | if ( StringUtil . isEmpty ( bcc ) ) return ; try { smtp . addBCC ( bcc ) ; } catch ( Exception e ) { throw new ApplicationException ( "attribute [bcc] of the tag [mail] is invalid" , e . getMessage ( ) ) ; } |
public class TableFactor { /** * Gets a { @ code TableFactor } over { @ code vars } which assigns unit weight to
* { @ code assignment } and 0 to all other assignments . Requires
* { @ code assignment } to contain all of { @ code vars } . The weights in the
* returned factor are represented in logspace .
* @ param vars
* @ param assignment
* @ return */
public static TableFactor logPointDistribution ( VariableNumMap vars , Assignment assignment ) { } } | DenseTensorBuilder builder = new DenseTensorBuilder ( vars . getVariableNumsArray ( ) , vars . getVariableSizes ( ) , Double . NEGATIVE_INFINITY ) ; builder . put ( vars . assignmentToIntArray ( assignment ) , 0.0 ) ; return new TableFactor ( vars , new LogSpaceTensorAdapter ( builder . build ( ) ) ) ; |
public class CommercePriceListAccountRelLocalServiceBaseImpl {
    /**
     * Deletes the commerce price list account rel from the database. Also notifies the
     * appropriate model listeners, and (via {@code @Indexable}) removes the entity
     * from the search index.
     *
     * @param commercePriceListAccountRel the commerce price list account rel
     * @return the commerce price list account rel that was removed
     * @throws PortalException if the persistence removal fails
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CommercePriceListAccountRel deleteCommercePriceListAccountRel(
            CommercePriceListAccountRel commercePriceListAccountRel) throws PortalException {
        // Straight delegation to the persistence layer; listeners fire there.
        return commercePriceListAccountRelPersistence.remove(commercePriceListAccountRel);
    }
}
public class ZMsg { /** * Create a new ZMsg from one or more Strings
* @ param strings
* Strings to add as frames .
* @ return
* ZMsg object */
public static ZMsg newStringMsg ( String ... strings ) { } } | ZMsg msg = new ZMsg ( ) ; for ( String data : strings ) { msg . addString ( data ) ; } return msg ; |
public class ConferenceProviderMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     * Each getter is written against its pre-built field binding; the order of
     * calls determines field order in the serialized output, so do not reorder.
     *
     * @param conferenceProvider the object to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ConferenceProvider conferenceProvider, ProtocolMarshaller protocolMarshaller) {
        if (conferenceProvider == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(conferenceProvider.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(conferenceProvider.getName(), NAME_BINDING);
            protocolMarshaller.marshall(conferenceProvider.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(conferenceProvider.getIPDialIn(), IPDIALIN_BINDING);
            protocolMarshaller.marshall(conferenceProvider.getPSTNDialIn(), PSTNDIALIN_BINDING);
            protocolMarshaller.marshall(conferenceProvider.getMeetingSetting(), MEETINGSETTING_BINDING);
        } catch (Exception e) {
            // Wrap every failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ExecutorBuilder { /** * 构建ThreadPoolExecutor
* @ param builder { @ link ExecutorBuilder }
* @ return { @ link ThreadPoolExecutor } */
private static ThreadPoolExecutor build ( ExecutorBuilder builder ) { } } | final int corePoolSize = builder . corePoolSize ; final int maxPoolSize = builder . maxPoolSize ; final long keepAliveTime = builder . keepAliveTime ; final BlockingQueue < Runnable > workQueue ; if ( null != builder . workQueue ) { workQueue = builder . workQueue ; } else { // corePoolSize为0则要使用SynchronousQueue避免无限阻塞
workQueue = ( corePoolSize <= 0 ) ? new SynchronousQueue < Runnable > ( ) : new LinkedBlockingQueue < Runnable > ( ) ; } final ThreadFactory threadFactory = ( null != builder . threadFactory ) ? builder . threadFactory : Executors . defaultThreadFactory ( ) ; RejectedExecutionHandler handler = ObjectUtil . defaultIfNull ( builder . handler , new ThreadPoolExecutor . AbortPolicy ( ) ) ; final ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor ( corePoolSize , maxPoolSize , keepAliveTime , TimeUnit . NANOSECONDS , workQueue , threadFactory , handler ) ; if ( null != builder . allowCoreThreadTimeOut ) { threadPoolExecutor . allowCoreThreadTimeOut ( builder . allowCoreThreadTimeOut ) ; } return threadPoolExecutor ; |
public class StringGroovyMethods {
    /**
     * Convenience method to capitalize the first letter of a CharSequence
     * (typically the first letter of a word). Example usage:
     * <pre class="groovyTestCase">
     * assert 'h'.capitalize() == 'H'
     * assert 'hello'.capitalize() == 'Hello'
     * assert 'hello world'.capitalize() == 'Hello world'
     * assert 'Hello World' ==
     *     'hello world'.split(' ').collect{ it.capitalize() }.join(' ')
     * </pre>
     *
     * @param self The CharSequence to capitalize
     * @return A String containing the capitalized toString() of the CharSequence
     * @see #capitalize(String)
     * @since 1.8.2
     */
    public static String capitalize(CharSequence self) {
        final String text = self.toString();
        if (text == null || text.length() == 0) {
            // Nothing to capitalize; return the (possibly empty) string as-is.
            return text;
        }
        final char first = Character.toUpperCase(text.charAt(0));
        return first + text.substring(1);
    }
}
public class AbstractProxySessionManager { /** * Invalidates the given session .
* No more heartbeats will be sent for the given session . */
public final void invalidateSession ( RaftGroupId groupId , long id ) { } } | SessionState session = sessions . get ( groupId ) ; if ( session != null && session . id == id ) { sessions . remove ( groupId , session ) ; } |
public class ResultStreamer { /** * Return a stream over the given results . If the results are not inline , the
* results will come from S3
* @ param resultValue result
* @ return stream
* @ throws Exception errors */
public Reader getResults ( ResultValue resultValue ) throws Exception { } } | if ( resultValue . isInline ( ) ) { return new StringReader ( resultValue . getResults ( ) ) ; } return readFromS3 ( resultValue . getResult_location ( ) ) ; |
public class SameJSONAs {
    /**
     * Creates a matcher that compares {@code JSONArray}s using the modal
     * (strictness-configurable) array comparator.
     *
     * @param expected the expected {@code JSONArray} instance
     * @return the {@code Matcher} instance
     */
    @Factory
    public static SameJSONAs<JSONArray> sameJSONArrayAs(JSONArray expected) {
        return new SameJSONAs<JSONArray>(expected, modalComparatorFor(jsonArrayComparison()));
    }
}
public class ResolvingXMLConfiguration { /** * Resolve the IP address from the special " resolver " network interface
* format .
* @ param niSpec the special " resolver " network interface format
* @ param name the name of the resolveIP item
* @ return either UNKNOWN , the original string or the resolved IP address */
private String ipAddressFromNI ( String niSpec , String name ) { } } | String result = "UNKNOWN" ; NetworkInterface ni = null ; String [ ] parts = niSpec . split ( ":" ) ; String niName = "eth0" ; // default NIC name
Scheme scheme = Scheme . ipv4 ; int index = 0 ; // default index
Scope scope = Scope . global ; // can be global , linklocal or sitelocal - is
// global by default
// Parse up the spec
for ( int idx = 0 ; idx < parts . length ; idx ++ ) { switch ( idx ) { case 0 : niName = parts [ idx ] ; break ; case 1 : String _schemeStr = parts [ idx ] . toLowerCase ( ) ; try { scheme = Scheme . valueOf ( _schemeStr ) ; } catch ( Exception e ) { warn ( "Error parsing scheme for resolveIP named [" + name + "]. Expecting ipv4 or ipv6 but got [" + _schemeStr + "]. Using default of ipv4." ) ; scheme = Scheme . ipv4 ; // default
} break ; case 2 : String scopeTarget = parts [ idx ] . toLowerCase ( ) ; try { scope = Scope . valueOf ( scopeTarget ) ; } catch ( Exception e ) { warn ( "Error parsing scope for resolveIP named [" + name + "]. Expecting global, sitelocal or linklocal but got [" + scopeTarget + "]. Using default of global." ) ; scope = Scope . global ; // default
} break ; case 3 : try { index = Integer . parseInt ( parts [ idx ] ) ; } catch ( NumberFormatException e ) { index = 0 ; // default
} break ; default : break ; } } // Find the specified NIC
try { // if the niName is localhost , get the IP address associated with
// localhost
if ( niName . equalsIgnoreCase ( "localhost" ) ) { if ( scope != Scope . sitelocal ) { warn ( "resolveIP named [" + name + "] has ni of localhost and will default to scope of sitelocal (or it won't work). Expects sitelocal but got [" + scope + "]." ) ; scope = Scope . sitelocal ; // force scope to site local
} try { InetAddress addr = InetAddress . getLocalHost ( ) ; ni = NetworkInterface . getByInetAddress ( addr ) ; } catch ( UnknownHostException e ) { // This should not happen
warn ( "The lookup of the NI for localhost for resolveIP named [" + name + "] caused an exception. Look for odd entries in /etc/hosts." ) ; return "UNKNOWN NI" ; } } else { ni = NetworkInterface . getByName ( niName ) ; } } catch ( SocketException e ) { error ( "An error occured looking up the interface named [" + niName + "] for resolveIP named [" + name + "]" , e ) ; return "UNKNOWN NI" ; } // if we have a network interface , then get the right ip
List < InetAddress > ipv4Addrs = new ArrayList < InetAddress > ( ) ; List < InetAddress > ipv6Addrs = new ArrayList < InetAddress > ( ) ; if ( ni != null ) { // group the two types of addresses
Enumeration < InetAddress > addrList = ni . getInetAddresses ( ) ; do { InetAddress addr = addrList . nextElement ( ) ; // filter out only the type specified ( linklocal , sitelocal or global )
switch ( scope ) { case linklocal : if ( addr . isLinkLocalAddress ( ) ) { if ( addr instanceof Inet4Address ) ipv4Addrs . add ( ( Inet4Address ) addr ) ; if ( addr instanceof Inet6Address ) ipv6Addrs . add ( ( Inet6Address ) addr ) ; } break ; case sitelocal : if ( addr . isSiteLocalAddress ( ) ) { if ( addr instanceof Inet4Address ) ipv4Addrs . add ( ( Inet4Address ) addr ) ; if ( addr instanceof Inet6Address ) ipv6Addrs . add ( ( Inet6Address ) addr ) ; } break ; case global : if ( ! addr . isSiteLocalAddress ( ) && ! addr . isLinkLocalAddress ( ) ) { if ( addr instanceof Inet4Address ) ipv4Addrs . add ( ( Inet4Address ) addr ) ; if ( addr instanceof Inet6Address ) ipv6Addrs . add ( ( Inet6Address ) addr ) ; } break ; default : break ; } } while ( addrList . hasMoreElements ( ) ) ; } List < InetAddress > targetAddrs = null ; switch ( scheme ) { case ipv4 : targetAddrs = ipv4Addrs ; break ; case ipv6 : targetAddrs = ipv6Addrs ; break ; default : break ; } // Get a candidate addr from the list
InetAddress candidateAddr = null ; if ( ! targetAddrs . isEmpty ( ) ) { if ( index < targetAddrs . size ( ) ) { candidateAddr = targetAddrs . get ( index ) ; result = candidateAddr . getHostAddress ( ) ; } else { error ( "Error getting index [" + index + "] addrees for resolveIP named [" + name + "]. Index is out of bounds." ) ; return "INDEX OUT OF BOUNDS" ; } } else { error ( "Empty list of addresses for resolveIP named [" + name + "]" ) ; return "EMPTY LIST" ; } return result ; |
public class DataCollectorList { /** * Adds a DataCollector to the given list .
* @ param collectorName The collector that should be added */
protected final void addDataCollector ( final Class collectorName ) { } } | if ( ! DataCollector . class . isAssignableFrom ( collectorName ) ) { throw new RuntimeException ( "Class must be subclass of DataCollector!" ) ; } collectors . add ( collectorName ) ; |
public class XMLUtil {
    /**
     * Replies the float value that corresponds to the specified attribute's path.
     *
     * <p>The path is an ordered list of tag's names and ended by the name of
     * the attribute.
     * Be careful about the fact that the names are case sensitives.
     *
     * @param document is the XML document to explore.
     * @param path is the list of element names ended by the attribute's name.
     * @return the float value of the specified attribute or <code>0</code>.
     */
    @Pure
    public static float getAttributeFloat(Node document, String... path) {
        assert document != null : AssertMessages.notNullParameter(0);
        // Delegates with caseSensitive=true and a default of 0f when absent.
        return getAttributeFloatWithDefault(document, true, 0f, path);
    }
}
public class ServerRedirectService {
    /**
     * Delete a server group by id, along with all server rows that reference it.
     *
     * NOTE(review): failures are intentionally best-effort — any exception is
     * printed and swallowed, so callers get no signal on failure; consider a
     * logger instead of printStackTrace(). The id is an int, so the string
     * concatenation here is not an injection vector, but a PreparedStatement
     * would still be the safer idiom — TODO confirm against sqlService's API.
     *
     * @param id server group ID
     */
    public void deleteServerGroup(int id) {
        try {
            // Delete the group row first, then its member servers.
            sqlService.executeUpdate("DELETE FROM " + Constants.DB_TABLE_SERVER_GROUPS
                    + " WHERE " + Constants.GENERIC_ID + " = " + id + ";");
            sqlService.executeUpdate("DELETE FROM " + Constants.DB_TABLE_SERVERS
                    + " WHERE " + Constants.SERVER_REDIRECT_GROUP_ID + " = " + id);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
public class PodApp {
    /**
     * Post-class-loader initialization: builds and starts the AMP service
     * manager and, when a Bartender system is present, binds the active pod.
     *
     * NOTE(review): this method is visibly in-progress — the original javadoc
     * referenced {@code preConfigInit()}, several configuration steps are
     * commented out, and {@code podNode} is computed but only used by
     * commented-out code. Left byte-for-byte as found.
     */
    public void postClassLoaderInit() {
        // bartender.setLocalShard(pod.getNode(podNode));
        ServiceManagerBuilderAmp builder = ServicesAmp.newManager();
        builder.name(EnvLoader.getEnvironmentName());
        BartenderSystem bartender = BartenderSystem.current();
        if (bartender != null) {
            // Resolve and register the active pod for this app's pod name.
            PodBartender pod = bartender.findActivePod(getPodName());
            Objects.requireNonNull(pod);
            int podNode = _builder.getPodNode(); // only used by the commented-out line below
            bartender.setLocalPod(pod);
            // builder.setPodNode(new ServiceNodeImpl(pod.getNode(podNode)));
        }
        /* // builder.setJournalMaxCount(_builder.getJournalMaxCount());
        builder.setJournalDelay(_builder.getJournalDelay());
        // XXX: config timing issue
        builder.debug(_builder.isDebug());
        builder.debugQueryTimeout(_builder.getDebugQueryTimeout());
        builder.autoStart(false); */
        _ampManager = builder.start();
        /* if (_controller.getConfigException() != null) {
        setAmpConfigException(_controller.getConfigException());
        else if (_configException != null) {
        setAmpConfigException(_configException); */
        Amp.contextManager(_ampManager);
        /* _ampManager.run(() -> {
        EmbedBuilder embedBuilder = new EmbedBuilder();
        embedBuilder.scanClassLoader();
        embedBuilder.build(); */
    }
}
public class OrchestrationMasterSlaveRule { /** * Get slave data source names .
* @ return available slave data source names */
@ Override public Collection < String > getSlaveDataSourceNames ( ) { } } | if ( disabledDataSourceNames . isEmpty ( ) ) { return super . getSlaveDataSourceNames ( ) ; } Collection < String > result = new LinkedList < > ( super . getSlaveDataSourceNames ( ) ) ; result . removeAll ( disabledDataSourceNames ) ; return result ; |
public class TokenIndex {
    /**
     * Builds the index: flattens the witnesses into a token array, computes the
     * suffix array and LCP array over it (SAIS algorithm), derives the LCP
     * interval blocks, and maps each witness to its block instances.
     *
     * TODO: we do not have to store witnesses!
     */
    public void prepare() {
        this.token_array = this.prepareTokenArray();
        // Suffix array + LCP computed together over the token array.
        SuffixData suffixData = SuffixArrays.createWithLCP(token_array, new SAIS(), comparator);
        this.suffix_array = suffixData.getSuffixArray();
        this.LCP_array = suffixData.getLCP();
        // Blocks depend on the LCP array; the map depends on the blocks — keep this order.
        this.blocks = splitLCP_ArrayIntoIntervals();
        constructWitnessToBlockInstancesMap();
    }
}
public class Asm {
    /**
     * Create a pointer (memory) operand with unspecified size.
     * Delegates to the internal builder with size 0, which encodes
     * "no explicit operand size".
     *
     * @param base  the base register
     * @param index the index register
     * @param shift the scale shift applied to the index
     * @param disp  the displacement
     * @return the memory operand
     */
    public static final Mem ptr(Register base, Register index, int shift, long disp) {
        return _ptr_build(base, index, shift, disp, 0);
    }
}
public class ListRulesPackagesRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     * Field order in the serialized output follows the call order here.
     *
     * @param listRulesPackagesRequest the request to marshall; must not be null
     * @param protocolMarshaller       the target marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ListRulesPackagesRequest listRulesPackagesRequest, ProtocolMarshaller protocolMarshaller) {
        if (listRulesPackagesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listRulesPackagesRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listRulesPackagesRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap every failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BatchAPIRequest { /** * Parses btch api response to create a list of BoxAPIResponse objects .
* @ param batchResponse response of a batch api request
* @ return list of BoxAPIResponses */
protected List < BoxAPIResponse > parseResponse ( BoxJSONResponse batchResponse ) { } } | JsonObject responseJSON = JsonObject . readFrom ( batchResponse . getJSON ( ) ) ; List < BoxAPIResponse > responses = new ArrayList < BoxAPIResponse > ( ) ; Iterator < JsonValue > responseIterator = responseJSON . get ( "responses" ) . asArray ( ) . iterator ( ) ; while ( responseIterator . hasNext ( ) ) { JsonObject jsonResponse = responseIterator . next ( ) . asObject ( ) ; BoxAPIResponse response = null ; // Gather headers
Map < String , String > responseHeaders = new HashMap < String , String > ( ) ; if ( jsonResponse . get ( "headers" ) != null ) { JsonObject batchResponseHeadersObject = jsonResponse . get ( "headers" ) . asObject ( ) ; for ( JsonObject . Member member : batchResponseHeadersObject ) { String headerName = member . getName ( ) ; String headerValue = member . getValue ( ) . asString ( ) ; responseHeaders . put ( headerName , headerValue ) ; } } // Construct a BoxAPIResponse when response is null , or a BoxJSONResponse when there ' s a response
// ( not anticipating any other response as per current APIs .
// Ideally we should do it based on response header )
if ( jsonResponse . get ( "response" ) == null || jsonResponse . get ( "response" ) . isNull ( ) ) { response = new BoxAPIResponse ( jsonResponse . get ( "status" ) . asInt ( ) , responseHeaders ) ; } else { response = new BoxJSONResponse ( jsonResponse . get ( "status" ) . asInt ( ) , responseHeaders , jsonResponse . get ( "response" ) . asObject ( ) ) ; } responses . add ( response ) ; } return responses ; |
public class ClassRefTypeSignature {
    /**
     * Parse a class type signature of the JVM generic-signature form
     * {@code L pkg/Outer<Args>.Inner<Args>...;}. Token consumption is strictly
     * ordered: 'L', identifier (with '/' translated to '.'), optional type
     * arguments, zero or more '.'-separated inner-class suffixes each with
     * optional type arguments, then ';'.
     *
     * @param parser
     *            The parser.
     * @param definingClassName
     *            The name of the defining class (for resolving type variables).
     * @return The class type signature, or null if the next token is not 'L'.
     * @throws ParseException
     *             If the type signature could not be parsed.
     */
    static ClassRefTypeSignature parse(final Parser parser, final String definingClassName) throws ParseException {
        if (parser.peek() == 'L') {
            parser.next();
            // Read the (possibly package-qualified) outer class name.
            if (!TypeUtils.getIdentifierToken(parser, /* separator = */
            '/', /* separatorReplace = */
            '.')) {
                throw new ParseException(parser, "Could not parse identifier token");
            }
            final String className = parser.currToken();
            final List<TypeArgument> typeArguments = TypeArgument.parseList(parser, definingClassName);
            List<String> suffixes;
            List<List<TypeArgument>> suffixTypeArguments;
            if (parser.peek() == '.') {
                // Inner-class suffix chain: each '.' introduces a nested class
                // name with its own (possibly empty) type argument list.
                suffixes = new ArrayList<>();
                suffixTypeArguments = new ArrayList<>();
                while (parser.peek() == '.') {
                    parser.expect('.');
                    if (!TypeUtils.getIdentifierToken(parser, /* separator = */
                    '/', /* separatorReplace = */
                    '.')) {
                        throw new ParseException(parser, "Could not parse identifier token");
                    }
                    suffixes.add(parser.currToken());
                    suffixTypeArguments.add(TypeArgument.parseList(parser, definingClassName));
                }
            } else {
                suffixes = Collections.emptyList();
                suffixTypeArguments = Collections.emptyList();
            }
            parser.expect(';');
            return new ClassRefTypeSignature(className, typeArguments, suffixes, suffixTypeArguments);
        } else {
            // Not a class reference signature; let the caller try another form.
            return null;
        }
    }
}
public class KNN { /** * 分类 , 返回标签 , 格式可自定义
* @ param instance
* @ return */
public TPredict classify ( Instance instance , int n ) { } } | LinkedPredict < String > pred = new LinkedPredict < String > ( k ) ; for ( int i = 0 ; i < prototypes . size ( ) ; i ++ ) { Instance curInst = prototypes . get ( i ) ; // if ( ( ( String ) curInst . getSource ( ) ) . contains ( " 听 # per # 的歌 " ) )
// System . out . println ( " " ) ;
float score ; try { score = sim . calc ( instance . getData ( ) , curInst . getData ( ) ) ; } catch ( Exception e ) { // TODO Auto - generated catch block
e . printStackTrace ( ) ; return null ; } pred . add ( ( String ) curInst . getTarget ( ) , score , ( String ) curInst . getSource ( ) ) ; } // 排序
LinkedPredict < String > newpred = pred . mergeDuplicate ( useScore ) ; newpred . assertSize ( n ) ; return newpred ; |
public class RichDiagnosticFormatter { /** * Get the DiagnosticFormatter instance for this context . */
public static RichDiagnosticFormatter instance ( Context context ) { } } | RichDiagnosticFormatter instance = context . get ( RichDiagnosticFormatter . class ) ; if ( instance == null ) instance = new RichDiagnosticFormatter ( context ) ; return instance ; |
public class TextReport {
    /**
     * Flush output streams.
     *
     * Flushes both the standard and error streams and completes any partial
     * line in their writers so no buffered output is lost.
     *
     * @throws IOException if flushing either stream fails
     */
    private void flushOutput() throws IOException {
        outStream.flush();
        outWriter.completeLine();
        errStream.flush();
        errWriter.completeLine();
    }
}
public class CommercePriceEntryPersistenceImpl {
    /**
     * Returns a range of all the commerce price entries where companyId = &#63;.
     *
     * Useful when paginating results; <code>start</code> and <code>end</code>
     * are result-set indexes, not primary keys. Pass {@link QueryUtil#ALL_POS}
     * for both to retrieve the full result set.
     *
     * @param companyId the company ID
     * @param start the lower bound of the range of commerce price entries
     * @param end the upper bound of the range of commerce price entries (not inclusive)
     * @return the range of matching commerce price entries
     */
    @Override
    public List<CommercePriceEntry> findByCompanyId(long companyId, int start, int end) {
        // Delegate to the four-argument overload with no ORDER BY comparator.
        return findByCompanyId(companyId, start, end, null);
    }
}
public class State { /** * Splits state at new line character ( ' \ n ' ) , sorts the resulting array and
* returns joined array . Array is joined using ' \ n ' as a delimiter between its
* members .
* @ return */
public static String sortAndGetState ( ) { } } | if ( _state . length ( ) == 0 ) { return _state ; } String [ ] states = _state . split ( "\\n" ) ; List < String > list = new ArrayList ( ) ; for ( int counter = 0 ; counter < states . length ; ++ counter ) { list . add ( states [ counter ] ) ; } Collections . sort ( list ) ; Iterator < String > iterator = list . iterator ( ) ; String value = "" ; while ( iterator . hasNext ( ) ) { value += iterator . next ( ) + "\n" ; } _state = "" ; return value ; |
public class Utils { /** * Copies the content from in into outputFile .
* < i > in < / i > is not closed by this method . < br >
* It must be explicitly closed after this method is called .
* @ param in an input stream ( not null )
* @ param outputFile will be created if it does not exist
* @ throws IOException if the file could not be created */
public static void copyStream ( InputStream in , File outputFile ) throws IOException { } } | OutputStream os = new FileOutputStream ( outputFile ) ; try { copyStreamUnsafelyUseWithCaution ( in , os ) ; } finally { os . close ( ) ; } |
public class ASMifier { @ Override public void visit ( final String name , final Object value ) { } } | buf . setLength ( 0 ) ; buf . append ( "av" ) . append ( id ) . append ( ".visit(" ) ; appendConstant ( buf , name ) ; buf . append ( ", " ) ; appendConstant ( buf , value ) ; buf . append ( ");\n" ) ; text . add ( buf . toString ( ) ) ; |
public class DetectPolygonFromContour { /** * Specifies the image ' s intrinsic parameters and target size
* @ param width Width of the input image
* @ param height Height of the input image */
private void configure ( int width , int height ) { } } | this . imageWidth = width ; this . imageHeight = height ; // adjust size based parameters based on image size
this . minimumContour = minimumContourConfig . computeI ( Math . min ( width , height ) ) ; this . minimumContour = Math . max ( 4 , minimumContour ) ; // This is needed to avoid processing zero or other impossible
this . minimumArea = Math . pow ( this . minimumContour / 4.0 , 2 ) ; contourFinder . setMinContour ( minimumContour ) ; if ( helper != null ) helper . setImageShape ( width , height ) ; |
public class LocalProperties {
    /**
     * Filters these LocalProperties by the fields that are forwarded to the output
     * as described by the SemanticProperties.
     *
     * Sorting survives only as the longest prefix of the ordering whose fields are
     * all forwarded; grouping survives only if every grouped field is forwarded;
     * each unique field set survives only if all of its fields are forwarded.
     *
     * @param props The semantic properties holding information about forwarded fields.
     * @param input The index of the input.
     * @return The filtered LocalProperties
     */
    public LocalProperties filterBySemanticProperties(SemanticProperties props, int input) {
        if (props == null) {
            throw new NullPointerException("SemanticProperties may not be null.");
        }
        LocalProperties returnProps = new LocalProperties();
        // check if sorting is preserved
        if (this.ordering != null) {
            Ordering newOrdering = new Ordering();
            for (int i = 0; i < this.ordering.getInvolvedIndexes().size(); i++) {
                int sourceField = this.ordering.getInvolvedIndexes().get(i);
                FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                if (targetField == null || targetField.size() == 0) {
                    if (i == 0) {
                        // order fully destroyed
                        newOrdering = null;
                        break;
                    } else {
                        // order partially preserved: keep the prefix built so far
                        break;
                    }
                } else {
                    // use any field of target fields for now. We should use something like
                    // field equivalence sets in the future.
                    if (targetField.size() > 1) {
                        LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                    }
                    newOrdering.appendOrdering(targetField.toArray()[0], this.ordering.getType(i), this.ordering.getOrder(i));
                }
            }
            returnProps.ordering = newOrdering;
            if (newOrdering != null) {
                returnProps.groupedFields = newOrdering.getInvolvedIndexes();
            } else {
                returnProps.groupedFields = null;
            }
        }
        // check if grouping is preserved
        else if (this.groupedFields != null) {
            FieldList newGroupedFields = new FieldList();
            for (Integer sourceField : this.groupedFields) {
                FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                if (targetField == null || targetField.size() == 0) {
                    // a grouped field was not forwarded, so grouping is destroyed
                    newGroupedFields = null;
                    break;
                } else {
                    // use any field of target fields for now. We should use something like
                    // field equivalence sets in the future.
                    if (targetField.size() > 1) {
                        LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                    }
                    newGroupedFields = newGroupedFields.addField(targetField.toArray()[0]);
                }
            }
            returnProps.groupedFields = newGroupedFields;
        }
        // uniqueness of a field set survives only if all of its fields are forwarded
        if (this.uniqueFields != null) {
            Set<FieldSet> newUniqueFields = new HashSet<FieldSet>();
            for (FieldSet fields : this.uniqueFields) {
                FieldSet newFields = new FieldSet();
                for (Integer sourceField : fields) {
                    FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                    if (targetField == null || targetField.size() == 0) {
                        newFields = null;
                        break;
                    } else {
                        // use any field of target fields for now. We should use something like
                        // field equivalence sets in the future.
                        if (targetField.size() > 1) {
                            LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                        }
                        newFields = newFields.addField(targetField.toArray()[0]);
                    }
                }
                if (newFields != null) {
                    newUniqueFields.add(newFields);
                }
            }
            if (!newUniqueFields.isEmpty()) {
                returnProps.uniqueFields = newUniqueFields;
            } else {
                returnProps.uniqueFields = null;
            }
        }
        return returnProps;
    }
}
public class DispatcherBase { /** * Obtains the value of an integer configuration parameter given its name , the default value
* and ' reasonable ' minimum and maximum values .
* @ param msi The Message Store instance to obtain the parameters ( may be null )
* @ param parameterName The parameter ' s name
* @ param defaultValue The default value
* @ param minValue A reasonable minimum value
* @ param maxValue A reasonable maximum value */
protected static int obtainIntConfigParameter ( MessageStoreImpl msi , String parameterName , String defaultValue , int minValue , int maxValue ) { } } | int value = Integer . parseInt ( defaultValue ) ; if ( msi != null ) { String strValue = msi . getProperty ( parameterName , defaultValue ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , parameterName + "=" + strValue ) ; } ; // end if
try { value = Integer . parseInt ( strValue ) ; if ( ( value < minValue ) || ( value > maxValue ) ) { value = Integer . parseInt ( defaultValue ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , "OVERRIDE: " + parameterName + "=" + strValue ) ; } ; // end if
} ; // end if
} catch ( NumberFormatException nfexc ) { // No FFDC Code Needed .
} } ; // end if
return value ; |
public class TransactionQueue {
    /**
     * Returns a snapshot of the statistics on this TransactionQueue.
     *
     * Synchronized so all counters are read atomically with respect to other
     * synchronized mutators on this queue.
     */
    public synchronized TransactionQueueData getStatistics() {
        return new TransactionQueueData(this, mTimeLapseStart, System.currentTimeMillis(),
                mQueue.size(), mThreadCount, mServicingCount,
                mPeakQueueSize, mPeakThreadCount, mPeakServicingCount,
                mTotalEnqueueAttempts, mTotalEnqueued, mTotalServiced, mTotalExpired,
                mTotalServiceExceptions, mTotalUncaughtExceptions,
                mTotalQueueDuration, mTotalServiceDuration);
    }
}
public class ScenarioImpl {
    /**
     * Sets the vendor attribute and fires an EMF SET notification when required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setVendor(String newVendor) {
        String oldVendor = vendor;
        vendor = newVendor;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__VENDOR, oldVendor, vendor));
    }
}
public class RebootManager { /** * Schedules our next regularly scheduled reboot .
* @ return true if a reboot was scheduled , false if regularly scheduled reboots are disabled . */
public boolean scheduleRegularReboot ( ) { } } | // maybe schedule an automatic reboot based on our configuration
int freq = getDayFrequency ( ) ; int hour = getRebootHour ( ) ; if ( freq <= 0 ) { return false ; } Calendars . Builder cal = Calendars . now ( ) ; int curHour = cal . get ( Calendar . HOUR_OF_DAY ) ; cal = cal . zeroTime ( ) . addHours ( hour ) . addDays ( ( curHour < hour ) ? ( freq - 1 ) : freq ) ; // maybe avoid weekends
if ( getSkipWeekends ( ) ) { switch ( cal . get ( Calendar . DAY_OF_WEEK ) ) { case Calendar . SATURDAY : cal . addDays ( 2 ) ; break ; case Calendar . SUNDAY : cal . addDays ( 1 ) ; break ; } } scheduleReboot ( cal . toTime ( ) , true , AUTOMATIC_INITIATOR ) ; // schedule exactly
return true ; |
public class TargetFilterQuerySpecification { /** * { @ link Specification } for retrieving { @ link JpaTargetFilterQuery } s based
* on is { @ link JpaTargetFilterQuery # getName ( ) } .
* @ param distributionSet
* of the filter
* @ return the { @ link JpaTargetFilterQuery } { @ link Specification } */
public static Specification < JpaTargetFilterQuery > byAutoAssignDS ( final DistributionSet distributionSet ) { } } | return ( targetFilterQueryRoot , query , cb ) -> cb . equal ( targetFilterQueryRoot . get ( JpaTargetFilterQuery_ . autoAssignDistributionSet ) , distributionSet ) ; |
public class Tile { /** * A convenient method to set the color of foreground elements like
* title , description , unit , value , tickLabel and tickMark to the given
* Color .
* @ param COLOR */
public void setForegroundBaseColor ( final Color COLOR ) { } } | if ( null == titleColor ) { _titleColor = COLOR ; } else { titleColor . set ( COLOR ) ; } if ( null == descriptionColor ) { _descriptionColor = COLOR ; } else { descriptionColor . set ( COLOR ) ; } if ( null == unitColor ) { _unitColor = COLOR ; } else { unitColor . set ( COLOR ) ; } if ( null == valueColor ) { _valueColor = COLOR ; } else { valueColor . set ( COLOR ) ; } if ( null == textColor ) { _textColor = COLOR ; } else { textColor . set ( COLOR ) ; } if ( null == foregroundColor ) { _foregroundColor = COLOR ; } else { foregroundColor . set ( COLOR ) ; } fireTileEvent ( REDRAW_EVENT ) ; |
public class RomanticCron4jNativeTaskExecutor { protected void setupLinkedContextIfNeeds ( ) { } } | readyContextFieldIfNeeds ( ) ; if ( linkedContext == null ) { synchronized ( attributeLinkLock ) { if ( linkedContext == null ) { linkedContext = getFieldValue ( contextField ) ; } } } |
public class MethodFinder { /** * Computes whether a type is equivalent to a GenericArrayType .
* This method will check that { @ code typeToMatch } is either a { @ link GenericArrayType } or an array and then recursively compare the component types of both arguments using
* { @ link # typesEquivalent ( Type , Type , ResolutionContext ) } .
* @ param typeToMatch the type to match against
* @ param type the type to check , type variable resolution will be performed for this type
* @ param ctx the resolution context to use to perform type variable resolution
* @ return { @ code true } if { @ code type } is equivalent to { @ code typeToMatch } after type resolution , otherwise { @ code false } */
private static boolean typesEquivalent ( Type typeToMatch , GenericArrayType type , ResolutionContext ctx ) { } } | if ( typeToMatch instanceof GenericArrayType ) { GenericArrayType aGat = ( GenericArrayType ) typeToMatch ; return typesEquivalent ( aGat . getGenericComponentType ( ) , ctx . resolve ( type . getGenericComponentType ( ) ) , ctx ) ; } if ( typeToMatch instanceof Class ) { Class < ? > aClazz = ( Class < ? > ) typeToMatch ; if ( aClazz . isArray ( ) ) { return typesEquivalent ( aClazz . getComponentType ( ) , ctx . resolve ( type . getGenericComponentType ( ) ) , ctx ) ; } } return false ; |
public class JK { /** * Prints the .
* @ param name the name
* @ param list the list */
public static void print ( String name , List < ? > list ) { } } | JK . line ( ) ; print ( String . format ( "List (%s):" , name ) ) ; int index = 1 ; for ( Object object : list ) { JK . print ( String . format ( "\t%d) %s" , index ++ , object ) ) ; } JK . line ( ) ; |
public class ArrayOfDoublesSketchBuildAggregator {
    /**
     * Folds the current row's keys and values into the sketch.
     *
     * This method uses synchronization because it can be used during indexing,
     * and Druid can call aggregate() and get() concurrently.
     * https://github.com/apache/incubator-druid/pull/3956
     */
    @Override
    public void aggregate() {
        final IndexedInts keys = keySelector.getRow();
        // Values are copied into the shared buffer outside the lock; only the
        // sketch updates themselves are synchronized.
        for (int i = 0; i < valueSelectors.length; i++) {
            values[i] = valueSelectors[i].getDouble();
        }
        synchronized (this) {
            for (int i = 0, keysSize = keys.size(); i < keysSize; i++) {
                final String key = keySelector.lookupName(keys.get(i));
                sketch.update(key, values);
            }
        }
    }
}
public class Collecting {
    /**
     * Returns a {@code Collector} that accumulates the items in reverse
     * encounter order into a new {@code Deque}.
     *
     * @param <T> the type of the input elements
     * @return the resulting {@code Collector}
     */
    public static <T> Collector<T, ?, Deque<T>> reverse() {
        return Collector.<T, Deque<T>>of(
                () -> new LinkedList<T>(),
                // Pushing onto the front reverses encounter order.
                (deque, element) -> deque.addFirst(element),
                // Combiner: the later partition's (already reversed) elements come
                // first, followed by the earlier partition's.
                (first, second) -> {
                    second.addAll(first);
                    return second;
                });
    }
}
public class DonutChartTileSkin { /** * * * * * * Methods * * * * * */
@ Override protected void handleEvents ( final String EVENT_TYPE ) { } } | super . handleEvents ( EVENT_TYPE ) ; if ( "VISIBILITY" . equals ( EVENT_TYPE ) ) { Helper . enableNode ( titleText , ! tile . getTitle ( ) . isEmpty ( ) ) ; Helper . enableNode ( text , tile . isTextVisible ( ) ) ; double chartCanvasWidth = width - size * 0.1 ; double chartCanvasHeight = tile . isTextVisible ( ) ? height - size * 0.28 : height - size * 0.205 ; double chartCanvasSize = chartCanvasWidth < chartCanvasHeight ? chartCanvasWidth : chartCanvasHeight ; double legendCanvasWidth = width * 0.225 ; double legendCanvasHeight = chartCanvasSize ; chartCanvas . setWidth ( chartCanvasSize ) ; chartCanvas . setHeight ( chartCanvasSize ) ; legendCanvas . setWidth ( legendCanvasWidth ) ; legendCanvas . setHeight ( legendCanvasHeight ) ; } |
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves the IfcSectionalAreaIntegralMeasure EClass from the
     * registered IFC2x3 TC1 package (classifier index 735).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcSectionalAreaIntegralMeasure() {
        if (ifcSectionalAreaIntegralMeasureEClass == null) {
            ifcSectionalAreaIntegralMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(735);
        }
        return ifcSectionalAreaIntegralMeasureEClass;
    }
}
public class DigestAuthenticator { /** * Generate a unique token . The token is generated according to the
* following pattern . NOnceToken = Base64 ( MD5 ( client - IP " : "
* time - stamp " : " private - key ) ) .
* @ param request HTTP Servlet request */
protected String generateNOnce ( MobicentsSipServletRequest request ) { } } | long currentTime = System . currentTimeMillis ( ) ; String nOnceValue = request . getRemoteAddr ( ) + ":" + currentTime + ":" + key ; byte [ ] buffer = null ; synchronized ( md5Helper ) { buffer = md5Helper . digest ( nOnceValue . getBytes ( ) ) ; } nOnceValue = MD5_ENCODER . encode ( buffer ) ; return nOnceValue ; |
public class CmsStaticExportManager { /** * Substitutes the $ { CONTEXT _ NAME } and $ { SERVLET _ NAME } in a path with the real values . < p >
* @ param path the path to substitute
* @ return path with real context values */
protected String insertContextStrings ( String path ) { } } | // create a new macro resolver
CmsMacroResolver resolver = CmsMacroResolver . newInstance ( ) ; // add special mappings for macros
resolver . addMacro ( "CONTEXT_NAME" , OpenCms . getSystemInfo ( ) . getContextPath ( ) ) ; resolver . addMacro ( "SERVLET_NAME" , OpenCms . getSystemInfo ( ) . getServletPath ( ) ) ; // resolve the macros
return resolver . resolveMacros ( path ) ; |
public class ArabicShaping {
    /**
     * Name: calculateSize
     * Function: calculates the destSize to be used in preflighting when the
     * destSize is equal to 0.
     *
     * Shaping shrinks the text (LAM+ALEF pairs collapse into one ligature and
     * tashkeel characters are removed); unshaping grows it (each lam-alef
     * ligature expands back into two characters).
     */
    private int calculateSize(char[] source, int sourceStart, int sourceLength) {
        int destSize = sourceLength;
        switch (options & LETTERS_MASK) {
            case LETTERS_SHAPE:
            case LETTERS_SHAPE_TASHKEEL_ISOLATED:
                if (isLogical) {
                    // logical order: the ALEF follows the LAM
                    for (int i = sourceStart, e = sourceStart + sourceLength - 1; i < e; ++i) {
                        if ((source[i] == LAM_CHAR && isAlefChar(source[i + 1])) || isTashkeelCharFE(source[i])) {
                            --destSize;
                        }
                    }
                } else {
                    // visual order: the ALEF precedes the LAM
                    for (int i = sourceStart + 1, e = sourceStart + sourceLength; i < e; ++i) {
                        if ((source[i] == LAM_CHAR && isAlefChar(source[i - 1])) || isTashkeelCharFE(source[i])) {
                            --destSize;
                        }
                    }
                }
                break;
            case LETTERS_UNSHAPE:
                for (int i = sourceStart, e = sourceStart + sourceLength; i < e; ++i) {
                    if (isLamAlefChar(source[i])) {
                        destSize++;
                    }
                }
                break;
            default:
                break;
        }
        return destSize;
    }
}
public class ProgramConverter {
    /**
     * Convert document model to declared document.
     *
     * Walks every declaration in the program, registering enums, messages,
     * constants, typedefs and services in the type registry for this path,
     * and assembles the resulting {@link CProgram}.
     *
     * @param path The program file path.
     * @param program Program model to convert.
     * @return The declared thrift document.
     */
    public CProgram convert(String path, ProgramType program) {
        ImmutableList.Builder<PDeclaredDescriptor<?>> declaredTypes = ImmutableList.builder();
        ImmutableList.Builder<CConst> constants = ImmutableList.builder();
        ImmutableMap.Builder<String, String> typedefs = ImmutableMap.builder();
        ImmutableList.Builder<CService> services = ImmutableList.builder();
        RecursiveTypeRegistry registry = programRegistry.registryForPath(path);
        File dir = new File(path).getParentFile();
        // Register included programs first so cross-program type references resolve.
        if (program.hasIncludes()) {
            for (String include : program.getIncludes()) {
                String includePath = new File(dir, include).getPath();
                registry.registerInclude(ReflectionUtils.programNameFromPath(include), programRegistry.registryForPath(includePath));
            }
        }
        for (Declaration decl : program.getDecl()) {
            switch (decl.unionField()) {
                case DECL_ENUM: {
                    EnumType enumType = decl.getDeclEnum();
                    // Implicit enum values continue from the last assigned value + 1.
                    int nextValue = PEnumDescriptor.DEFAULT_FIRST_VALUE;
                    CEnumDescriptor type = new CEnumDescriptor(enumType.getDocumentation(), program.getProgramName(), enumType.getName(), enumType.getAnnotations());
                    List<CEnumValue> values = new ArrayList<>();
                    for (EnumValue value : enumType.getValues()) {
                        int v = value.hasId() ? value.getId() : nextValue;
                        nextValue = v + 1;
                        // NOTE(review): the computed implicit value 'v' is not passed to
                        // CEnumValue below — value.getId() is used instead. Confirm this
                        // is intended for values without an explicit id.
                        values.add(new CEnumValue(value.getDocumentation(), value.getId(), value.getName(), type, value.getAnnotations()));
                    }
                    type.setValues(values);
                    declaredTypes.add(type);
                    registry.register(type);
                    break;
                }
                case DECL_MESSAGE: {
                    MessageType messageType = decl.getDeclMessage();
                    List<CField> fields = new ArrayList<>();
                    if (messageType.hasFields()) {
                        fields.addAll(messageType.getFields().stream().map(field -> makeField(registry, program.getProgramName(), field, messageType.getVariant())).collect(Collectors.toList()));
                    }
                    // The descriptor subclass depends on the message variant.
                    PMessageDescriptor<?, ?> type;
                    switch (messageType.getVariant()) {
                        case STRUCT:
                            type = new CStructDescriptor(messageType.getDocumentation(), program.getProgramName(), messageType.getName(), fields, messageType.getAnnotations());
                            break;
                        case UNION:
                            type = new CUnionDescriptor(messageType.getDocumentation(), program.getProgramName(), messageType.getName(), fields, messageType.getAnnotations());
                            break;
                        case EXCEPTION:
                            type = new CExceptionDescriptor(messageType.getDocumentation(), program.getProgramName(), messageType.getName(), fields, messageType.getAnnotations());
                            break;
                        default:
                            throw new UnsupportedOperationException("Unhandled message variant " + messageType.getVariant());
                    }
                    declaredTypes.add(type);
                    registry.register(type);
                    break;
                }
                case DECL_CONST: {
                    ConstType constant = decl.getDeclConst();
                    CConst cv = makeConst(registry, program.getProgramName(), constant);
                    constants.add(cv);
                    registry.registerConstant(cv.getName(), program.getProgramName(), cv.getDefaultValue());
                    break;
                }
                case DECL_TYPEDEF: {
                    typedefs.put(decl.getDeclTypedef().getName(), decl.getDeclTypedef().getType());
                    registry.registerTypedef(decl.getDeclTypedef().getName(), program.getProgramName(), decl.getDeclTypedef().getType());
                    break;
                }
                case DECL_SERVICE: {
                    ServiceType serviceType = decl.getDeclService();
                    ImmutableList.Builder<CServiceMethod> methodBuilder = ImmutableList.builder();
                    if (serviceType.hasMethods()) {
                        for (FunctionType sm : serviceType.getMethods()) {
                            // Each method gets a synthetic "<service>.<method>.request"
                            // struct built from its parameters.
                            List<CField> rqFields = new ArrayList<>();
                            if (sm.numParams() > 0) {
                                for (FieldType field : sm.getParams()) {
                                    rqFields.add(makeField(registry, program.getProgramName(), field, MessageVariant.STRUCT));
                                }
                            }
                            CStructDescriptor request = new CStructDescriptor(null, program.getProgramName(), serviceType.getName() + '.' + sm.getName() + ".request", rqFields, null);
                            CUnionDescriptor response = null;
                            if (!sm.isOneWay()) {
                                // Two-way methods get a ".response" union: field 0 is
                                // "success" (VOID when there is no return type), plus one
                                // field per declared exception.
                                List<CField> rsFields = new ArrayList<>();
                                CField success;
                                if (sm.getReturnType() != null) {
                                    PDescriptorProvider type = registry.getProvider(sm.getReturnType(), program.getProgramName(), sm.getAnnotations());
                                    success = new CField(null, 0, PRequirement.OPTIONAL, "success", type, null, null);
                                } else {
                                    success = new CField(null, 0, PRequirement.OPTIONAL, "success", PPrimitive.VOID.provider(), null, null);
                                }
                                rsFields.add(success);
                                if (sm.numExceptions() > 0) {
                                    for (FieldType field : sm.getExceptions()) {
                                        rsFields.add(makeField(registry, program.getProgramName(), field, MessageVariant.UNION));
                                    }
                                }
                                response = new CUnionDescriptor(null, program.getProgramName(), serviceType.getName() + '.' + sm.getName() + ".response", rsFields, null);
                            }
                            CServiceMethod method = new CServiceMethod(sm.getDocumentation(), sm.getName(), sm.isOneWay(), request, response, sm.getAnnotations());
                            methodBuilder.add(method);
                        } // for each method
                    } // if has methods
                    PServiceProvider extendsProvider = null;
                    if (serviceType.hasExtend()) {
                        extendsProvider = registry.getServiceProvider(serviceType.getExtend(), program.getProgramName());
                    }
                    CService service = new CService(serviceType.getDocumentation(), program.getProgramName(), serviceType.getName(), extendsProvider, methodBuilder.build(), serviceType.getAnnotations());
                    services.add(service);
                    registry.registerRecursively(service);
                }
            }
        }
        return new CProgram(path, program.getDocumentation(), program.getProgramName(), program.getNamespaces(), getIncludedProgramNames(program), program.getIncludes(), typedefs.build(), declaredTypes.build(), services.build(), constants.build());
    }
}
public class FederatedBigQueryOutputCommitter {
    /**
     * Runs a federated import job on BigQuery for the data in the output path
     * in addition to calling the delegate's commitJob.
     *
     * @param context the job context carrying the output configuration
     * @throws IOException if committing or the federated import fails
     */
    @Override
    public void commitJob(JobContext context) throws IOException {
        super.commitJob(context);
        // Get the destination configuration information.
        Configuration conf = context.getConfiguration();
        TableReference destTable = BigQueryOutputConfiguration.getTableReference(conf);
        String destProjectId = BigQueryOutputConfiguration.getProjectId(conf);
        Optional<BigQueryTableSchema> destSchema = BigQueryOutputConfiguration.getTableSchema(conf);
        BigQueryFileFormat outputFileFormat = BigQueryOutputConfiguration.getFileFormat(conf);
        List<String> sourceUris = getOutputFileURIs();
        // A null schema is passed when no schema was configured — presumably
        // letting BigQuery use/detect the table schema; TODO confirm against
        // importFederatedFromGcs documentation.
        getBigQueryHelper().importFederatedFromGcs(destProjectId, destTable, destSchema.isPresent() ? destSchema.get().get() : null, outputFileFormat, sourceUris);
    }
}
public class AbstractPathFinder { /** * A placeholder configuration is one which is not defined or is set to { @ link DatasetDescriptorConfigKeys # DATASET _ DESCRIPTOR _ CONFIG _ ANY } .
* @ param value to be examined for determining if it is a placeholder .
* @ return true if the value is null or empty or equals { @ link DatasetDescriptorConfigKeys # DATASET _ DESCRIPTOR _ CONFIG _ ANY } . */
private boolean isPlaceHolder ( String value ) { } } | return Strings . isNullOrEmpty ( value ) || value . equals ( DatasetDescriptorConfigKeys . DATASET_DESCRIPTOR_CONFIG_ANY ) ; |
public class Asm {
    /**
     * Create word (2 bytes) pointer operand.
     *
     * @param target the absolute target address
     * @param disp the displacement applied to the address
     * @param segmentPrefix the segment override prefix to apply
     * @return the absolute word-sized memory operand
     */
    public static final Mem word_ptr_abs(long target, long disp, SEGMENT segmentPrefix) {
        // Delegate to the generic absolute-pointer builder with a WORD size.
        return _ptr_build_abs(target, disp, segmentPrefix, SIZE_WORD);
    }
}
public class Client { /** * Change the password for the currently logged in user . You must supply the
* old password and the new password .
* @ param username
* @ param oldPassword
* @ param newPassword
* @ return */
public ApiResponse changePassword ( String username , String oldPassword , String newPassword ) { } } | Map < String , Object > data = new HashMap < String , Object > ( ) ; data . put ( "newpassword" , newPassword ) ; data . put ( "oldpassword" , oldPassword ) ; return apiRequest ( HttpMethod . POST , null , data , organizationId , applicationId , "users" , username , "password" ) ; |
public class GenerateImplFastCorner {
    /**
     * Generates nested sample/if-else source code by depth-first exploring
     * sample decisions until each branch reaches a solution or is exhausted.
     *
     * TODO only be considered finished when that list is exhausted
     * NOTE(review): builds the output via String += in a loop and prints debug
     * state to System.out — consider a StringBuilder and removing the prints.
     */
    private String generateSamples() {
        String output = "";
        tabs = 2;
        Stack<Action> actions = new Stack<>();
        actions.add(selectNextSample());
        while (!actions.empty()) {
            Action action = actions.peek();
            System.out.println("Action bit=" + action.bit + " up=" + action.sampleUp + " n=" + action.consider + " TOTAL=" + actions.size());
            debugSampleState();
            if (action.consider == 0) {
                // First time this action is considered assume it's outcome is true
                output += strSample(tabs++, action);
                action.consider++;
                if (action.sampleUp) {
                    samples[action.bit].add(Sample.UP);
                } else {
                    samples[action.bit].add(Sample.DOWN);
                }
            } else if (action.consider == 1) {
                // Second time consider what to do if it's outcome is false
                output += strElse(tabs++);
                action.consider++;
                removeSample(action.bit);
                System.out.println("removed sample");
                debugSampleState();
                updateSamples(action);
            } else {
                // Remove consideration of this action and reconsider the previous one
                removeSample(action.bit);
                output += strCloseIf(tabs--);
                actions.pop();
                continue;
            }
            // See if a solution has been found.
            Solution solution = checkSoluton();
            if (solution != null) {
                // If a solution has been found return and mark the first bit as being
                // found so that it won't detect the same corner twice
                output += strReturn(tabs--, solution.up ? 1 : -1);
                // Don't add a new action. Instead consider other outcomes from previous action
            } else {
                // Wasn't able to find a solution. Sample another bit
                action = selectNextSample();
                if (action == null) {
                    // No need to sample since it has proven that there is no pixel
                    output += strReturn(tabs--, 0);
                } else {
                    actions.add(action);
                }
            }
        }
        return output;
    }
}
public class CommerceSubscriptionEntryPersistenceImpl {
    /**
     * Returns the commerce subscription entries before and after the current
     * commerce subscription entry in the ordered set where uuid = &#63;.
     *
     * @param commerceSubscriptionEntryId the primary key of the current commerce subscription entry
     * @param uuid the uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce subscription entry
     * @throws NoSuchSubscriptionEntryException if a commerce subscription entry with the primary key could not be found
     */
    @Override
    public CommerceSubscriptionEntry[] findByUuid_PrevAndNext(long commerceSubscriptionEntryId, String uuid, OrderByComparator<CommerceSubscriptionEntry> orderByComparator) throws NoSuchSubscriptionEntryException {
        CommerceSubscriptionEntry commerceSubscriptionEntry = findByPrimaryKey(commerceSubscriptionEntryId);
        Session session = null;
        try {
            session = openSession();
            // array[0] = previous, array[1] = current, array[2] = next
            CommerceSubscriptionEntry[] array = new CommerceSubscriptionEntryImpl[3];
            array[0] = getByUuid_PrevAndNext(session, commerceSubscriptionEntry, uuid, orderByComparator, true);
            array[1] = commerceSubscriptionEntry;
            array[2] = getByUuid_PrevAndNext(session, commerceSubscriptionEntry, uuid, orderByComparator, false);
            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class Ifc4FactoryImpl {

    /**
     * Converts the given literal into an {@code IfcReinforcingMeshTypeEnum}
     * value (EMF-generated factory method).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @throws IllegalArgumentException if the literal names no enumerator of the data type
     * @generated
     */
    public IfcReinforcingMeshTypeEnum createIfcReinforcingMeshTypeEnumFromString(EDataType eDataType, String initialValue) {
        IfcReinforcingMeshTypeEnum result = IfcReinforcingMeshTypeEnum.get(initialValue);
        // get() returns null when the literal does not match any enumerator.
        if (result == null)
            throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class RESTClient {

    /**
     * Performs a GET request with no query parameters and no extra headers.
     * Convenience overload delegating to the three-argument {@code get}.
     *
     * @param path request path
     * @return the service {@code Response}
     * @throws IOException if an error during HTTP connection or entity parsing occurs
     * @throws RESTException if the HTTP response code is non-OK
     */
    public Response get(String path) throws IOException, RESTException {
        return get(path, null, null);
    }
}
public class SecretKeyBackupHelper {

    /**
     * Decrypts a secret key backup and returns the {@link PGPSecretKeyRing} contained in it.
     *
     * TODO: Return a PGPSecretKeyRingCollection instead?
     *
     * @param backup encrypted {@link SecretkeyElement} containing the backup
     * @param backupCode passphrase for decrypting the {@link SecretkeyElement}
     * @return the decrypted secret key ring
     * @throws InvalidBackupCodeException in case the provided backup code is invalid
     * @throws IOException IO is dangerous
     * @throws PGPException PGP is brittle
     */
    public static PGPSecretKeyRing restoreSecretKeyBackup(SecretkeyElement backup, String backupCode)
            throws InvalidBackupCodeException, IOException, PGPException {
        // The backup payload is transported base64-encoded.
        byte[] encrypted = Base64.decode(backup.getB64Data());
        byte[] decrypted;
        try {
            decrypted = PGPainless.decryptWithPassword(encrypted, new Passphrase(backupCode.toCharArray()));
        } catch (IOException | PGPException e) {
            // A decryption failure is interpreted as a wrong backup code; the cause is preserved.
            throw new InvalidBackupCodeException("Could not decrypt secret key backup. Possibly wrong passphrase?", e);
        }
        return PGPainless.readKeyRing().secretKeyRing(decrypted);
    }
}
public class Organization { /** * A list of policy types that are enabled for this organization . For example , if your organization has all features
* enabled , then service control policies ( SCPs ) are included in the list .
* < note >
* Even if a policy type is shown as available in the organization , you can separately enable and disable them at
* the root level by using < a > EnablePolicyType < / a > and < a > DisablePolicyType < / a > . Use < a > ListRoots < / a > to see the
* status of a policy type in that root .
* < / note >
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAvailablePolicyTypes ( java . util . Collection ) } or { @ link # withAvailablePolicyTypes ( java . util . Collection ) }
* if you want to override the existing values .
* @ param availablePolicyTypes
* A list of policy types that are enabled for this organization . For example , if your organization has all
* features enabled , then service control policies ( SCPs ) are included in the list . < / p > < note >
* Even if a policy type is shown as available in the organization , you can separately enable and disable
* them at the root level by using < a > EnablePolicyType < / a > and < a > DisablePolicyType < / a > . Use < a > ListRoots < / a >
* to see the status of a policy type in that root .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Organization withAvailablePolicyTypes ( PolicyTypeSummary ... availablePolicyTypes ) { } } | if ( this . availablePolicyTypes == null ) { setAvailablePolicyTypes ( new java . util . ArrayList < PolicyTypeSummary > ( availablePolicyTypes . length ) ) ; } for ( PolicyTypeSummary ele : availablePolicyTypes ) { this . availablePolicyTypes . add ( ele ) ; } return this ; |
public class GetConnectionsResult { /** * A list of requested connection definitions .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setConnectionList ( java . util . Collection ) } or { @ link # withConnectionList ( java . util . Collection ) } if you want
* to override the existing values .
* @ param connectionList
* A list of requested connection definitions .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetConnectionsResult withConnectionList ( Connection ... connectionList ) { } } | if ( this . connectionList == null ) { setConnectionList ( new java . util . ArrayList < Connection > ( connectionList . length ) ) ; } for ( Connection ele : connectionList ) { this . connectionList . add ( ele ) ; } return this ; |
public class JKMessage {

    /**
     * Adds the labels from the given properties file for the given locale.
     * NOTE(review): "addLables" is a typo for "addLabels", but it is part of the
     * public API and cannot be renamed without breaking callers.
     *
     * @param locale the locale the labels belong to
     * @param fileName the properties file to load the labels from
     */
    public void addLables(JKLocale locale, String fileName) {
        // Delegates to the overload that accepts an already-loaded Properties object.
        addLables(locale, JKIOUtil.readPropertiesFile(fileName));
    }
}
public class CmsGalleryService { /** * Returns the gallery search object containing the results for the current parameter . < p >
* @ param searchObj the current search object
* @ return the search result
* @ throws CmsException if the search fails */
private CmsGallerySearchBean search ( CmsGallerySearchBean searchObj ) throws CmsException { } } | CmsGallerySearchBean searchObjBean = new CmsGallerySearchBean ( searchObj ) ; if ( searchObj == null ) { return searchObjBean ; } // store folder filter
storeFolderFilter ( searchObj . getFolders ( ) ) ; // search
CmsGallerySearchParameters params = prepareSearchParams ( searchObj ) ; org . opencms . search . galleries . CmsGallerySearch searchBean = new org . opencms . search . galleries . CmsGallerySearch ( ) ; CmsObject searchCms = getSearchCms ( searchObj ) ; searchBean . init ( searchCms ) ; CmsGallerySearchResultList searchResults = OpenCms . getSearchManager ( ) . getIndexSolr ( "Solr Offline" ) . gallerySearch ( searchCms , params ) ; searchResults . calculatePages ( params . getResultPage ( ) , params . getMatchesPerPage ( ) ) ; // set only the result dependent search params for this search
// the user dependent params ( galleries , types etc . ) remain unchanged
searchObjBean . setSortOrder ( params . getSortOrder ( ) . name ( ) ) ; searchObjBean . setScope ( params . getScope ( ) ) ; searchObjBean . setResultCount ( searchResults . getHitCount ( ) ) ; searchObjBean . setPage ( params . getResultPage ( ) ) ; searchObjBean . setLastPage ( params . getResultPage ( ) ) ; searchObjBean . setResults ( buildSearchResultList ( searchResults , null ) ) ; if ( searchObj . getGalleryMode ( ) . equals ( GalleryMode . ade ) ) { if ( searchObjBean . getResultCount ( ) > 0 ) { CmsADESessionCache cache = CmsADESessionCache . getCache ( getRequest ( ) , getCmsObject ( ) ) ; cache . setLastPageEditorGallerySearch ( searchObj ) ; } } updateNoUploadReason ( searchCms , searchObjBean ) ; return searchObjBean ; |
public class TypeConversion {

    /**
     * Reads two consecutive bytes from the array (big-endian order) and
     * combines them into a single char value.
     *
     * @param bytes the byte array containing the char
     * @param offset the index at which the char's first (high) byte is located
     * @return the char value
     */
    public static char bytesToChar(byte[] bytes, int offset) {
        // Mask to 0..255 so sign extension of negative bytes cannot corrupt the result.
        final int high = bytes[offset] & 0xFF;
        final int low = bytes[offset + 1] & 0xFF;
        return (char) ((high << 8) | low);
    }
}
public class Cache {

    /**
     * Looks up credible Records in the Cache (a wrapper around lookupRecords).
     * Unlike lookupRecords, this gives no indication of why failure occurred.
     *
     * @param name the name to look up
     * @param type the type to look up
     * @return an array of RRsets, or null
     * @see Credibility
     */
    public RRset[] findRecords(Name name, int type) {
        // Delegates with the default (NORMAL) credibility level.
        return findRecords(name, type, Credibility.NORMAL);
    }
}
public class ServersInner {

    /**
     * Updates an existing server (synchronous wrapper over the async service call).
     *
     * @param resourceGroupName the name of the resource group that contains the resource; obtainable from the Azure Resource Manager API or the portal
     * @param serverName the name of the server
     * @param parameters the required parameters for updating a server
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ServerInner object if successful
     */
    public ServerInner update(String resourceGroupName, String serverName, ServerUpdate parameters) {
        // Blocks on the async observable and unwraps the single response body.
        return updateWithServiceResponseAsync(resourceGroupName, serverName, parameters).toBlocking().single().body();
    }
}
public class Expressions {

    /**
     * Creates a new boolean Template expression.
     *
     * @param template template
     * @param args template parameters
     * @return template expression
     */
    public static BooleanTemplate booleanTemplate(Template template, Object... args) {
        // Delegates to the List-based overload with an immutable snapshot of the varargs.
        return booleanTemplate(template, ImmutableList.copyOf(args));
    }
}
public class UnparsedExtEntity { /** * Implementation of abstract base methods */
@ Override public void writeEnc ( Writer w ) throws IOException { } } | w . write ( "<!ENTITY " ) ; w . write ( mName ) ; String pubId = getPublicId ( ) ; if ( pubId != null ) { w . write ( "PUBLIC \"" ) ; w . write ( pubId ) ; w . write ( "\" " ) ; } else { w . write ( "SYSTEM " ) ; } w . write ( '"' ) ; w . write ( getSystemId ( ) ) ; w . write ( "\" NDATA " ) ; w . write ( mNotationId ) ; w . write ( '>' ) ; |
public class CmsXsltUtil { /** * Applies a XSLT Transformation to the content . < p >
* The method does not use DOM4J , because iso - 8859-1 code ist not transformed correctly .
* @ param cms the cms object
* @ param xsltFile the XSLT transformation file
* @ param xmlContent the XML content to transform
* @ return the transformed xml
* @ throws CmsXmlException if something goes wrong
* @ throws CmsException if something goes wrong */
public static String transformXmlContent ( CmsObject cms , String xsltFile , String xmlContent ) throws CmsException , CmsXmlException { } } | // JAXP reads data
Source xmlSource = new StreamSource ( new StringReader ( xmlContent ) ) ; String xsltString = new String ( cms . readFile ( xsltFile ) . getContents ( ) ) ; Source xsltSource = new StreamSource ( new StringReader ( xsltString ) ) ; String result = null ; try { TransformerFactory transFact = TransformerFactory . newInstance ( ) ; Transformer trans = transFact . newTransformer ( xsltSource ) ; StringWriter writer = new StringWriter ( ) ; trans . transform ( xmlSource , new StreamResult ( writer ) ) ; result = writer . toString ( ) ; } catch ( Exception exc ) { throw new CmsXmlException ( Messages . get ( ) . container ( Messages . ERR_CSV_XML_TRANSFORMATION_FAILED_0 ) ) ; } // cut of the prefacing declaration ' < ? xml version = " 1.0 " encoding = " UTF - 8 " ? > '
if ( result . startsWith ( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" ) ) { return result . substring ( 38 ) ; } else { return result ; } |
public class PNCounterProxy {

    /**
     * Updates the locally observed CRDT vector clock atomically. This method
     * is thread safe and can be called concurrently. The clock is only
     * advanced when the received clock is not already dominated by the
     * currently observed one.
     *
     * @param receivedVectorClock vector clock received from a replica state read
     */
    private void updateObservedReplicaTimestamps(VectorClock receivedVectorClock) {
        // CAS retry loop: re-read, test, attempt swap until either the observed
        // clock already dominates the received one or the swap succeeds.
        for (;;) {
            final VectorClock currentClock = this.observedClock;
            // Nothing to do if the current clock is strictly newer.
            if (currentClock != null && currentClock.isAfter(receivedVectorClock)) {
                break;
            }
            // Publish the received clock; a failed CAS means a concurrent update — retry.
            if (OBSERVED_TIMESTAMPS_UPDATER.compareAndSet(this, currentClock, receivedVectorClock)) {
                break;
            }
        }
    }
}
public class RTMPConnManager { /** * { @ inheritDoc } */
@ Override public Collection < RTMPConnection > getAllConnections ( ) { } } | ArrayList < RTMPConnection > list = new ArrayList < RTMPConnection > ( connMap . size ( ) ) ; list . addAll ( connMap . values ( ) ) ; return list ; |
public class PixelUtils {

    /**
     * Converts device-independent pixels (dp) to physical pixels.<br>
     *
     * @param context context used to read the display metrics
     * @param dp value in DP
     * @return the equivalent value in pixels, rounded
     * @see <a href="http://developer.android.com/guide/practices/screens_support.html#dips-pels">http://developer.android.com/guide/practices/screens_support.html#dips-pels</a>
     */
    public static int getPixelFromDp(Context context, int dp) {
        // Get the screen's density scale
        float scale = context.getResources().getDisplayMetrics().density;
        // Convert the dps to pixels, based on density scale.
        // Because dp * scale is cast to an integer the result would be truncated;
        // adding ROUND_FACTOR makes the result a properly rounded integer value.
        return (int) (dp * scale + ROUND_FACTOR);
    }
}
public class SystemUtil {

    /**
     * Creates a new temporary directory in the given path.
     *
     * @param _path parent path
     * @param _name directory name
     * @param _deleteOnExit delete directory on JVM shutdown
     * @return the created directory, or null if a directory/file of that name already existed
     */
    public static File createTempDirectory(String _path, String _name, boolean _deleteOnExit) {
        File outputDir = new File(concatFilePath(_path, _name));
        if (!outputDir.exists()) {
            try {
                Files.createDirectory(Paths.get(outputDir.toString()));
            } catch (IOException _ex) {
                // NOTE(review): creation failure is only logged; the method still returns the
                // (possibly non-existent) File below. Callers should check exists() — TODO confirm intent.
                LOGGER.error("Error while creating temp directory: ", _ex);
            }
        } else {
            // Already existing path: signalled by returning null.
            return null;
        }
        if (_deleteOnExit) {
            outputDir.deleteOnExit();
        }
        return outputDir;
    }
}
public class CatalogUtil {

    /**
     * Gets the auto-generated DDL from the catalog jar.
     *
     * @param jarfile in-memory catalog jar file
     * @return auto-generated DDL stored in catalog.jar, trimmed of surrounding whitespace
     * @throws IOException if the auto-generated DDL entry is not present in the jar
     */
    public static String getAutoGenDDLFromJar(InMemoryJarfile jarfile) throws IOException {
        // Read the raw auto generated ddl bytes.
        byte[] ddlBytes = jarfile.get(VoltCompiler.AUTOGEN_DDL_FILE_NAME);
        if (ddlBytes == null) {
            throw new IOException("Auto generated schema DDL not found - please make sure the database is initialized with valid schema.");
        }
        String ddl = new String(ddlBytes, StandardCharsets.UTF_8);
        return ddl.trim();
    }
}
public class DeleteTableVersionRequestMarshaller {

    /**
     * Marshalls the given request object into its protocol representation.
     * Rejects null input and wraps any marshalling failure in an SdkClientException.
     *
     * @param deleteTableVersionRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving the fields
     */
    public void marshall(DeleteTableVersionRequest deleteTableVersionRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteTableVersionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is bound via its marshalling descriptor.
            protocolMarshaller.marshall(deleteTableVersionRequest.getCatalogId(), CATALOGID_BINDING);
            protocolMarshaller.marshall(deleteTableVersionRequest.getDatabaseName(), DATABASENAME_BINDING);
            protocolMarshaller.marshall(deleteTableVersionRequest.getTableName(), TABLENAME_BINDING);
            protocolMarshaller.marshall(deleteTableVersionRequest.getVersionId(), VERSIONID_BINDING);
        } catch (Exception e) {
            // The cause is preserved so marshalling failures remain diagnosable.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PathResolvingComparator {

    // ----- private methods -----

    /**
     * Resolves a dot-separated property path starting at the given object and
     * returns the final value when it is {@link Comparable}. Returns null (after
     * logging a warning) whenever the path cannot be resolved or the final value
     * cannot be used for sorting.
     */
    private Comparable resolve(final GraphObject obj, final String path) {

        final ConfigurationProvider config = StructrApp.getConfiguration();
        // Split on runs of dots; empty components are thereby avoided.
        final String[] parts = path.split("[\\.]+");
        GraphObject current = obj;
        int pos = 0;

        for (final String part : parts) {

            final Class type = current.getEntityType();
            final PropertyKey key = config.getPropertyKeyForJSONName(type, part, false);
            if (key == null) {
                logger.warn("Unknown key {} while resolving path {} for sorting.", part, path);
                return null;
            }

            final Object value = current.getProperty(key);
            if (value != null) {

                // last part of path?
                if (++pos == parts.length) {
                    if (value instanceof Comparable) {
                        return (Comparable) value;
                    }
                    logger.warn("Path evaluation result of component {} of type {} in {} cannot be used for sorting.", part, value.getClass().getSimpleName(), path);
                    return null;
                }

                // Intermediate component: must be another graph object to descend into.
                if (value instanceof GraphObject) {
                    current = (GraphObject) value;
                } else {
                    logger.warn("Path component {} of type {} in {} cannot be evaluated further.", part, value.getClass().getSimpleName(), path);
                    return null;
                }
            }
            // NOTE(review): a null intermediate value silently continues with the same
            // 'current' and an unincremented 'pos' — possibly intended as skip, TODO confirm.
        }

        return null;
    }
}
public class XLinkUtils {

    /**
     * Prepares the XLinkTemplate before it is transmitted to the remote tool.
     * <br/><br/>
     * The baseUrl is prepared and the KeyNames are added.<br/>
     * The models are assigned to the views (yet in a naive order).<br/>
     * The ConnectorId/value combination and the ViewId-Key are also added to the
     * template to enable Local Switching.
     */
    public static XLinkUrlBlueprint prepareXLinkTemplate(String baseUrl,
            String connectorId, Map<ModelDescription, XLinkConnectorView[]> modelsToViews,
            int expirationDays, XLinkConnector[] registeredTools) {
        // Append the URL-encoded expiration date as the first query parameter
        // (mutates only the local parameter copy, not the caller's string).
        baseUrl += "?" + XLinkConstants.XLINK_EXPIRATIONDATE_KEY + "=" + urlEncodeParameter(getExpirationDate(expirationDays));
        // The connector id parameter is kept separate so it can be appended per link.
        String connectorIdParam = XLinkConstants.XLINK_CONNECTORID_KEY + "=" + urlEncodeParameter(connectorId);
        Map<String, ModelDescription> viewToModels = assigneModelsToViews(modelsToViews);
        return new XLinkUrlBlueprint(baseUrl, viewToModels, registeredTools, connectorIdParam, new XLinkUrlKeyNames());
    }
}
public class ArtifactResource { /** * Return all the artifacts that matches the filters .
* This method is call via GET < grapes _ url > / artifact / < gavc >
* Following filters can be used : artifactId , classifier , groupId , hasLicense , licenseId , type , uriInfo , version
* @ param uriInfo UriInfo
* @ return Response An artifact in HTML or JSON */
@ GET @ Produces ( MediaType . APPLICATION_JSON ) @ Path ( ServerAPI . GET_ALL ) public Response getAll ( @ Context final UriInfo uriInfo ) { } } | if ( LOG . isInfoEnabled ( ) ) { LOG . info ( String . format ( "Got a get all artifact request [%s]" , uriInfo . getPath ( ) ) ) ; } final FiltersHolder filters = new FiltersHolder ( ) ; filters . init ( uriInfo . getQueryParameters ( ) ) ; if ( filters . getArtifactFieldsFilters ( ) . size ( ) == 0 ) { LOG . warn ( "No artifact filtering criteria. Returning BAD_REQUEST" ) ; throw new WebApplicationException ( Response . status ( Response . Status . BAD_REQUEST ) . entity ( "Please provide at least one artifact filtering criteria" ) . build ( ) ) ; } final List < Artifact > artifacts = new ArrayList < > ( ) ; final List < DbArtifact > dbArtifacts = getArtifactHandler ( ) . getArtifacts ( filters ) ; for ( final DbArtifact dbArtifact : dbArtifacts ) { artifacts . add ( getModelMapper ( ) . getArtifact ( dbArtifact ) ) ; } return Response . ok ( artifacts ) . build ( ) ; |
public class ESRIFileUtil { /** * Translate a floating point value into ESRI standard .
* < p > This function translate the Java NaN and infinites values
* into the ESRI equivalent value .
* @ param value the value .
* @ return the ESRI value */
@ Pure public static double toESRI ( double value ) { } } | return ( Double . isInfinite ( value ) || Double . isNaN ( value ) ) ? ESRI_NAN : value ; |
public class ValoSideBar {

    /**
     * Specifies whether the side bar should use large icons or not, applying the
     * corresponding style change to the composition root when it already exists.
     *
     * @see ValoTheme#MENU_PART_LARGE_ICONS
     */
    public void setLargeIcons(boolean largeIcons) {
        this.largeIcons = largeIcons;
        // Composition root may not have been created yet; presumably the stored
        // flag is applied on creation — TODO confirm against the root factory.
        if (getCompositionRoot() != null) {
            if (largeIcons) {
                getCompositionRoot().addStyleName(ValoTheme.MENU_PART_LARGE_ICONS);
            } else {
                getCompositionRoot().removeStyleName(ValoTheme.MENU_PART_LARGE_ICONS);
            }
        }
    }
}
public class JobHistoryFileParserBase {

    /**
     * Calculates the cost of a job in current units:
     * jobCost = mbMillis * computeTco / (millisInOneDay * machineMemory)
     *
     * @param mbMillis megabyte-milliseconds consumed by the job
     * @param computeTco total cost of ownership of the compute
     * @param machineMemory memory of the machine
     * @return the job cost, or 0.0 when machineMemory or computeTco is 0
     */
    public static double calculateJobCost(long mbMillis, double computeTco, long machineMemory) {
        // Guard against division by zero (and meaningless zero TCO).
        if ((machineMemory == 0L) || (computeTco == 0.0)) {
            LOG.error("Unable to calculate job cost since machineMemory " + machineMemory
                + " or computeTco " + computeTco + " is 0; returning jobCost as 0");
            return 0.0;
        }
        double jobCost = (mbMillis * computeTco) / (Constants.MILLIS_ONE_DAY * machineMemory);
        return jobCost;
    }
}
public class JSLocalConsumerPoint {

    /**
     * Validates consumer parameters:
     * maxBatchSize &gt; 0, messageLockExpiry &gt;= 0, maxActiveMessages &gt;= 0.
     * Throws SIIncorrectCallException with an NLS message on the first violation.
     *
     * @param maxActiveMessages must be &gt;= 0
     * @param messageLockExpiry must be &gt;= 0
     * @param maxBatchSize must be &gt; 0
     */
    private void checkParams(int maxActiveMessages, long messageLockExpiry, int maxBatchSize) throws SIIncorrectCallException {
        // Entry trace includes all three parameters for diagnostics.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "checkParams",
                new Object[] { Integer.valueOf(maxActiveMessages), Long.valueOf(messageLockExpiry), Integer.valueOf(maxBatchSize) });

        if (maxActiveMessages < 0) {
            // Exit trace records the reason before the exception is thrown.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "checkParams", "SIIncorrectCallException maxActiveMessages < 0");
            throw new SIIncorrectCallException(
                nls_cwsir.getFormattedMessage("REG_ASYNCH_CONSUMER_ERROR_CWSIR0141",
                    new Object[] { Integer.valueOf(maxActiveMessages) }, null));
        }
        if (messageLockExpiry < 0) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "checkParams", "SIIncorrectCallException messageLockExpiry < 0");
            throw new SIIncorrectCallException(
                nls_cwsir.getFormattedMessage("REG_ASYNCH_CONSUMER_ERROR_CWSIR0142",
                    new Object[] { Long.valueOf(messageLockExpiry) }, null));
        }
        if (maxBatchSize <= 0) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "checkParams", "SIIncorrectCallException maxBatchSize <= 0");
            throw new SIIncorrectCallException(
                nls_cwsir.getFormattedMessage("REG_ASYNCH_CONSUMER_ERROR_CWSIR0143",
                    new Object[] { Integer.valueOf(maxBatchSize) }, null));
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "checkParams");
    }
}
public class ByteCodeParser {

    /**
     * Parses the .class file from the given stream and returns the populated
     * {@code JavaClass}. Lazily creates the class loader and class holder when
     * they were not preconfigured.
     */
    public JavaClass parse(InputStream is) throws IOException {
        _is = is;

        // Fall back to fresh collaborators when none were injected.
        if (_loader == null)
            _loader = new JavaClassLoader();

        if (_class == null)
            _class = new JavaClass(_loader);

        _cp = _class.getConstantPool();

        parseClass();

        return _class;
    }
}
public class CmsGalleryActionElement {

    /**
     * Returns the serialized initial data for the gallery dialog depending on the given mode.<p>
     *
     * @param galleryMode the gallery mode
     * @return the serialized data (data and search dictionaries concatenated)
     * @throws Exception if something goes wrong
     */
    private String export(GalleryMode galleryMode) throws Exception {
        CmsGalleryConfiguration conf = createGalleryConfigurationFromRequest(galleryMode);
        CmsGalleryDataBean data = CmsGalleryService.getInitialSettings(getRequest(), conf);
        CmsGallerySearchBean search = null;
        // Only run an eager search when the dialog opens on the results tab.
        if (GalleryTabId.cms_tab_results.equals(data.getStartTab())) {
            search = CmsGalleryService.getSearch(getRequest(), data);
        }
        Set<String> folderFilter = data.getStartFolderFilter();
        if ((folderFilter != null) && !folderFilter.isEmpty()) {
            data.setVfsPreloadData(CmsGalleryService.generateVfsPreloadData(getCmsObject(), null, folderFilter));
        }
        if ((search != null) && (search.getScope() != null) && (search.getScope() != data.getScope())) {
            // default selected scope option should be the one for which the search has been actually performed
            data.setScope(search.getScope());
        } else if ((search != null) && (search.getScope() == null)) {
            data.setScope(OpenCms.getWorkplaceManager().getGalleryDefaultScope());
        }
        // Serialize both dictionaries, using the RPC method signatures for type information.
        StringBuffer sb = new StringBuffer();
        sb.append(exportDictionary(CmsGalleryDataBean.DICT_NAME,
            I_CmsGalleryService.class.getMethod("getInitialSettings", CmsGalleryConfiguration.class), data));
        sb.append(exportDictionary(CmsGallerySearchBean.DICT_NAME,
            I_CmsGalleryService.class.getMethod("getSearch", CmsGalleryDataBean.class), search));
        return sb.toString();
    }
}
public class FileProxyRNASequenceCreator {

    /**
     * Creates a lazily-loaded RNA sequence backed by the FASTA file. Even though
     * the sequence string is passed in, only its length and the offset index in
     * the FASTA file are used — the actual data is read on demand by the proxy loader.
     *
     * @param sequence sequence string (only its length is used here)
     * @param index offset of the sequence data within the FASTA file
     * @return a proxy-backed RNA sequence
     * @throws CompoundNotFoundException if the file contains an unknown compound
     * @throws IOException if reading the backing file fails
     */
    @Override
    public AbstractSequence<NucleotideCompound> getSequence(String sequence, long index)
            throws CompoundNotFoundException, IOException {
        SequenceFileProxyLoader<NucleotideCompound> sequenceFileProxyLoader =
            new SequenceFileProxyLoader<NucleotideCompound>(file, sequenceParser, index, sequence.length(), compoundSet);
        return new RNASequence(sequenceFileProxyLoader, compoundSet);
    }
}
public class MetricValue { /** * If the < code > comparisonOperator < / code > calls for a set of CIDRs , use this to specify that set to be compared with
* the < code > metric < / code > .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCidrs ( java . util . Collection ) } or { @ link # withCidrs ( java . util . Collection ) } if you want to override the
* existing values .
* @ param cidrs
* If the < code > comparisonOperator < / code > calls for a set of CIDRs , use this to specify that set to be
* compared with the < code > metric < / code > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public MetricValue withCidrs ( String ... cidrs ) { } } | if ( this . cidrs == null ) { setCidrs ( new java . util . ArrayList < String > ( cidrs . length ) ) ; } for ( String ele : cidrs ) { this . cidrs . add ( ele ) ; } return this ; |
public class HttpUtil { /** * Extract headers from { @ link HttpServletRequest }
* @ param request { @ link HttpServletRequest } to extract headers from
* @ return Array with { @ link Header } s */
public static Header [ ] getHeadersFromRequest ( HttpServletRequest request ) { } } | List < Header > returnHeaderList = new ArrayList < > ( ) ; Enumeration < String > headerNames = request . getHeaderNames ( ) ; while ( headerNames . hasMoreElements ( ) ) { String headerName = headerNames . nextElement ( ) ; Enumeration < String > headerValues = request . getHeaders ( headerName ) ; while ( headerValues . hasMoreElements ( ) ) { String headerValue = headerValues . nextElement ( ) ; // as cookies are sent in headers , we ' ll also have to check for unsupported cookies here
if ( headerName . toLowerCase ( ) . equals ( "cookie" ) ) { String [ ] cookies = headerValue . split ( ";" ) ; List < String > newCookieList = new ArrayList < > ( ) ; for ( int i = 0 ; i < cookies . length ; i ++ ) { final String cookieFromArray = cookies [ i ] ; newCookieList . add ( cookieFromArray ) ; } // rewrite the cookie value
headerValue = StringUtils . join ( newCookieList , ";" ) ; } if ( headerValue . isEmpty ( ) ) { LOG . debug ( "Skipping request header '" + headerName + "' as it's value is empty (possibly an " + "unsupported cookie value has been removed)" ) ; } else { LOG . debug ( "Adding request header: " + headerName + "=" + headerValue ) ; returnHeaderList . add ( new BasicHeader ( headerName , headerValue ) ) ; } } } Header [ ] headersArray = new Header [ returnHeaderList . size ( ) ] ; headersArray = returnHeaderList . toArray ( headersArray ) ; return headersArray ; |
public class Settings { /** * Set a view setting
* @ param viewId the view to which add the setting
* @ param item the item name
* @ param value the item value */
public void setViewSetting ( final String viewId , final String item , final String value ) { } } | final ConfigItemMapEntrySet view = this . viewById . get ( viewId ) ; if ( view == null ) return ; view . set ( item , value ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.