signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class InstrumentedExecutors {
    /**
     * Returns an instrumented wrapper around {@link Executors#defaultThreadFactory()}.
     * The underlying factory creates non-daemon threads named
     * <em>pool-N-thread-M</em> (see {@link Executors#defaultThreadFactory()} for
     * the full naming and priority rules); the wrapper reports thread metrics to
     * the given registry under the given name.
     *
     * @param registry the {@link MetricRegistry} that will contain the metrics.
     * @param name the (metrics) name for this executor service, see {@link MetricRegistry#name(String, String...)}.
     * @return an instrumented thread factory
     * @see Executors#defaultThreadFactory()
     */
    public static InstrumentedThreadFactory defaultThreadFactory(MetricRegistry registry, String name) {
        return new InstrumentedThreadFactory(Executors.defaultThreadFactory(), registry, name);
    }
}
public class Counter { /** * This method removes given key from counter
* @ param element
* @ return counter value */
public double removeKey ( T element ) { } } | AtomicDouble v = map . remove ( element ) ; dirty . set ( true ) ; if ( v != null ) return v . get ( ) ; else return 0.0 ; |
public class Zone {
    /**
     * Creates a zone with a placeholder email ("nil@" + name) and a fixed TTL of 86400 seconds.
     *
     * @param name corresponds to {@link #name()}
     * @param id nullable, corresponds to {@link #id()}
     * @deprecated Use {@link #create(String, String, int, String)}. This will be removed in a future version.
     */
    @Deprecated
    public static Zone create(String name, String id) {
        // Note the constructor argument order: id first, then name.
        return new Zone(id, name, 86400, "nil@" + name);
    }
}
public class CommonG {
    /**
     * Opens a JDBC connection to a PostgreSQL database, either plain (user/password)
     * or TLS-secured (client certificate, verify-full). The connection is stored in
     * the {@code myConnection} field.
     *
     * @param database database name appended to the JDBC URL
     * @param host database host
     * @param port database port (surrounding brackets, e.g. "[5432]", are stripped)
     * @param user database user
     * @param password database password (non-secure mode only)
     * @param secure whether to use the TLS/certificate connection path
     * @param ca trusted certificate authorities (.crt)
     * @param crt server certificate
     * @param key server private key
     * @throws SQLException if the connection attempt fails (logged, then re-thrown)
     */
    public void connectToPostgreSQLDatabase(String database, String host, String port, String user, String password, Boolean secure, String ca, String crt, String key) throws SQLException {
        // Strip surrounding brackets from the port, e.g. "[5432]" -> "5432".
        if (port.startsWith("[")) {
            port = port.substring(1, port.length() - 1);
        }
        // NOTE(review): 'secure' is a boxed Boolean; a null value throws NPE here — confirm callers never pass null.
        if (!secure) {
            if (password == null) {
                // NOTE(review): hard-coded fallback password — confirm this default is intended.
                password = "stratio";
            }
            try {
                myConnection = DriverManager.getConnection("jdbc:postgresql://" + host + ":" + port + "/" + database, user, password);
            } catch (SQLException se) {
                // log the exception
                this.getLogger().error(se.getMessage());
                // re-throw the exception
                throw se;
            }
        } else {
            Properties props = new Properties();
            if (user != null) {
                props.setProperty("user", user);
            }
            if (ca != null) {
                props.setProperty("sslrootcert", ca);
            }
            if (crt != null) {
                props.setProperty("sslcert", crt);
            }
            if (key != null) {
                props.setProperty("sslkey", key);
            }
            // NOTE(review): sets the literal string "null" as the password — looks suspicious; verify this is deliberate.
            props.setProperty("password", "null");
            props.setProperty("ssl", "true");
            props.setProperty("sslmode", "verify-full");
            try {
                myConnection = DriverManager.getConnection("jdbc:postgresql://" + host + ":" + port + "/" + database, props);
            } catch (SQLException se) {
                // log the exception
                this.getLogger().error(se.getMessage());
                // re-throw the exception
                throw se;
            }
        }
    }
}
public class MarkdownParser { /** * Change the formats to be applied to the outline entries .
* < p > The format must be compatible with { @ link MessageFormat } .
* < p > If section auto - numbering is on ,
* the first parameter < code > { 0 } < / code > equals to the prefix ,
* the second parameter < code > { 1 } < / code > equals to the string representation of the section number ,
* the third parameter < code > { 2 } < / code > equals to the title text , and the fourth parameter
* < code > { 3 } < / code > is the reference id of the section .
* < p > If section auto - numbering is off ,
* the first parameter < code > { 0 } < / code > equals to the prefix ,
* the second parameter < code > { 1 } < / code > equals to the title text , and the third parameter
* < code > { 2 } < / code > is the reference id of the section .
* @ param formatWithoutNumbers the format for the outline entries without section numbers .
* @ param formatWithNumbers the format for the outline entries with section numbers . */
public void setOutlineEntryFormat ( String formatWithoutNumbers , String formatWithNumbers ) { } } | if ( ! Strings . isEmpty ( formatWithoutNumbers ) ) { this . outlineEntryWithoutNumberFormat = formatWithoutNumbers ; } if ( ! Strings . isEmpty ( formatWithNumbers ) ) { this . outlineEntryWithNumberFormat = formatWithNumbers ; } |
public class LocalDateAndTimeStampImpl {
    /**
     * Reports whether the given EMF structural feature differs from its default value.
     * Each case compares the feature's current field against its generated
     * *_EDEFAULT constant, null-safely.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__STAMP_TYPE:
                return STAMP_TYPE_EDEFAULT == null ? stampType != null : !STAMP_TYPE_EDEFAULT.equals(stampType);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__THUN_YEAR:
                return THUN_YEAR_EDEFAULT == null ? tHunYear != null : !THUN_YEAR_EDEFAULT.equals(tHunYear);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__TEN_YEAR:
                return TEN_YEAR_EDEFAULT == null ? tenYear != null : !TEN_YEAR_EDEFAULT.equals(tenYear);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__DAY:
                return DAY_EDEFAULT == null ? day != null : !DAY_EDEFAULT.equals(day);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__HOUR:
                return HOUR_EDEFAULT == null ? hour != null : !HOUR_EDEFAULT.equals(hour);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__MINUTE:
                return MINUTE_EDEFAULT == null ? minute != null : !MINUTE_EDEFAULT.equals(minute);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__SECOND:
                return SECOND_EDEFAULT == null ? second != null : !SECOND_EDEFAULT.equals(second);
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__HUND_SEC:
                return HUND_SEC_EDEFAULT == null ? hundSec != null : !HUND_SEC_EDEFAULT.equals(hundSec);
        }
        // Unknown feature IDs are delegated to the generated superclass.
        return super.eIsSet(featureID);
    }
}
public class ApiOvhOrder {
    /**
     * Create order.
     * REST: POST /order/dedicated/server/{serviceName}/usbKey/{duration}
     *
     * @param capacity [required] Capacity in gigabytes (sent in the request body)
     * @param serviceName [required] The internal name of your dedicated server (path parameter)
     * @param duration [required] Duration (path parameter)
     * @throws IOException if the HTTP call or response conversion fails
     */
    public OvhOrder dedicated_server_serviceName_usbKey_duration_POST(String serviceName, String duration, OvhUsbKeyCapacityEnum capacity) throws IOException {
        String qPath = "/order/dedicated/server/{serviceName}/usbKey/{duration}";
        // Substitute the path parameters into the URL template.
        StringBuilder sb = path(qPath, serviceName, duration);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "capacity", capacity);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhOrder.class);
    }
}
public class CopyFileExtensions {
    /**
     * Copies the given source directory to the given destination directory,
     * including its contents (delegates with {@code true} — presumably the
     * "copy recursively/with contents" flag of the three-argument overload; confirm).
     *
     * @param source the source directory.
     * @param destination the destination directory.
     * @return true if the directory is copied, otherwise false.
     * @throws FileIsSecurityRestrictedException if the source file is security restricted.
     * @throws IOException if an error occurs by reading or writing.
     * @throws FileIsADirectoryException if the destination file is a directory.
     * @throws FileIsNotADirectoryException if the source file is not a directory.
     * @throws DirectoryAlreadyExistsException if the directory already exists.
     */
    public static boolean copyDirectory(final File source, final File destination) throws FileIsSecurityRestrictedException, IOException, FileIsADirectoryException, FileIsNotADirectoryException, DirectoryAlreadyExistsException {
        return copyDirectory(source, destination, true);
    }
}
public class SkewHeap {
    /** {@inheritDoc} */
    @Override
    @LogarithmicTime(amortized = true)
    @SuppressWarnings("unchecked")
    public AddressableHeap.Handle<K, V> insert(K key, V value) {
        // A heap that has been melded into another heap is unusable afterwards.
        if (other != this) {
            throw new IllegalStateException("A heap cannot be used after a meld");
        }
        if (key == null) {
            throw new NullPointerException("Null keys not permitted");
        }
        Node<K, V> n = createNode(key, value);
        // easy special cases
        if (size == 0) {
            root = n;
            size = 1;
            return n;
        } else if (size == 1) {
            // Two-element heap: link the nodes directly instead of a full union.
            int c;
            if (comparator == null) {
                c = ((Comparable<? super K>) key).compareTo(root.key);
            } else {
                c = comparator.compare(key, root.key);
            }
            // o_c / y_s appear to be the child / sibling links of the node — confirm field semantics.
            if (c <= 0) {
                n.o_c = root;
                root.y_s = n;
                root = n;
            } else {
                root.o_c = n;
                n.y_s = root;
            }
            size = 2;
            return n;
        }
        // General case: union the existing heap with a singleton heap for the new node.
        if (comparator == null) {
            root = union(root, n);
        } else {
            root = unionWithComparator(root, n);
        }
        size++;
        return n;
    }
}
public class JMOptional {
    /**
     * Wraps a possibly-null target in an {@link Optional} and filters it with the
     * given predicate.
     *
     * @param <T> the type parameter
     * @param target the target, may be null
     * @param predicate the predicate applied when the target is non-null
     * @return an Optional holding the target when it is non-null and matches the
     *         predicate, otherwise an empty Optional
     */
    public static <T> Optional<T> getNullableAndFilteredOptional(T target, Predicate<T> predicate) {
        Optional<T> maybeTarget = Optional.ofNullable(target);
        return maybeTarget.filter(predicate);
    }
}
public class DSetImpl {
    /**
     * Evaluates the boolean query predicate for each element of the collection and
     * returns a new collection containing each element that evaluated to true.
     *
     * @param predicate an OQL boolean query predicate.
     * @return a new collection containing the elements that evaluated true for the predicate.
     * @throws org.odmg.QueryInvalidException if the query predicate is invalid.
     */
    public DCollection query(String predicate) throws org.odmg.QueryInvalidException {
        // 1. build complete OQL statement
        String oql = "select all from java.lang.Object where " + predicate;
        TransactionImpl tx = getTransaction();
        OQLQuery predicateQuery = tx.getImplementation().newOQLQuery();
        // The capsule manages the PersistenceBroker lifecycle for this call.
        PBCapsule capsule = new PBCapsule(tx.getImplementation().getCurrentPBKey(), tx);
        PersistenceBroker broker = capsule.getBroker();
        try {
            predicateQuery.create(oql);
            Query pQ = ((OQLQueryImpl) predicateQuery).getQuery();
            Criteria pCrit = pQ.getCriteria();
            Criteria allElementsCriteria = this.getPkCriteriaForAllElements(broker);
            // join selection of elements with predicate criteria:
            pCrit.addAndCriteria(allElementsCriteria);
            Class clazz = this.getElementsExtentClass(broker);
            Query q = new QueryByCriteria(clazz, pCrit);
            if (log.isDebugEnabled())
                log.debug(q.toString());
            // 2. perform query
            return (DSetImpl) broker.getCollectionByQuery(DSetImpl.class, q);
        } catch (PersistenceBrokerException e) {
            // NOTE(review): the original cause is dropped; only the message is propagated.
            throw new ODMGRuntimeException(e.getMessage());
        } finally {
            // Always release the broker, even on failure.
            capsule.destroy();
        }
    }
}
public class XmlRpcDataMarshaller { /** * Transforms the Collection of References into a Vector of Reference parameters .
* @ param references a { @ link java . util . Collection } object .
* @ return the Collection of References into a Vector of Reference parameters */
public static Vector < Object > toXmlRpcReferencesParameters ( Collection < Reference > references ) { } } | Vector < Object > referencesParams = new Vector < Object > ( ) ; for ( Reference reference : references ) { referencesParams . add ( reference . marshallize ( ) ) ; } return referencesParams ; |
public class CompilerStatistics {
    /**
     * Takes a snapshot of the current memory usage of the JVM and updates the
     * high-water marks for heap and non-heap usage/totals.
     */
    public void updateMemoryInfo() {
        MemoryMXBean meminfo = ManagementFactory.getMemoryMXBean();
        MemoryUsage usage = meminfo.getHeapMemoryUsage();
        long _heapUsed = usage.getUsed();
        updateMaximum(heapUsed, _heapUsed);
        updateMaximum(heapTotal, usage.getMax());
        // Reuse the local for the non-heap snapshot.
        usage = meminfo.getNonHeapMemoryUsage();
        updateMaximum(nonHeapUsed, usage.getUsed());
        updateMaximum(nonHeapTotal, usage.getMax());
        // Log the memory usage if requested. Check the log level before logging
        // to minimize object creation overheads during preparation of the call
        // parameters.
        if (memoryLogger.isLoggable(Level.INFO)) {
            memoryLogger.log(Level.INFO, "MEM", new Object[] { _heapUsed });
        }
    }
}
public class Context { /** * Gets the nodes from the site tree which are " In Scope " . Searches recursively starting from
* the root node . Should be used with care , as it is time - consuming , querying the database for
* every node in the Site Tree .
* @ return the nodes in scope from site tree */
public List < SiteNode > getTopNodesInContextFromSiteTree ( ) { } } | List < SiteNode > nodes = new LinkedList < > ( ) ; SiteNode rootNode = session . getSiteTree ( ) . getRoot ( ) ; @ SuppressWarnings ( "unchecked" ) Enumeration < TreeNode > en = rootNode . children ( ) ; while ( en . hasMoreElements ( ) ) { SiteNode sn = ( SiteNode ) en . nextElement ( ) ; if ( isContainsNodesInContext ( sn ) ) { nodes . add ( sn ) ; } } return nodes ; |
public class Crc32 { /** * Feed a bitstring to the crc calculation . */
public void append ( short bits ) { } } | long l ; long [ ] a1 ; l = ( ( l = crc ) >> 8L ) ^ ( a1 = CRC32_TABLE ) [ ( int ) ( ( l & 0xFF ) ^ ( long ) ( bits & 0xFF ) ) ] ; crc = ( l >> 8L ) ^ a1 [ ( int ) ( ( l & 0xFF ) ^ ( long ) ( ( bits & 0xffff ) >> 8 ) ) ] ; |
public class RNAUtils {
    /**
     * Hybridizes two PolymerNotations together, producing the pairing connections,
     * if the two polymers are antiparallel.
     *
     * @param one first PolymerNotation
     * @param two second PolymerNotation
     * @return List of ConnectionNotations, one "pair" connection per monomer position
     * @throws RNAUtilsException if a polymer is not RNA/DNA or the two are not antiparallel
     * @throws NotationException if a built notation is not valid
     * @throws HELM2HandledException if it contains HELM2-specific features that cannot be downcast to HELM1 format
     * @throws ChemistryException if the Chemistry Engine cannot be initialized
     * @throws NucleotideLoadingException if nucleotides cannot be loaded
     */
    public static List<ConnectionNotation> hybridizeAntiparallel(PolymerNotation one, PolymerNotation two) throws RNAUtilsException, NotationException, HELM2HandledException, ChemistryException, NucleotideLoadingException {
        checkRNA(one);
        checkRNA(two);
        List<ConnectionNotation> connections = new ArrayList<ConnectionNotation>();
        ConnectionNotation connection;
        /* Length of the two rnas have to be the same */
        if (areAntiparallel(one, two)) {
            // Pair position i+1 of 'one' with the mirrored position of 'two',
            // e.g. "1:pair-N:pair", "2:pair-(N-1):pair", ...
            for (int i = 0; i < PolymerUtils.getTotalMonomerCount(one); i++) {
                int backValue = PolymerUtils.getTotalMonomerCount(one) - i;
                int firstValue = i + 1;
                String details = firstValue + ":pair-" + backValue + ":pair";
                connection = new ConnectionNotation(one.getPolymerID(), two.getPolymerID(), details);
                connections.add(connection);
            }
            return connections;
        } else {
            throw new RNAUtilsException("The given RNAs are not antiparallel to each other");
        }
    }
}
public class DefaultTraverserContext {
    /**
     * PRIVATE: Used by {@link Traverser} to attach the child contexts of this node.
     * May only be called once per context instance.
     */
    void setChildrenContexts(Map<String, List<TraverserContext<T>>> children) {
        // Guard against double initialisation; assertTrue presumably throws when the condition is false.
        assertTrue(this.children == null, "children already set");
        this.children = children;
    }
}
public class CreateIdentityProviderRequestMarshaller {
    /**
     * Marshalls each field of the given request via the bound protocol marshaller.
     *
     * @param createIdentityProviderRequest the request to marshall; must not be null.
     * @param protocolMarshaller the protocol marshaller receiving the field values.
     * @throws SdkClientException if the request is null or marshalling fails.
     */
    public void marshall(CreateIdentityProviderRequest createIdentityProviderRequest, ProtocolMarshaller protocolMarshaller) {
        if (createIdentityProviderRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createIdentityProviderRequest.getUserPoolId(), USERPOOLID_BINDING);
            protocolMarshaller.marshall(createIdentityProviderRequest.getProviderName(), PROVIDERNAME_BINDING);
            protocolMarshaller.marshall(createIdentityProviderRequest.getProviderType(), PROVIDERTYPE_BINDING);
            protocolMarshaller.marshall(createIdentityProviderRequest.getProviderDetails(), PROVIDERDETAILS_BINDING);
            protocolMarshaller.marshall(createIdentityProviderRequest.getAttributeMapping(), ATTRIBUTEMAPPING_BINDING);
            protocolMarshaller.marshall(createIdentityProviderRequest.getIdpIdentifiers(), IDPIDENTIFIERS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AdaptiveTableLayout { /** * When used adapter with IMMUTABLE data , returns rows position modifications
* ( old position - > new position )
* @ return row position modification map . Includes only modified row numbers */
@ SuppressWarnings ( "unchecked" ) public Map < Integer , Integer > getLinkedAdapterRowsModifications ( ) { } } | return mAdapter instanceof LinkedAdaptiveTableAdapterImpl ? ( ( LinkedAdaptiveTableAdapterImpl ) mAdapter ) . getRowsModifications ( ) : Collections . < Integer , Integer > emptyMap ( ) ; |
public class MergeResources {
    /**
     * Workaround for https://issuetracker.google.com/67418335 — extends the
     * parent's input snapshot with extra properties that affect the merge so the
     * task re-runs when they change.
     */
    @Override
    @Input
    public String getCombinedInput() {
        // The two entries added with a null value register the property name only —
        // presumably to keep the cache key shape stable; confirm against the tracker issue.
        return new CombinedInput(super.getCombinedInput())
                .add("dataBindingLayoutInfoOutFolder", null)
                .add("publicFile", getPublicFile())
                .add("blameLogFolder", getBlameLogFolder())
                .add("mergedNotCompiledResourcesOutputDirectory", null)
                .toString();
    }
}
public class ObjectFactory {
    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link MultiPointPropertyType}{@code >}
     * for the GML "multiPosition" element.
     *
     * @param value the Java instance representing the XML element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link MultiPointPropertyType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "multiPosition")
    public JAXBElement<MultiPointPropertyType> createMultiPosition(MultiPointPropertyType value) {
        // 'null' scope: this is a global (non-nested) element declaration.
        return new JAXBElement<MultiPointPropertyType>(_MultiPosition_QNAME, MultiPointPropertyType.class, null, value);
    }
}
public class Configuration { /** * Returns the class associated with the given key as a string .
* @ param < T > The type of the class to return .
* @ param key The key pointing to the associated value
* @ param defaultValue The optional default value returned if no entry exists
* @ param classLoader The class loader used to resolve the class .
* @ return The value associated with the given key , or the default value , if to entry for the key exists . */
@ SuppressWarnings ( "unchecked" ) public < T > Class < T > getClass ( String key , Class < ? extends T > defaultValue , ClassLoader classLoader ) throws ClassNotFoundException { } } | Object o = getRawValue ( key ) ; if ( o == null ) { return ( Class < T > ) defaultValue ; } if ( o . getClass ( ) == String . class ) { return ( Class < T > ) Class . forName ( ( String ) o , true , classLoader ) ; } LOG . warn ( "Configuration cannot evaluate value " + o + " as a class name" ) ; return ( Class < T > ) defaultValue ; |
public class EitherLens {
    /**
     * Convenience static factory method for creating a lens over left values, wrapping them in a {@link Maybe}. When
     * setting, a {@link Maybe#nothing()} value means to leave the {@link Either} unaltered, whereas a
     * {@link Maybe#just} value replaces the either with a left over the {@link Maybe}.
     * Note that this lens is NOT lawful, since "you get back what you put in" fails for {@link Maybe#nothing()}.
     *
     * @param <L> the left parameter type
     * @param <R> the right parameter type
     * @return a lens that focuses on left values
     */
    public static <L, R> Lens.Simple<Either<L, R>, Maybe<L>> left() {
        // Getter: projectA extracts the left value as a Maybe.
        // Setter: map a present left back into Either.left, or keep the original either when absent.
        return simpleLens(CoProduct2::projectA, (lOrR, maybeL) -> maybeL.<Either<L, R>>fmap(Either::left).orElse(lOrR));
    }
}
public class ListDeviceEventsRequestMarshaller {
    /**
     * Marshalls each field of the given request via the bound protocol marshaller.
     *
     * @param listDeviceEventsRequest the request to marshall; must not be null.
     * @param protocolMarshaller the protocol marshaller receiving the field values.
     * @throws SdkClientException if the request is null or marshalling fails.
     */
    public void marshall(ListDeviceEventsRequest listDeviceEventsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listDeviceEventsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listDeviceEventsRequest.getDeviceArn(), DEVICEARN_BINDING);
            protocolMarshaller.marshall(listDeviceEventsRequest.getEventType(), EVENTTYPE_BINDING);
            protocolMarshaller.marshall(listDeviceEventsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listDeviceEventsRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class IfcPropertySetDefinitionImpl {
    /**
     * Returns the DefinesType feature via the generated EMF reflective accessor
     * (the {@code true} argument requests resolution of proxies).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcTypeObject> getDefinesType() {
        return (EList<IfcTypeObject>) eGet(Ifc4Package.Literals.IFC_PROPERTY_SET_DEFINITION__DEFINES_TYPE, true);
    }
}
public class SpatialRuleLookupArray { /** * This method adds the container if no such rule container exists in this lookup and returns the index otherwise . */
private int addRuleContainer ( SpatialRuleContainer container ) { } } | int newIndex = this . ruleContainers . indexOf ( container ) ; if ( newIndex >= 0 ) return newIndex ; newIndex = ruleContainers . size ( ) ; if ( newIndex >= 255 ) throw new IllegalStateException ( "No more spatial rule container fit into this lookup as 255 combination of ruleContainers reached" ) ; this . ruleContainers . add ( container ) ; return newIndex ; |
public class AStarPathUtil { /** * Return a heuristic estimate of the cost to get from < code > ( ax , ay ) < / code > to
* < code > ( bx , by ) < / code > . */
protected static int getDistanceEstimate ( int ax , int ay , int bx , int by ) { } } | // we ' re doing all of our cost calculations based on geometric distance times ten
int xsq = bx - ax ; int ysq = by - ay ; return ( int ) ( ADJACENT_COST * Math . sqrt ( xsq * xsq + ysq * ysq ) ) ; |
public class JSONArray {
    /**
     * Returns the value at {@code index} if it exists and is a boolean or can be
     * coerced to a boolean.
     *
     * @param index the index to get the value from
     * @return the value at {@code index}
     * @throws JSONException if the value at {@code index} doesn't exist or cannot
     *         be coerced to a boolean.
     */
    public boolean getBoolean(int index) throws JSONException {
        // get() throws when the index is absent; toBoolean returns null when coercion fails.
        Object object = get(index);
        Boolean result = JSON.toBoolean(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "boolean");
        }
        return result;
    }
}
public class StringWalker { /** * Advances this { @ link StringWalker } to the { @ code y } coordinate . < br >
* Sets the character to the
* @ param y the y
* @ return true , if successful */
public boolean walkToY ( int y ) { } } | if ( this . y + lineHeight > y ) return true ; while ( nextLine ( ) ) if ( this . y + lineHeight > y ) return true ; return false ; |
public class ManagedConcurrentValueMap { /** * Returns the value stored for the given key at the point of call .
* @ param key a non null key
* @ return the value stored in the map for the given key */
public V get ( K key ) { } } | ManagedReference < V > ref = internalMap . get ( key ) ; if ( ref != null ) return ref . get ( ) ; return null ; |
public class DescribeClientPropertiesResult {
    /**
     * Information about the specified Amazon WorkSpaces clients.
     * Lazily initialises the backing list on first access so this never returns null.
     *
     * @return Information about the specified Amazon WorkSpaces clients.
     */
    public java.util.List<ClientPropertiesResult> getClientPropertiesList() {
        if (clientPropertiesList == null) {
            clientPropertiesList = new com.amazonaws.internal.SdkInternalList<ClientPropertiesResult>();
        }
        return clientPropertiesList;
    }
}
public class DeploymentMarshaller {
    /**
     * Marshalls each field of the given deployment via the bound protocol marshaller.
     *
     * @param deployment the deployment to marshall; must not be null.
     * @param protocolMarshaller the protocol marshaller receiving the field values.
     * @throws SdkClientException if the deployment is null or marshalling fails.
     */
    public void marshall(Deployment deployment, ProtocolMarshaller protocolMarshaller) {
        if (deployment == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deployment.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(deployment.getDeploymentArn(), DEPLOYMENTARN_BINDING);
            protocolMarshaller.marshall(deployment.getDeploymentId(), DEPLOYMENTID_BINDING);
            protocolMarshaller.marshall(deployment.getDeploymentType(), DEPLOYMENTTYPE_BINDING);
            protocolMarshaller.marshall(deployment.getGroupArn(), GROUPARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ServletContainer { /** * Registers a servlet .
* @ param servletClass servlet class to be registered . This class must be annotated with { @ linkplain WebServlet } .
* @ return this . */
public final SC registerServlet ( Class < ? extends HttpServlet > servletClass ) { } } | WebServlet webServlet = servletClass . getAnnotation ( WebServlet . class ) ; if ( webServlet == null ) throw new IllegalArgumentException ( String . format ( "Missing annotation '%s' for class '%s'" , WebFilter . class . getName ( ) , servletClass . getName ( ) ) ) ; String [ ] urlPatterns = webServlet . value ( ) ; if ( urlPatterns . length == 0 ) urlPatterns = webServlet . urlPatterns ( ) ; if ( urlPatterns . length == 0 ) throw new IllegalArgumentException ( String . format ( "Missing pattern mapping for '%s'" , servletClass . getName ( ) ) ) ; for ( String urlPattern : urlPatterns ) { registerServlet ( servletClass , urlPattern ) ; } return ( SC ) this ; |
public class CPAttachmentFileEntryLocalServiceUtil {
    /**
     * Returns the cp attachment file entry matching the UUID and group.
     * Static facade that delegates to the looked-up service instance.
     *
     * @param uuid the cp attachment file entry's UUID
     * @param groupId the primary key of the group
     * @return the matching cp attachment file entry, or <code>null</code> if a matching cp attachment file entry could not be found
     */
    public static com.liferay.commerce.product.model.CPAttachmentFileEntry fetchCPAttachmentFileEntryByUuidAndGroupId(String uuid, long groupId) {
        return getService().fetchCPAttachmentFileEntryByUuidAndGroupId(uuid, groupId);
    }
}
public class HtmlUtils {
    /**
     * Escapes double quotes (") and backslashes (\) in the given string. These
     * characters are replaced with (&amp;quot;) and (\\) respectively; all other
     * characters are copied through unchanged.
     *
     * @param value the string to escape; must not be null
     * @return the escaped string
     */
    public static String escapeEscapes(String value) {
        assert (value != null);
        // FIX: the quote branch previously appended a malformed literal; a double
        // quote must become the HTML entity "&quot;" per the documented contract.
        StringBuilder sb = new StringBuilder(value.length());
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (c == '"') {
                sb.append("&quot;");
                continue;
            }
            if (c == '\\') {
                sb.append("\\\\");
                continue;
            }
            sb.append(c);
        }
        return sb.toString();
    }
}
public class Time {
    /**
     * Generates time reports for a specific team, hiding financial info
     * (delegates with the "limited" flag set to true).
     *
     * @param company Company ID
     * @param team Team ID
     * @param params Parameters
     * @throws JSONException if an error occurred
     * @return {@link JSONObject}
     */
    public JSONObject getByTeamLimited(String company, String team, HashMap<String, String> params) throws JSONException {
        return _getByType(company, team, null, params, true);
    }
}
public class AdjustedRangeInputStream {
    /**
     * Reads up to {@code length} bytes into {@code buffer}, capped by the number
     * of bytes remaining in the adjusted (virtual) range. Closes the stream once
     * the virtual range is exhausted.
     *
     * @see java.io.InputStream#read(byte[], int, int)
     */
    @Override
    public int read(byte[] buffer, int offset, int length) throws IOException {
        abortIfNeeded();
        int numBytesRead;
        // If no more bytes are available, do not read any bytes into the buffer
        if (this.virtualAvailable <= 0) {
            numBytesRead = -1;
        } else {
            // If the desired read length is greater than the number of available bytes,
            // shorten the read length to the number of available bytes.
            if (length > this.virtualAvailable) {
                // If the number of available bytes is greater than the maximum value of a 32 bit int, then
                // read as many bytes as an int can.
                length = (this.virtualAvailable < Integer.MAX_VALUE) ? (int) this.virtualAvailable : Integer.MAX_VALUE;
            }
            // Read bytes into the buffer.
            numBytesRead = this.decryptedContents.read(buffer, offset, length);
        }
        // If we were able to read bytes, decrement the number of bytes available to be read.
        if (numBytesRead != -1) {
            this.virtualAvailable -= numBytesRead;
        } else {
            // If we've reached the end of the stream, close it
            this.virtualAvailable = 0;
            close();
        }
        return numBytesRead;
    }
}
public class DateKeySerializer { /** * { @ inheritDoc } */
@ Override protected String doSerialize ( Date value , JsonSerializationContext ctx ) { } } | if ( ctx . isWriteDateKeysAsTimestamps ( ) ) { return Long . toString ( value . getTime ( ) ) ; } else { return DateFormat . format ( value ) ; } |
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the IfcGasTerminalTypeEnum EEnum, lazily resolved from the
     * registered EMF package (classifier index 843 in the generated model).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcGasTerminalTypeEnum() {
        if (ifcGasTerminalTypeEnumEEnum == null) {
            ifcGasTerminalTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(843);
        }
        return ifcGasTerminalTypeEnumEEnum;
    }
}
public class ResultUtils { /** * Check converted result compatibility with required type .
* @ param result result object
* @ param targetType target type
* @ throws ResultConversionException if result doesn ' t match required type */
public static void check ( final Object result , final Class < ? > targetType ) { } } | if ( result != null && ! targetType . isAssignableFrom ( result . getClass ( ) ) ) { // note : conversion logic may go wrong ( e . g . because converter expect collection input mostly and may
// not work correctly for single element ) , but anyway overall conversion would be considered failed .
throw new ResultConversionException ( String . format ( "Failed to convert %s to %s" , toStringType ( result ) , targetType . getSimpleName ( ) ) ) ; } |
public class Tile { /** * Defines the behavior of the visualization where the needle / bar should
* start from 0 instead of the minValue . This is especially useful when
* working with a gauge that has a range with a negative minValue
* @ param IS _ TRUE */
public void setStartFromZero ( final boolean IS_TRUE ) { } } | if ( null == startFromZero ) { _startFromZero = IS_TRUE ; setValue ( IS_TRUE && getMinValue ( ) < 0 ? 0 : getMinValue ( ) ) ; fireTileEvent ( REDRAW_EVENT ) ; } else { startFromZero . set ( IS_TRUE ) ; } |
public class ListSubscriptionDefinitionsRequestMarshaller {
    /**
     * Marshalls each field of the given request via the bound protocol marshaller.
     *
     * @param listSubscriptionDefinitionsRequest the request to marshall; must not be null.
     * @param protocolMarshaller the protocol marshaller receiving the field values.
     * @throws SdkClientException if the request is null or marshalling fails.
     */
    public void marshall(ListSubscriptionDefinitionsRequest listSubscriptionDefinitionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listSubscriptionDefinitionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listSubscriptionDefinitionsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listSubscriptionDefinitionsRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ResourceGroovyMethods {
    /**
     * Creates a new BufferedWriter for this file, passes it to the closure, and
     * ensures the stream is flushed and closed after the closure returns.
     *
     * @param file    a File
     * @param closure a closure
     * @return the value returned by the closure
     * @throws IOException if an IOException occurs.
     * @since 1.5.2
     */
    public static <T> T withWriter(File file, @ClosureParams(value = SimpleType.class, options = "java.io.BufferedWriter") Closure<T> closure) throws IOException {
        // Delegates to the stream-based helper: newWriter(file) supplies the BufferedWriter,
        // and IOGroovyMethods.withWriter guarantees flush/close even if the closure throws.
        return IOGroovyMethods.withWriter(newWriter(file), closure);
    }
}
public class AnnotationMethodResolver { /** * Find a < em > single < / em > Method on the Class of the given candidate object that
* contains the annotation type for which this resolver is searching .
* @ param candidate the instance whose Class will be checked for the annotation
* @ return a single matching Method instance or < code > null < / code > if the candidate ' s
* Class contains no Methods with the specified annotation
* @ throws IllegalArgumentException if more than one Method has the specified
* annotation */
public Method findMethod ( Object candidate ) { } } | Assert . notNull ( candidate , "candidate object must not be null" ) ; Class < ? > targetClass = AopUtils . getTargetClass ( candidate ) ; if ( targetClass == null ) { targetClass = candidate . getClass ( ) ; } return this . findMethod ( targetClass ) ; |
public class AnalysisResults {
    /**
     * Get the number of performed runs of the given search when solving the given problem.
     *
     * @param problemID ID of the problem
     * @param searchID  ID of the applied search
     * @return number of performed runs of the given search when solving the given problem
     * @throws UnknownIDException if an unknown problem or search ID is given
     */
    public int getNumRuns(String problemID, String searchID) {
        // Validate the problem ID first so the error message distinguishes the two failure modes.
        if (!results.containsKey(problemID)) {
            throw new UnknownIDException("Unknown problem ID " + problemID + ".");
        }
        if (!results.get(problemID).containsKey(searchID)) {
            throw new UnknownIDException("Unknown search ID " + searchID + " for problem " + problemID + ".");
        }
        // NOTE(review): results.get(problemID) is looked up three times; hoisting it into a
        // local would avoid the repeated lookups, but requires the nested map's declared type.
        return results.get(problemID).get(searchID).size();
    }
}
public class McGregor {
    /**
     * Start McGregor search and extend the mappings if possible.
     *
     * @param largestMappingSize size (atoms of both molecules combined) of the largest mapping found so far
     * @param presentMapping     current atom-atom mapping (source atom index -> target atom index)
     * @throws IOException
     */
    public void startMcGregorIteration(int largestMappingSize, Map<Integer, Integer> presentMapping) throws IOException {
        // Each mapping entry covers one atom from each molecule, hence the division by 2.
        this.globalMCSSize = (largestMappingSize / 2);
        List<String> cTab1Copy = McGregorChecks.generateCTabCopy(source);
        List<String> cTab2Copy = McGregorChecks.generateCTabCopy(target);

        // find mapped atoms of both molecules and store these in mappedAtoms
        // (flat list: source index followed by its mapped target index)
        List<Integer> mappedAtoms = new ArrayList<Integer>();
        for (Map.Entry<Integer, Integer> map : presentMapping.entrySet()) {
            mappedAtoms.add(map.getKey());
            mappedAtoms.add(map.getValue());
        }
        int mappingSize = presentMapping.size();
        List<Integer> iBondNeighborsA = new ArrayList<Integer>();
        List<String> cBondNeighborsA = new ArrayList<String>();
        List<Integer> iBondSetA = new ArrayList<Integer>();
        List<String> cBondSetA = new ArrayList<String>();
        List<Integer> iBondNeighborsB = new ArrayList<Integer>();
        List<Integer> iBondSetB = new ArrayList<Integer>();
        List<String> cBondNeighborsB = new ArrayList<String>();
        List<String> cBondSetB = new ArrayList<String>();

        // find unmapped atoms of molecule A
        List<Integer> unmappedAtomsMolA = McGregorChecks.markUnMappedAtoms(true, source, presentMapping);
        int counter = 0;
        int gSetBondNumA = 0;
        int gSetBondNumB = 0;
        int gNeighborBondnumA = 0; // number of remaining molecule A bonds after the clique search, which are neighbors of the MCS_1
        int gNeighborBondNumB = 0; // number of remaining molecule B bonds after the clique search, which are neighbors of the MCS_1

        // Process the query molecule (A). The processor mutates the CTab copies and
        // neighbor/set vectors, which are read back below — ordering matters here.
        QueryProcessor queryProcess = new QueryProcessor(cTab1Copy, cTab2Copy, SIGNS, gNeighborBondnumA, gSetBondNumA, iBondNeighborsA, cBondNeighborsA, mappingSize, iBondSetA, cBondSetA);
        if (!(source instanceof IQueryAtomContainer)) {
            queryProcess.process(source, target, unmappedAtomsMolA, mappedAtoms, counter);
        } else {
            queryProcess.process((IQueryAtomContainer) source, target, unmappedAtomsMolA, mappedAtoms, counter);
        }
        cTab1Copy = queryProcess.getCTab1();
        cTab2Copy = queryProcess.getCTab2();
        gSetBondNumA = queryProcess.getBondNumA();
        gNeighborBondnumA = queryProcess.getNeighborBondNumA();
        iBondNeighborsA = queryProcess.getIBondNeighboursA();
        cBondNeighborsA = queryProcess.getCBondNeighborsA();

        // find unmapped atoms of molecule B
        List<Integer> unmappedAtomsMolB = McGregorChecks.markUnMappedAtoms(false, target, presentMapping);
        // Extract bonds which are related with unmapped atoms of molecule B.
        // In case that unmapped atoms are connected with already mapped atoms, the mapped atoms are labelled with
        // new special signs -> the result are two vectors: cBondNeighborsA and int_bonds_molB, which contain those
        // bonds of molecule B, which are relevant for the McGregorBondTypeInSensitive algorithm.
        // The special signs must be transfered to the corresponding atoms of molecule A
        TargetProcessor targetProcess = new TargetProcessor(cTab1Copy, cTab2Copy, SIGNS, gNeighborBondNumB, gSetBondNumB, iBondNeighborsB, cBondNeighborsB, gNeighborBondnumA, iBondNeighborsA, cBondNeighborsA);
        targetProcess.process(target, unmappedAtomsMolB, mappingSize, iBondSetB, cBondSetB, mappedAtoms, counter);
        cTab1Copy = targetProcess.getCTab1();
        cTab2Copy = targetProcess.getCTab2();
        gSetBondNumB = targetProcess.getBondNumB();
        gNeighborBondNumB = targetProcess.getNeighborBondNumB();
        iBondNeighborsB = targetProcess.getIBondNeighboursB();
        cBondNeighborsB = targetProcess.getCBondNeighborsB();

        // Bundle everything the iteration step needs and continue the extension search.
        boolean dummy = false;
        McgregorHelper mcGregorHelper = new McgregorHelper(dummy, presentMapping.size(), mappedAtoms, gNeighborBondnumA, gNeighborBondNumB, iBondNeighborsA, iBondNeighborsB, cBondNeighborsA, cBondNeighborsB, gSetBondNumA, gSetBondNumB, iBondSetA, iBondSetB, cBondSetA, cBondSetB);
        iterator(mcGregorHelper);
    }
}
public class Drawable {
    /**
     * Load an animated sprite, giving horizontal and vertical frames (sharing the same surface). It may be useful in
     * case of multiple animated sprites.
     * {@link SpriteAnimated#load()} must not be called as surface has already been loaded.
     *
     * @param surface          The surface reference (must not be <code>null</code>).
     * @param horizontalFrames The number of horizontal frames (must be strictly positive).
     * @param verticalFrames   The number of vertical frames (must be strictly positive).
     * @return The loaded animated sprite.
     * @throws LionEngineException If arguments are invalid.
     */
    public static SpriteAnimated loadSpriteAnimated(ImageBuffer surface, int horizontalFrames, int verticalFrames) {
        // Argument validation is delegated to the implementation's constructor.
        return new SpriteAnimatedImpl(surface, horizontalFrames, verticalFrames);
    }
}
public class LogTable {
    /**
     * Log this transaction: serialize header and (for non-remove types) field data
     * into the buffer, then hand the physical data to the low-level logger.
     *
     * @param record     the record whose state is being logged
     * @param strTrxType The transaction type.
     */
    public void logTrx(FieldList record, String strTrxType) {
        BaseBuffer buffer = this.getBuffer();
        buffer.clearBuffer();
        // Header: transaction type, table name(s), and the record's counter value.
        buffer.addHeader(strTrxType);
        buffer.addHeader(record.getTableNames(false));
        buffer.addHeader(record.getCounterField().toString());
        // NOTE(review): this uses != (reference comparison) against ProxyConstants.REMOVE;
        // correct only if strTrxType is always the same interned constant — verify callers.
        if (ProxyConstants.REMOVE != strTrxType)
            buffer.fieldsToBuffer(record);
        Object objLogData = buffer.getPhysicalData();
        this.logTrx(objLogData);
    }
}
public class GetWorkflowApprovalRequests { /** * Runs the example .
* @ param adManagerServices the services factory .
* @ param session the session .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session ) throws RemoteException { } } | WorkflowRequestServiceInterface workflowRequestService = adManagerServices . get ( session , WorkflowRequestServiceInterface . class ) ; // Create a statement to select workflow requests .
StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "type = :type" ) . orderBy ( "id ASC" ) . limit ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) . withBindVariableValue ( "type" , WorkflowRequestType . WORKFLOW_APPROVAL_REQUEST . toString ( ) ) ; // Retrieve a small amount of workflow requests at a time , paging through
// until all workflow requests have been retrieved .
int totalResultSetSize = 0 ; do { WorkflowRequestPage page = workflowRequestService . getWorkflowRequestsByStatement ( statementBuilder . toStatement ( ) ) ; if ( page . getResults ( ) != null ) { // Print out some information for each workflow request .
totalResultSetSize = page . getTotalResultSetSize ( ) ; int i = page . getStartIndex ( ) ; for ( WorkflowRequest workflowRequest : page . getResults ( ) ) { System . out . printf ( "%d) Workflow request with ID %d, entity type '%s', and entity ID %d was found.%n" , i ++ , workflowRequest . getId ( ) , workflowRequest . getEntityType ( ) , workflowRequest . getEntityId ( ) ) ; } } statementBuilder . increaseOffsetBy ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) ; } while ( statementBuilder . getOffset ( ) < totalResultSetSize ) ; System . out . printf ( "Number of results found: %d%n" , totalResultSetSize ) ; |
public class TypeSimplifier { /** * Finds the top - level types for all the declared types ( classes and interfaces ) in the given
* { @ code Set < TypeMirror > } .
* < p > The returned set contains only top - level types . If we reference { @ code java . util . Map . Entry }
* then the returned set will contain { @ code java . util . Map } . This is because we want to write
* { @ code Map . Entry } everywhere rather than { @ code Entry } . */
private static Set < TypeMirror > topLevelTypes ( Types typeUtil , Set < TypeMirror > types ) { } } | return types . stream ( ) . map ( typeMirror -> MoreElements . asType ( typeUtil . asElement ( typeMirror ) ) ) . map ( typeElement -> topLevelType ( typeElement ) . asType ( ) ) . collect ( toCollection ( TypeMirrorSet :: new ) ) ; |
public class GenericBoJdbcDao { /** * Fetch an existing BO from storage by id .
* @ param conn
* @ param id
* @ return */
protected T get ( Connection conn , BoId id ) { } } | if ( id == null || id . values == null || id . values . length == 0 ) { return null ; } final String cacheKey = cacheKey ( id ) ; T bo = getFromCache ( getCacheName ( ) , cacheKey , typeClass ) ; if ( bo == null ) { bo = executeSelectOne ( rowMapper , conn , calcSqlSelectOne ( id ) , id . values ) ; putToCache ( getCacheName ( ) , cacheKey , bo ) ; } return bo ; |
public class HttpServer { /** * Configure the
* { @ link ServerCookieEncoder } ; { @ link ServerCookieDecoder } will be
* chosen based on the encoder
* @ param encoder the preferred ServerCookieEncoder
* @ return a new { @ link HttpServer } */
public final HttpServer cookieCodec ( ServerCookieEncoder encoder ) { } } | ServerCookieDecoder decoder = encoder == ServerCookieEncoder . LAX ? ServerCookieDecoder . LAX : ServerCookieDecoder . STRICT ; return tcpConfiguration ( tcp -> tcp . bootstrap ( b -> HttpServerConfiguration . cookieCodec ( b , encoder , decoder ) ) ) ; |
public class ExpressionTagQueryParser {
    /**
     * Grammar analysis: listener callback invoked when an object node of the tag-query
     * parse tree is entered. Appears to build a postfix list of tag-expression
     * evaluations — TODO confirm against the grammar.
     */
    @Override
    public void enterObject(TagQueryParser.ObjectContext ctx) {
        // Each object gets its own operand stack, nested on the parser-wide stack field.
        Stack<String> objectStack = new Stack<>();
        stack.push(objectStack);
        String eval;
        // Root-level object that is itself a tag expression: emit it directly.
        if (ctx.getParent().getParent() == null && ctx.tagexp() != null) {
            eval = getEval(ctx.tagexp());
            evalsPostfix.add(eval);
        }
        if (ctx.logical_operator() != null) {
            TagexpContext left = ctx.object(0).tagexp();
            TagexpContext right = ctx.object(1).tagexp();
            if (left != null && right != null) {
                String lEval = getEval(left);
                String rEval = getEval(right);
                // A NOT operand is emitted after its non-NOT sibling so the postfix order
                // stays evaluable.
                if (isNot(lEval) && !isNot(rEval)) {
                    evalsPostfix.add(rEval);
                    evalsPostfix.add(lEval);
                } else {
                    evalsPostfix.add(lEval);
                    evalsPostfix.add(rEval);
                }
            } else if (left != null && right == null) {
                String lEval = getEval(left);
                if (!isNot(lEval)) {
                    evalsPostfix.add(lEval);
                } else {
                    // Defer a lone NOT operand on this object's stack until later.
                    objectStack.push(lEval);
                }
            } else if (left == null && right != null) {
                String rEval = getEval(right);
                evalsPostfix.add(rEval);
            }
        }
    }
}
public class DeflatingStreamSinkConduit {
    /**
     * If we are in the flushing state then we flush to the underlying stream, otherwise just return true.
     *
     * @return false if there is still more to flush
     */
    private boolean performFlushIfRequired() throws IOException {
        if (anyAreSet(state, FLUSHING_BUFFER)) {
            // Gather the main buffer plus the optional overflow buffer into one vectored write.
            final ByteBuffer[] bufs = new ByteBuffer[additionalBuffer == null ? 1 : 2];
            long totalLength = 0;
            bufs[0] = currentBuffer.getBuffer();
            totalLength += bufs[0].remaining();
            if (additionalBuffer != null) {
                bufs[1] = additionalBuffer;
                totalLength += bufs[1].remaining();
            }
            if (totalLength > 0) {
                long total = 0;
                long res = 0;
                do {
                    res = next.write(bufs, 0, bufs.length);
                    total += res;
                    // Zero-length write: the sink can't accept more right now; report incomplete.
                    if (res == 0) {
                        return false;
                    }
                } while (total < totalLength);
            }
            // Everything written: reset buffers and clear the flushing flag.
            additionalBuffer = null;
            currentBuffer.getBuffer().clear();
            state = state & ~FLUSHING_BUFFER;
        }
        return true;
    }
}
public class MkCoPTree { /** * Adjusts the knn distance in the subtree of the specified root entry .
* @ param entry the root entry of the current subtree
* @ param knnLists a map of knn lists for each leaf entry */
private void adjustApproximatedKNNDistances ( MkCoPEntry entry , Map < DBID , KNNList > knnLists ) { } } | MkCoPTreeNode < O > node = getNode ( entry ) ; if ( node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkCoPLeafEntry leafEntry = ( MkCoPLeafEntry ) node . getEntry ( i ) ; approximateKnnDistances ( leafEntry , knnLists . get ( leafEntry . getRoutingObjectID ( ) ) ) ; } } else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkCoPEntry dirEntry = node . getEntry ( i ) ; adjustApproximatedKNNDistances ( dirEntry , knnLists ) ; } } ApproximationLine approx = node . conservativeKnnDistanceApproximation ( settings . kmax ) ; entry . setConservativeKnnDistanceApproximation ( approx ) ; |
public class Resolve {
    /**
     * Select the best method for a call site among two choices.
     *
     * @param env          The current environment.
     * @param site         The original type from where the selection takes place.
     * @param argtypes     The invocation's value arguments,
     * @param typeargtypes The invocation's type arguments,
     * @param sym          Proposed new best match.
     * @param bestSoFar    Previously found best match.
     * @param allowBoxing  Allow boxing conversions of arguments.
     * @param useVarargs   Box trailing arguments into an array for varargs.
     */
    @SuppressWarnings("fallthrough")
    Symbol selectBest(Env<AttrContext> env, Type site, List<Type> argtypes, List<Type> typeargtypes, Symbol sym, Symbol bestSoFar, boolean allowBoxing, boolean useVarargs, boolean operator) {
        // Erroneous or non-inherited candidates never displace the current best.
        if (sym.kind == ERR || !sym.isInheritedIn(site.tsym, types)) {
            return bestSoFar;
        } else if (useVarargs && (sym.flags() & VARARGS) == 0) {
            // In the varargs phase, a non-varargs candidate is only reported as bad
            // when nothing valid has been found yet.
            return bestSoFar.kind >= ERRONEOUS ? new BadVarargsMethod((ResolveError) bestSoFar.baseSymbol()) : bestSoFar;
        }
        Assert.check(sym.kind < AMBIGUOUS);
        try {
            Type mt = rawInstantiate(env, site, sym, null, argtypes, typeargtypes, allowBoxing, useVarargs, types.noWarnings);
            if (!operator || verboseResolutionMode.contains(VerboseResolutionMode.PREDEF))
                currentResolutionContext.addApplicableCandidate(sym, mt);
        } catch (InapplicableMethodException ex) {
            if (!operator)
                currentResolutionContext.addInapplicableCandidate(sym, ex.getDiagnostic());
            switch (bestSoFar.kind) {
                case ABSENT_MTH:
                    return new InapplicableSymbolError(currentResolutionContext);
                case WRONG_MTH:
                    if (operator) return bestSoFar;
                    // Upgrade single- to multi-candidate error, then fall through to return it.
                    bestSoFar = new InapplicableSymbolsError(currentResolutionContext);
                default:
                    return bestSoFar;
            }
        }
        // Applicable but inaccessible: record an access error only when nothing else was found.
        if (!isAccessible(env, site, sym)) {
            return (bestSoFar.kind == ABSENT_MTH) ? new AccessError(env, site, sym) : bestSoFar;
        }
        // Both applicable and accessible: pick the more specific of the two.
        return (bestSoFar.kind > AMBIGUOUS) ? sym : mostSpecific(argtypes, sym, bestSoFar, env, site, allowBoxing && operator, useVarargs);
    }
}
public class SolarisVirtualMachine { /** * The door is attached to . java _ pid < pid > in the temporary directory . */
private int openDoor ( int pid ) throws IOException { } } | String path = tmpdir + "/.java_pid" + pid ; ; fd = open ( path ) ; // Check that the file owner / permission to avoid attaching to
// bogus process
try { checkPermissions ( path ) ; } catch ( IOException ioe ) { close ( fd ) ; throw ioe ; } return fd ; |
public class SnsAPI { /** * 刷新access _ token ( 第三方平台开发 )
* @ param appid appid
* @ param refresh _ token refresh _ token
* @ param component _ appid 服务开发商的appid
* @ param component _ access _ token 服务开发方的access _ token
* @ return SnsToken */
public static SnsToken oauth2ComponentRefreshToken ( String appid , String refresh_token , String component_appid , String component_access_token ) { } } | HttpUriRequest httpUriRequest = RequestBuilder . post ( ) . setUri ( BASE_URI + "/sns/oauth2/component/refresh_token" ) . addParameter ( "appid" , appid ) . addParameter ( "refresh_token" , refresh_token ) . addParameter ( "grant_type" , "refresh_token" ) . addParameter ( "component_appid" , component_appid ) . addParameter ( "component_access_token" , component_access_token ) . build ( ) ; return LocalHttpClient . executeJsonResult ( httpUriRequest , SnsToken . class ) ; |
public class UserRoleDAO {
    /**
     * Reads all user-role rows for the given role.
     *
     * <p>Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly.
     *
     * @param trans transaction context
     * @param role  role name to look up by
     * @return result wrapping the matching rows
     * @throws DAOException
     */
    public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
        // Delegates to the prepared statement keyed by role; the text is used for tracing.
        return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
    }
}
public class Organizer { public static < K , V > Map < K , V > toMap ( Mappable < K , V > [ ] aMappables ) { } } | if ( aMappables == null ) throw new IllegalArgumentException ( "aMappables required in Organizer" ) ; Map < K , V > map = new HashMap < K , V > ( aMappables . length ) ; Mappable < K , V > mappable = null ; for ( int i = 0 ; i < aMappables . length ; i ++ ) { mappable = aMappables [ i ] ; map . put ( ( K ) mappable . getKey ( ) , ( V ) mappable . getValue ( ) ) ; } return map ; |
public class AddressDivisionGroupingBase { /** * gets the count of addresses that this address division grouping may represent
* If this address division grouping is not a subnet block of multiple addresses or has no range of values , then there is only one such address .
* @ return */
@ Override public BigInteger getCount ( ) { } } | BigInteger cached = cachedCount ; if ( cached == null ) { cachedCount = cached = getCountImpl ( ) ; } return cached ; |
public class AbstractMessageHandler {
    /**
     * Get the active operation.
     *
     * @param id the active operation id
     * @return the active operation, {@code null} if there is no registered operation
     */
    protected <T, A> ActiveOperation<T, A> getActiveOperation(final Integer id) {
        // The registry stores operations without the caller's type parameters;
        // the caller supplies the expected T/A, hence the unchecked cast.
        //noinspection unchecked
        return (ActiveOperation<T, A>) activeRequests.get(id);
    }
}
public class MethodCompiler { /** * Returns the name of local variable at index
* @ param index
* @ return */
public String getLocalName ( int index ) { } } | VariableElement lv = getLocalVariable ( index ) ; return lv . getSimpleName ( ) . toString ( ) ; |
public class CacheOnDisk { /** * return the sleep time in msec */
protected long calculateSleepTime ( ) { } } | Calendar c = new GregorianCalendar ( ) ; int currentHour = c . get ( Calendar . HOUR_OF_DAY ) ; int currentMin = c . get ( Calendar . MINUTE ) ; int currentSec = c . get ( Calendar . SECOND ) ; long stime = SECONDS_FOR_24_HOURS - ( ( currentHour * 60 + currentMin ) * 60 + currentSec ) + cleanupHour * 60 * 60 ; if ( stime > SECONDS_FOR_24_HOURS ) { stime = stime - SECONDS_FOR_24_HOURS ; } if ( stime < 10 ) { stime = 10 ; } stime = stime * 1000 ; // convert to msec
return stime ; |
public class DescribeDeploymentJobResult { /** * A list of robot deployment summaries .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRobotDeploymentSummary ( java . util . Collection ) } or
* { @ link # withRobotDeploymentSummary ( java . util . Collection ) } if you want to override the existing values .
* @ param robotDeploymentSummary
* A list of robot deployment summaries .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeDeploymentJobResult withRobotDeploymentSummary ( RobotDeployment ... robotDeploymentSummary ) { } } | if ( this . robotDeploymentSummary == null ) { setRobotDeploymentSummary ( new java . util . ArrayList < RobotDeployment > ( robotDeploymentSummary . length ) ) ; } for ( RobotDeployment ele : robotDeploymentSummary ) { this . robotDeploymentSummary . add ( ele ) ; } return this ; |
public class CmsADEConfigData {
    /**
     * Gets the formatters from the schema.<p>
     *
     * @param cms the current CMS context
     * @param res the resource for which the formatters should be retrieved
     * @return the formatters from the schema, or the empty configuration if lookup fails
     */
    protected CmsFormatterConfiguration getFormattersFromSchema(CmsObject cms, CmsResource res) {
        try {
            // Resolve the resource's type and ask it for its formatter configuration.
            return OpenCms.getResourceManager().getResourceType(res.getTypeId()).getFormattersForResource(cms, res);
        } catch (CmsException e) {
            // Log and fall back to an empty configuration rather than propagating the failure.
            LOG.error(e.getLocalizedMessage(), e);
            return CmsFormatterConfiguration.EMPTY_CONFIGURATION;
        }
    }
}
public class Timestamp {
    /**
     * Returns a Timestamp, precise to the second, with a given local offset.
     * This is equivalent to the corresponding Ion value
     * {@code YYYY-MM-DDThh:mm:ss+-oo:oo}, where {@code oo:oo} represents the
     * hour and minutes of the local offset from UTC.
     *
     * @param offset the local offset from UTC, measured in minutes;
     *               may be {@code null} to represent an unknown local offset
     */
    public static Timestamp forSecond(int year, int month, int day, int hour, int minute, int second, Integer offset) {
        // Argument validation is delegated to the constructor.
        return new Timestamp(year, month, day, hour, minute, second, offset);
    }
}
public class PdfSignatureAppearance {
    /**
     * Gets the main appearance layer.
     * Consult <A HREF="http://partners.adobe.com/asn/developer/pdfs/tn/PPKAppearances.pdf">PPKAppearances.pdf</A>
     * for further details.
     *
     * @return the main appearance layer
     * @throws DocumentException on error
     */
    public PdfTemplate getAppearance() throws DocumentException {
        // Invisible signature: a zero-size, anonymous template is enough.
        if (isInvisible()) {
            PdfTemplate t = new PdfTemplate(writer);
            t.setBoundingBox(new Rectangle(0, 0));
            writer.addDirectTemplateSimple(t, null);
            return t;
        }
        // Layer n0: background (blank per Adobe's appearance spec).
        if (app[0] == null) {
            PdfTemplate t = app[0] = new PdfTemplate(writer);
            t.setBoundingBox(new Rectangle(100, 100));
            writer.addDirectTemplateSimple(t, new PdfName("n0"));
            t.setLiteral("% DSBlank\n");
        }
        // Layer n1: "validity unknown" question mark (only for pre-Acrobat-6 layer model).
        if (app[1] == null && !acro6Layers) {
            PdfTemplate t = app[1] = new PdfTemplate(writer);
            t.setBoundingBox(new Rectangle(100, 100));
            writer.addDirectTemplateSimple(t, new PdfName("n1"));
            t.setLiteral(questionMark);
        }
        // Layer n2: the visible signature content (text, optional image/graphic).
        if (app[2] == null) {
            String text;
            if (layer2Text == null) {
                // Default description: signer CN, date, optional reason and location.
                StringBuffer buf = new StringBuffer();
                buf.append("Digitally signed by ").append(PdfPKCS7.getSubjectFields((X509Certificate) certChain[0]).getField("CN")).append('\n');
                // NOTE(review): a new SimpleDateFormat per call — not shared, so thread-safety is fine here.
                SimpleDateFormat sd = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
                buf.append("Date: ").append(sd.format(signDate.getTime()));
                if (reason != null) buf.append('\n').append("Reason: ").append(reason);
                if (location != null) buf.append('\n').append("Location: ").append(location);
                text = buf.toString();
            } else
                text = layer2Text;
            PdfTemplate t = app[2] = new PdfTemplate(writer);
            t.setBoundingBox(rect);
            writer.addDirectTemplateSimple(t, new PdfName("n2"));
            // Optional background image, scaled per imageScale (0 = stretch, <0 = best fit).
            if (image != null) {
                if (imageScale == 0) {
                    t.addImage(image, rect.getWidth(), 0, 0, rect.getHeight(), 0, 0);
                } else {
                    float usableScale = imageScale;
                    if (imageScale < 0)
                        usableScale = Math.min(rect.getWidth() / image.getWidth(), rect.getHeight() / image.getHeight());
                    float w = image.getWidth() * usableScale;
                    float h = image.getHeight() * usableScale;
                    float x = (rect.getWidth() - w) / 2;
                    float y = (rect.getHeight() - h) / 2;
                    t.addImage(image, w, 0, 0, h, x, y);
                }
            }
            Font font;
            if (layer2Font == null) font = new Font();
            else font = new Font(layer2Font);
            float size = font.getSize();
            // Split the area into a signature half and a description half when a
            // name or graphic is rendered next to the description.
            Rectangle dataRect = null;
            Rectangle signatureRect = null;
            if (render == SignatureRenderNameAndDescription || (render == SignatureRenderGraphicAndDescription && this.signatureGraphic != null)) {
                // origin is the bottom-left
                signatureRect = new Rectangle(MARGIN, MARGIN, rect.getWidth() / 2 - MARGIN, rect.getHeight() - MARGIN);
                dataRect = new Rectangle(rect.getWidth() / 2 + MARGIN / 2, MARGIN, rect.getWidth() - MARGIN / 2, rect.getHeight() - MARGIN);
                // Portrait-shaped field: stack the halves vertically instead.
                if (rect.getHeight() > rect.getWidth()) {
                    signatureRect = new Rectangle(MARGIN, rect.getHeight() / 2, rect.getWidth() - MARGIN, rect.getHeight());
                    dataRect = new Rectangle(MARGIN, MARGIN, rect.getWidth() - MARGIN, rect.getHeight() / 2 - MARGIN);
                }
            } else {
                dataRect = new Rectangle(MARGIN, MARGIN, rect.getWidth() - MARGIN, rect.getHeight() * (1 - TOP_SECTION) - MARGIN);
            }
            if (render == SignatureRenderNameAndDescription) {
                // Left/top half: signer name, auto-fitted to the available rectangle.
                String signedBy = PdfPKCS7.getSubjectFields((X509Certificate) certChain[0]).getField("CN");
                Rectangle sr2 = new Rectangle(signatureRect.getWidth() - MARGIN, signatureRect.getHeight() - MARGIN);
                float signedSize = fitText(font, signedBy, sr2, -1, runDirection);
                ColumnText ct2 = new ColumnText(t);
                ct2.setRunDirection(runDirection);
                ct2.setSimpleColumn(new Phrase(signedBy, font), signatureRect.getLeft(), signatureRect.getBottom(), signatureRect.getRight(), signatureRect.getTop(), signedSize, Element.ALIGN_LEFT);
                ct2.go();
            } else if (render == SignatureRenderGraphicAndDescription) {
                // Left/top half: the provided signature graphic, scaled to fit.
                ColumnText ct2 = new ColumnText(t);
                ct2.setRunDirection(runDirection);
                ct2.setSimpleColumn(signatureRect.getLeft(), signatureRect.getBottom(), signatureRect.getRight(), signatureRect.getTop(), 0, Element.ALIGN_RIGHT);
                Image im = Image.getInstance(signatureGraphic);
                im.scaleToFit(signatureRect.getWidth(), signatureRect.getHeight());
                Paragraph p = new Paragraph();
                // must calculate the point to draw from to make image appear in middle of column
                float x = 0;
                // experimentation found this magic number to counteract Adobe's signature graphic, which
                // offsets the y co-ordinate by 15 units
                float y = -im.getScaledHeight() + 15;
                x = x + (signatureRect.getWidth() - im.getScaledWidth()) / 2;
                y = y - (signatureRect.getHeight() - im.getScaledHeight()) / 2;
                p.add(new Chunk(im, x + (signatureRect.getWidth() - im.getScaledWidth()) / 2, y, false));
                ct2.addElement(p);
                ct2.go();
            }
            // Description text: fit to its rectangle if no explicit size was configured.
            if (size <= 0) {
                Rectangle sr = new Rectangle(dataRect.getWidth(), dataRect.getHeight());
                size = fitText(font, text, sr, 12, runDirection);
            }
            ColumnText ct = new ColumnText(t);
            ct.setRunDirection(runDirection);
            ct.setSimpleColumn(new Phrase(text, font), dataRect.getLeft(), dataRect.getBottom(), dataRect.getRight(), dataRect.getTop(), size, Element.ALIGN_LEFT);
            ct.go();
        }
        // Layer n3: blank (pre-Acrobat-6 layer model only).
        if (app[3] == null && !acro6Layers) {
            PdfTemplate t = app[3] = new PdfTemplate(writer);
            t.setBoundingBox(new Rectangle(100, 100));
            writer.addDirectTemplateSimple(t, new PdfName("n3"));
            t.setLiteral("% DSBlank\n");
        }
        // Layer n4: the "Signature Not Verified" banner (pre-Acrobat-6 layer model only).
        if (app[4] == null && !acro6Layers) {
            PdfTemplate t = app[4] = new PdfTemplate(writer);
            t.setBoundingBox(new Rectangle(0, rect.getHeight() * (1 - TOP_SECTION), rect.getRight(), rect.getTop()));
            writer.addDirectTemplateSimple(t, new PdfName("n4"));
            Font font;
            if (layer2Font == null) font = new Font();
            else font = new Font(layer2Font);
            float size = font.getSize();
            String text = "Signature Not Verified";
            if (layer4Text != null) text = layer4Text;
            Rectangle sr = new Rectangle(rect.getWidth() - 2 * MARGIN, rect.getHeight() * TOP_SECTION - 2 * MARGIN);
            size = fitText(font, text, sr, 15, runDirection);
            ColumnText ct = new ColumnText(t);
            ct.setRunDirection(runDirection);
            ct.setSimpleColumn(new Phrase(text, font), MARGIN, 0, rect.getWidth() - MARGIN, rect.getHeight() - MARGIN, size, Element.ALIGN_LEFT);
            ct.go();
        }
        // Rotate the frame to match the page rotation (multiples of 90 degrees).
        int rotation = writer.reader.getPageRotation(page);
        Rectangle rotated = new Rectangle(rect);
        int n = rotation;
        while (n > 0) {
            rotated = rotated.rotate();
            n -= 90;
        }
        // FRM: composite template stacking the layers (n1/n3 scaled to 90% and centered).
        if (frm == null) {
            frm = new PdfTemplate(writer);
            frm.setBoundingBox(rotated);
            writer.addDirectTemplateSimple(frm, new PdfName("FRM"));
            float scale = Math.min(rect.getWidth(), rect.getHeight()) * 0.9f;
            float x = (rect.getWidth() - scale) / 2;
            float y = (rect.getHeight() - scale) / 2;
            scale /= 100;
            if (rotation == 90)
                frm.concatCTM(0, 1, -1, 0, rect.getHeight(), 0);
            else if (rotation == 180)
                frm.concatCTM(-1, 0, 0, -1, rect.getWidth(), rect.getHeight());
            else if (rotation == 270)
                frm.concatCTM(0, -1, 1, 0, 0, rect.getWidth());
            frm.addTemplate(app[0], 0, 0);
            if (!acro6Layers) frm.addTemplate(app[1], scale, 0, 0, scale, x, y);
            frm.addTemplate(app[2], 0, 0);
            if (!acro6Layers) {
                frm.addTemplate(app[3], scale, 0, 0, scale, x, y);
                frm.addTemplate(app[4], 0, 0);
            }
        }
        // Wrap FRM in a fresh anonymous template and hand that back to the caller.
        PdfTemplate napp = new PdfTemplate(writer);
        napp.setBoundingBox(rotated);
        writer.addDirectTemplateSimple(napp, null);
        napp.addTemplate(frm, 0, 0);
        return napp;
    }
}
public class CreateApplicationBundleMojo { /** * Writes an Info . plist file describing this bundle .
* @ param infoPlist The file to write Info . plist contents to
* @ param files A list of file names of the jar files to add in $ JAVAROOT
* @ throws MojoExecutionException */
private void writeInfoPlist ( File infoPlist , List files ) throws MojoExecutionException { } } | VelocityContext velocityContext = new VelocityContext ( ) ; velocityContext . put ( "mainClass" , mainClass ) ; velocityContext . put ( "cfBundleExecutable" , javaApplicationStub . getName ( ) ) ; velocityContext . put ( "vmOptions" , vmOptions ) ; velocityContext . put ( "bundleName" , bundleName ) ; velocityContext . put ( "iconFile" , iconFile == null ? "GenericJavaApp.icns" : iconFile . getName ( ) ) ; velocityContext . put ( "version" , version ) ; velocityContext . put ( "jvmVersion" , jvmVersion ) ; StringBuffer jarFilesBuffer = new StringBuffer ( ) ; jarFilesBuffer . append ( "<array>" ) ; for ( int i = 0 ; i < files . size ( ) ; i ++ ) { String name = ( String ) files . get ( i ) ; jarFilesBuffer . append ( "<string>" ) ; jarFilesBuffer . append ( "$JAVAROOT/" ) . append ( name ) ; jarFilesBuffer . append ( "</string>" ) ; } if ( additionalClasspath != null ) { for ( int i = 0 ; i < additionalClasspath . size ( ) ; i ++ ) { String pathElement = ( String ) additionalClasspath . get ( i ) ; jarFilesBuffer . append ( "<string>" ) ; jarFilesBuffer . append ( pathElement ) ; jarFilesBuffer . append ( "</string>" ) ; } } jarFilesBuffer . append ( "</array>" ) ; velocityContext . put ( "classpath" , jarFilesBuffer . toString ( ) ) ; try { String encoding = detectEncoding ( dictionaryFile , velocityContext ) ; getLog ( ) . debug ( "Detected encoding " + encoding + " for dictionary file " + dictionaryFile ) ; Writer writer = new OutputStreamWriter ( new FileOutputStream ( infoPlist ) , encoding ) ; velocity . getEngine ( ) . mergeTemplate ( dictionaryFile , encoding , velocityContext , writer ) ; writer . 
close ( ) ; } catch ( IOException e ) { throw new MojoExecutionException ( "Could not write Info.plist to file " + infoPlist , e ) ; } catch ( ParseErrorException e ) { throw new MojoExecutionException ( "Error parsing " + dictionaryFile , e ) ; } catch ( ResourceNotFoundException e ) { throw new MojoExecutionException ( "Could not find resource for template " + dictionaryFile , e ) ; } catch ( MethodInvocationException e ) { throw new MojoExecutionException ( "MethodInvocationException occured merging Info.plist template " + dictionaryFile , e ) ; } catch ( Exception e ) { throw new MojoExecutionException ( "Exception occured merging Info.plist template " + dictionaryFile , e ) ; } |
public class FbBot { /** * Checks if there ' s any registered { @ link FbBotMillEvent } for the incoming
* callback . If there ' s any , then the callback is handled . The chain will be
* processed according to the { @ link BotMillPolicy } followed by this bot . If
* the policy is { @ link BotMillPolicy # FIRST _ ONLY } , the chain will be
* processed until the first callback matches . Otherwise , if the policy is
* { @ link BotMillPolicy # PROCESS _ ALL } , all the chain will always be
* processed .
* @ param envelope
* the incoming message . */
public void processMessage ( MessageEnvelope envelope ) { } } | for ( ActionFrame f : this . actionFrameList ) { // If the policy is FIRST _ ONLY stop processing the chain at the
// first trigger .
this . envelope = new MessageEnvelope ( ) ; if ( f . getReplies ( ) != null && f . getReplies ( ) . size ( ) > 0 ) { if ( f . processMultipleReply ( envelope ) && this . botMillPolicy . equals ( BotMillPolicy . FIRST_ONLY ) ) { break ; } } else { if ( f . process ( envelope ) && this . botMillPolicy . equals ( BotMillPolicy . FIRST_ONLY ) ) { break ; } } } |
public class MtasDataDoubleAdvanced { /** * ( non - Javadoc )
* @ see
* mtas . codec . util . collector . MtasDataCollector # stringToBoundary ( java . lang .
* String , java . lang . Integer ) */
@ Override protected Double stringToBoundary ( String boundary , Integer segmentNumber ) throws IOException { } } | if ( segmentRegistration . equals ( SEGMENT_BOUNDARY_ASC ) || segmentRegistration . equals ( SEGMENT_BOUNDARY_DESC ) ) { if ( segmentNumber == null ) { return Double . valueOf ( boundary ) ; } else { return Double . valueOf ( boundary ) / segmentNumber ; } } else { throw new IOException ( "not available for segmentRegistration " + segmentRegistration ) ; } |
public class ElemExtensionDecl { /** * Get a function at a given index in this extension element
* @ param i Index of function to get
* @ return Name of Function at given index
* @ throws ArrayIndexOutOfBoundsException */
public String getFunction ( int i ) throws ArrayIndexOutOfBoundsException { } } | if ( null == m_functions ) throw new ArrayIndexOutOfBoundsException ( ) ; return ( String ) m_functions . elementAt ( i ) ; |
public class StateFilter { /** * / * ( non - Javadoc )
* @ see tuwien . auto . calimero . buffer . Configuration . NetworkFilter # init
* ( tuwien . auto . calimero . buffer . Configuration ) */
public void init ( Configuration c ) { } } | final DatapointModel m = c . getDatapointModel ( ) ; if ( m != null ) createReferences ( m ) ; |
public class CanonicalPlanner { /** * Create a JOIN or SOURCE node that contain the source information .
* @ param context the execution context
* @ param source the source to be processed ; may not be null
* @ param usedSelectors the map of { @ link SelectorName } s ( aliases or names ) used in the query .
* @ return the new plan ; never null */
protected PlanNode createPlanNode ( QueryContext context , Source source , Map < SelectorName , Table > usedSelectors ) { } } | if ( source instanceof Selector ) { // No join required . . .
assert source instanceof AllNodes || source instanceof NamedSelector ; Selector selector = ( Selector ) source ; PlanNode node = new PlanNode ( Type . SOURCE ) ; if ( selector . hasAlias ( ) ) { node . addSelector ( selector . alias ( ) ) ; node . setProperty ( Property . SOURCE_ALIAS , selector . alias ( ) ) ; node . setProperty ( Property . SOURCE_NAME , selector . name ( ) ) ; } else { node . addSelector ( selector . name ( ) ) ; node . setProperty ( Property . SOURCE_NAME , selector . name ( ) ) ; } // Validate the source name and set the available columns . . .
NameFactory nameFactory = context . getExecutionContext ( ) . getValueFactories ( ) . getNameFactory ( ) ; // Always use the qualified form when searching for tables
Table table = context . getSchemata ( ) . getTable ( selector . name ( ) . qualifiedForm ( nameFactory ) ) ; if ( table != null ) { if ( table instanceof View ) context . getHints ( ) . hasView = true ; if ( usedSelectors . put ( selector . aliasOrName ( ) , table ) != null ) { // There was already a table with this alias or name . . .
I18n msg = GraphI18n . selectorNamesMayNotBeUsedMoreThanOnce ; context . getProblems ( ) . addError ( msg , selector . aliasOrName ( ) . getString ( ) ) ; } node . setProperty ( Property . SOURCE_COLUMNS , table . getColumns ( ) ) ; } else { context . getProblems ( ) . addError ( GraphI18n . tableDoesNotExist , selector . name ( ) ) ; } return node ; } if ( source instanceof Join ) { Join join = ( Join ) source ; JoinCondition joinCondition = join . getJoinCondition ( ) ; // Set up new join node corresponding to this join predicate
PlanNode node = new PlanNode ( Type . JOIN ) ; node . setProperty ( Property . JOIN_TYPE , join . type ( ) ) ; node . setProperty ( Property . JOIN_ALGORITHM , JoinAlgorithm . NESTED_LOOP ) ; node . setProperty ( Property . JOIN_CONDITION , joinCondition ) ; context . getHints ( ) . hasJoin = true ; if ( join . type ( ) == JoinType . LEFT_OUTER ) { context . getHints ( ) . hasOptionalJoin = true ; } // Handle each child
Source [ ] clauses = new Source [ ] { join . getLeft ( ) , join . getRight ( ) } ; for ( int i = 0 ; i < 2 ; i ++ ) { PlanNode sourceNode = createPlanNode ( context , clauses [ i ] , usedSelectors ) ; node . addLastChild ( sourceNode ) ; } // Add selectors to the joinNode
for ( PlanNode child : node . getChildren ( ) ) { node . addSelectors ( child . getSelectors ( ) ) ; } return node ; } // should not get here ; if we do , somebody added a new type of source
assert false ; return null ; |
public class JsonDeserializer { /** * Deserialize the null value . This method allows children to override the default behaviour .
* @ param reader { @ link JsonReader } used to read the JSON input
* @ param ctx Context for the full deserialization process
* @ param params Parameters for this deserialization
* @ return the deserialized object */
protected T deserializeNullValue ( JsonReader reader , JsonDeserializationContext ctx , JsonDeserializerParameters params ) { } } | reader . skipValue ( ) ; return null ; |
public class DictionaryCompressionOptimizer { /** * Choose a dictionary column to convert to direct encoding . We do this by predicting the compression ration
* of the stripe if a singe column is flipped to direct . So for each column , we try to predict the row count
* when we will hit a stripe flush limit if that column were converted to direct . Once we know the row count , we
* calculate the predicted compression ratio .
* @ param totalNonDictionaryBytes current size of the stripe without non - dictionary columns
* @ param stripeRowCount current number of rows in the stripe
* @ return the column that would produce the best stripe compression ration if converted to direct */
private DictionaryCompressionProjection selectDictionaryColumnToConvert ( int totalNonDictionaryBytes , int stripeRowCount ) { } } | checkState ( ! directConversionCandidates . isEmpty ( ) ) ; int totalNonDictionaryBytesPerRow = totalNonDictionaryBytes / stripeRowCount ; // rawBytes = sum of the length of every row value ( without dictionary encoding )
// dictionaryBytes = sum of the length of every entry in the dictionary
// indexBytes = bytes used encode the dictionary index ( e . g . , 2 byte for dictionary less than 65536 entries )
long totalDictionaryRawBytes = 0 ; long totalDictionaryBytes = 0 ; long totalDictionaryIndexBytes = 0 ; long totalDictionaryRawBytesPerRow = 0 ; long totalDictionaryBytesPerNewRow = 0 ; long totalDictionaryIndexBytesPerRow = 0 ; for ( DictionaryColumnManager column : allWriters ) { if ( ! column . isDirectEncoded ( ) ) { totalDictionaryRawBytes += column . getRawBytes ( ) ; totalDictionaryBytes += column . getDictionaryBytes ( ) ; totalDictionaryIndexBytes += column . getIndexBytes ( ) ; totalDictionaryRawBytesPerRow += column . getRawBytesPerRow ( ) ; totalDictionaryBytesPerNewRow += column . getDictionaryBytesPerFutureRow ( ) ; totalDictionaryIndexBytesPerRow += column . getIndexBytesPerRow ( ) ; } } long totalUncompressedBytesPerRow = totalNonDictionaryBytesPerRow + totalDictionaryRawBytesPerRow ; DictionaryCompressionProjection maxProjectedCompression = null ; for ( DictionaryColumnManager column : directConversionCandidates ) { // determine the size of the currently written stripe if we were convert this column to direct
long currentRawBytes = totalNonDictionaryBytes + column . getRawBytes ( ) ; long currentDictionaryBytes = totalDictionaryBytes - column . getDictionaryBytes ( ) ; long currentIndexBytes = totalDictionaryIndexBytes - column . getIndexBytes ( ) ; long currentTotalBytes = currentRawBytes + currentDictionaryBytes + currentIndexBytes ; // estimate the size of each new row if we were convert this column to direct
double rawBytesPerFutureRow = totalNonDictionaryBytesPerRow + column . getRawBytesPerRow ( ) ; double dictionaryBytesPerFutureRow = totalDictionaryBytesPerNewRow - column . getDictionaryBytesPerFutureRow ( ) ; double indexBytesPerFutureRow = totalDictionaryIndexBytesPerRow - column . getIndexBytesPerRow ( ) ; double totalBytesPerFutureRow = rawBytesPerFutureRow + dictionaryBytesPerFutureRow + indexBytesPerFutureRow ; // estimate how many rows until we hit a limit and flush the stripe if we convert this column to direct
long rowsToDictionaryMemoryLimit = ( long ) ( ( dictionaryMemoryMaxBytesLow - currentDictionaryBytes ) / dictionaryBytesPerFutureRow ) ; long rowsToStripeMemoryLimit = ( long ) ( ( stripeMaxBytes - currentTotalBytes ) / totalBytesPerFutureRow ) ; long rowsToStripeRowLimit = stripeMaxRowCount - stripeRowCount ; long rowsToLimit = Longs . min ( rowsToDictionaryMemoryLimit , rowsToStripeMemoryLimit , rowsToStripeRowLimit ) ; // predict the compression ratio at that limit if we were convert this column to direct
long predictedUncompressedSizeAtLimit = totalNonDictionaryBytes + totalDictionaryRawBytes + ( totalUncompressedBytesPerRow * rowsToLimit ) ; long predictedCompressedSizeAtLimit = ( long ) ( currentTotalBytes + ( totalBytesPerFutureRow * rowsToLimit ) ) ; double predictedCompressionRatioAtLimit = 1.0 * predictedUncompressedSizeAtLimit / predictedCompressedSizeAtLimit ; // convert the column that creates the best compression ratio
if ( maxProjectedCompression == null || maxProjectedCompression . getPredictedFileCompressionRatio ( ) < predictedCompressionRatioAtLimit ) { maxProjectedCompression = new DictionaryCompressionProjection ( column , predictedCompressionRatioAtLimit ) ; } } return maxProjectedCompression ; |
public class McCodeGen { /** * Output class import
* @ param def definition
* @ param out Writer
* @ throws IOException ioException */
@ Override public void writeImport ( Definition def , Writer out ) throws IOException { } } | out . write ( "package " + def . getRaPackage ( ) + ";\n\n" ) ; if ( def . isSupportEis ( ) ) { out . write ( "import java.io.IOException;\n" ) ; } out . write ( "import java.io.PrintWriter;\n" ) ; if ( def . isSupportEis ( ) ) { out . write ( "import java.net.Socket;\n" ) ; } out . write ( "import java.util.ArrayList;\n" ) ; out . write ( "import java.util.Collections;\n" ) ; out . write ( "import java.util.HashSet;\n" ) ; out . write ( "import java.util.List;\n" ) ; out . write ( "import java.util.Set;\n" ) ; importLogging ( def , out ) ; out . write ( "import javax.resource.NotSupportedException;\n" ) ; out . write ( "import javax.resource.ResourceException;\n" ) ; out . write ( "import javax.resource.spi.ConnectionEvent;\n" ) ; out . write ( "import javax.resource.spi.ConnectionEventListener;\n" ) ; out . write ( "import javax.resource.spi.ConnectionRequestInfo;\n" ) ; out . write ( "import javax.resource.spi.LocalTransaction;\n" ) ; out . write ( "import javax.resource.spi.ManagedConnection;\n" ) ; out . write ( "import javax.resource.spi.ManagedConnectionMetaData;\n\n" ) ; out . write ( "import javax.security.auth.Subject;\n" ) ; out . write ( "import javax.transaction.xa.XAResource;\n\n" ) ; |
public class AWSStorageGatewayClient { /** * Activates the gateway you previously deployed on your host . In the activation process , you specify information
* such as the region you want to use for storing snapshots or tapes , the time zone for scheduled snapshots the
* gateway snapshot schedule window , an activation key , and a name for your gateway . The activation process also
* associates your gateway with your account ; for more information , see < a > UpdateGatewayInformation < / a > .
* < note >
* You must turn on the gateway VM before you can activate your gateway .
* < / note >
* @ param activateGatewayRequest
* A JSON object containing one or more of the following fields : < / p >
* < ul >
* < li >
* < a > ActivateGatewayInput $ ActivationKey < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ GatewayName < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ GatewayRegion < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ GatewayTimezone < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ GatewayType < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ TapeDriveType < / a >
* < / li >
* < li >
* < a > ActivateGatewayInput $ MediumChangerType < / a >
* < / li >
* @ return Result of the ActivateGateway operation returned by the service .
* @ throws InvalidGatewayRequestException
* An exception occurred because an invalid gateway request was issued to the service . For more information ,
* see the error and message fields .
* @ throws InternalServerErrorException
* An internal server error has occurred during the request . For more information , see the error and message
* fields .
* @ sample AWSStorageGateway . ActivateGateway
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / ActivateGateway " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public ActivateGatewayResult activateGateway ( ActivateGatewayRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeActivateGateway ( request ) ; |
public class StopWords { /** * Is stop word .
* @ param text the text
* @ return the boolean */
public boolean isStopWord ( HString text ) { } } | if ( text == null ) { return true ; } else if ( text . isInstance ( Types . TOKEN ) ) { return isTokenStopWord ( Cast . as ( text ) ) ; } return text . tokens ( ) . stream ( ) . allMatch ( this :: isTokenStopWord ) ; |
public class AppServicePlansInner { /** * List all capabilities of an App Service plan .
* List all capabilities of an App Service plan .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service plan .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; CapabilityInner & gt ; object */
public Observable < ServiceResponse < List < CapabilityInner > > > listCapabilitiesWithServiceResponseAsync ( String resourceGroupName , String name ) { } } | if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( name == null ) { throw new IllegalArgumentException ( "Parameter name is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . listCapabilities ( resourceGroupName , name , this . client . subscriptionId ( ) , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < List < CapabilityInner > > > > ( ) { @ Override public Observable < ServiceResponse < List < CapabilityInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < List < CapabilityInner > > clientResponse = listCapabilitiesDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class AnnotatedJacksonModule { /** * A static method to create an instance of { @ link AnnotatedJacksonModule } .
* @ param clientBindingAnnotation a { @ link BindingAnnotation } to which the { @ link ObjectMapper }
* need to be annotated with .
* @ return an instance of { @ link AnnotatedJacksonModule } */
public static AnnotatedJacksonModule with ( Class < ? extends Annotation > clientBindingAnnotation ) { } } | if ( clientBindingAnnotation == null ) { throw new NullPointerException ( "clientBindingAnnotation:null" ) ; } BindingAnnotations . checkIsBindingAnnotation ( clientBindingAnnotation ) ; return new AnnotatedJacksonModule ( clientBindingAnnotation ) ; |
public class CustomerChangeData { /** * Gets the changedFeeds value for this CustomerChangeData .
* @ return changedFeeds * A list of feed changes for the customer as specified in the
* selector . If a feed is included in
* the selector then it will be included in this list ,
* even if the feed did not change . */
public com . google . api . ads . adwords . axis . v201809 . ch . FeedChangeData [ ] getChangedFeeds ( ) { } } | return changedFeeds ; |
public class hqlParser { /** * hql . g : 584:1 : atom : primaryExpression ( DOT ^ identifier ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ? | lb = OPEN _ BRACKET ^ expression CLOSE _ BRACKET ! ) * ; */
public final hqlParser . atom_return atom ( ) throws RecognitionException { } } | hqlParser . atom_return retval = new hqlParser . atom_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token op = null ; Token lb = null ; Token DOT236 = null ; Token CLOSE239 = null ; Token CLOSE_BRACKET241 = null ; ParserRuleReturnScope primaryExpression235 = null ; ParserRuleReturnScope identifier237 = null ; ParserRuleReturnScope exprList238 = null ; ParserRuleReturnScope expression240 = null ; CommonTree op_tree = null ; CommonTree lb_tree = null ; CommonTree DOT236_tree = null ; CommonTree CLOSE239_tree = null ; CommonTree CLOSE_BRACKET241_tree = null ; try { // hql . g : 585:3 : ( primaryExpression ( DOT ^ identifier ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ? | lb = OPEN _ BRACKET ^ expression CLOSE _ BRACKET ! ) * )
// hql . g : 585:5 : primaryExpression ( DOT ^ identifier ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ? | lb = OPEN _ BRACKET ^ expression CLOSE _ BRACKET ! ) *
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; pushFollow ( FOLLOW_primaryExpression_in_atom2774 ) ; primaryExpression235 = primaryExpression ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , primaryExpression235 . getTree ( ) ) ; // hql . g : 586:3 : ( DOT ^ identifier ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ? | lb = OPEN _ BRACKET ^ expression CLOSE _ BRACKET ! ) *
loop83 : while ( true ) { int alt83 = 3 ; int LA83_0 = input . LA ( 1 ) ; if ( ( LA83_0 == DOT ) ) { alt83 = 1 ; } else if ( ( LA83_0 == OPEN_BRACKET ) ) { alt83 = 2 ; } switch ( alt83 ) { case 1 : // hql . g : 587:4 : DOT ^ identifier ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ?
{ DOT236 = ( Token ) match ( input , DOT , FOLLOW_DOT_in_atom2783 ) ; DOT236_tree = ( CommonTree ) adaptor . create ( DOT236 ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( DOT236_tree , root_0 ) ; pushFollow ( FOLLOW_identifier_in_atom2786 ) ; identifier237 = identifier ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , identifier237 . getTree ( ) ) ; // hql . g : 588:5 : ( options { greedy = true ; } : ( op = OPEN ^ exprList CLOSE ! ) ) ?
int alt82 = 2 ; int LA82_0 = input . LA ( 1 ) ; if ( ( LA82_0 == OPEN ) ) { alt82 = 1 ; } switch ( alt82 ) { case 1 : // hql . g : 589:6 : ( op = OPEN ^ exprList CLOSE ! )
{ // hql . g : 589:6 : ( op = OPEN ^ exprList CLOSE ! )
// hql . g : 589:8 : op = OPEN ^ exprList CLOSE !
{ op = ( Token ) match ( input , OPEN , FOLLOW_OPEN_in_atom2814 ) ; op_tree = ( CommonTree ) adaptor . create ( op ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( op_tree , root_0 ) ; op . setType ( METHOD_CALL ) ; pushFollow ( FOLLOW_exprList_in_atom2819 ) ; exprList238 = exprList ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , exprList238 . getTree ( ) ) ; CLOSE239 = ( Token ) match ( input , CLOSE , FOLLOW_CLOSE_in_atom2821 ) ; } } break ; } } break ; case 2 : // hql . g : 590:5 : lb = OPEN _ BRACKET ^ expression CLOSE _ BRACKET !
{ lb = ( Token ) match ( input , OPEN_BRACKET , FOLLOW_OPEN_BRACKET_in_atom2835 ) ; lb_tree = ( CommonTree ) adaptor . create ( lb ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( lb_tree , root_0 ) ; lb . setType ( INDEX_OP ) ; pushFollow ( FOLLOW_expression_in_atom2840 ) ; expression240 = expression ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , expression240 . getTree ( ) ) ; CLOSE_BRACKET241 = ( Token ) match ( input , CLOSE_BRACKET , FOLLOW_CLOSE_BRACKET_in_atom2842 ) ; } break ; default : break loop83 ; } } } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving
} return retval ; |
public class XMLHelper { /** * Helper program : Extracts the specified XPATH expression
* from an XML - String .
* @ param node the node
* @ param xString the x path
* @ return NodeList
* @ throws XPathExpressionException the x path expression exception */
public static NodeList getElementsB ( Node node , String xString ) throws XPathExpressionException { } } | XPathExpression xPath = compileX ( xString ) ; return ( NodeList ) xPath . evaluate ( node , XPathConstants . NODESET ) ; |
public class URIDestinationCreator { /** * Convert escaped backslash to single backslash .
* This method de - escapes double backslashes whilst at the same time
* checking that there are no single backslahes in the input string .
* @ param input The string to be processed
* @ return The modified String
* @ throws JMSException if an unescaped backslash is found */
private String unescapeBackslash ( String input ) throws JMSException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "unescapeBackslash" , input ) ; String result = input ; // If there are no backslashes then don ' t bother creating the buffer etc .
if ( input . indexOf ( "\\" ) != - 1 ) { int startValue = 0 ; StringBuffer tmp = new StringBuffer ( input ) ; while ( ( startValue = tmp . indexOf ( "\\" , startValue ) ) != - 1 ) { // check that the next character is also a \
if ( startValue + 1 < tmp . length ( ) && tmp . charAt ( startValue + 1 ) == '\\' ) { // remove the first slash
tmp . deleteCharAt ( startValue ) ; // increment startValue so that the next indexOf begins after the second slash
startValue ++ ; } else { // we ' ve found a single \ , so throw an exception
throw ( JMSException ) JmsErrorUtils . newThrowable ( JMSException . class , "BAD_ESCAPE_CHAR_CWSIA0387" , new Object [ ] { input } , tc ) ; } } result = tmp . toString ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "unescapeBackslash" , result ) ; return result ; |
public class CmsBreadCrumbConnector { /** * Appends a bread crumb entry . < p >
* @ param buffer the string buffer to append to
* @ param target the target state
* @ param label the entry label */
private void appendBreadCrumbEntry ( StringBuffer buffer , String target , String label ) { } } | if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( target ) ) { buffer . append ( "<a href=\"#!" ) . append ( target ) . append ( "\" title=\"" + CmsDomUtil . escapeXml ( label ) + "\"><span>" ) . append ( label ) . append ( "</span></a>" ) ; } else { buffer . append ( "<span class=\"o-tools-breadcrumb-active\" title=\"" + CmsDomUtil . escapeXml ( label ) + "\"><span>" ) . append ( label ) . append ( "</span></span>" ) ; } |
public class Flushables { /** * Flush a { @ link Flushable } , with control over whether an { @ code IOException } may be thrown .
* < p > If { @ code swallowIOException } is true , then we don ' t rethrow { @ code IOException } , but merely
* log it .
* @ param flushable the { @ code Flushable } object to be flushed .
* @ param swallowIOException if true , don ' t propagate IO exceptions thrown by the { @ code flush }
* method
* @ throws IOException if { @ code swallowIOException } is false and { @ link Flushable # flush } throws
* an { @ code IOException } .
* @ see Closeables # close */
public static void flush ( Flushable flushable , boolean swallowIOException ) throws IOException { } } | try { flushable . flush ( ) ; } catch ( IOException e ) { if ( swallowIOException ) { logger . log ( Level . WARNING , "IOException thrown while flushing Flushable." , e ) ; } else { throw e ; } } |
public class DirContextDnsResolver { /** * Perform hostname to address resolution .
* @ param host the hostname , must not be empty or { @ literal null } .
* @ return array of one or more { @ link InetAddress adresses }
* @ throws UnknownHostException */
@ Override public InetAddress [ ] resolve ( String host ) throws UnknownHostException { } } | if ( ipStringToBytes ( host ) != null ) { return new InetAddress [ ] { InetAddress . getByAddress ( ipStringToBytes ( host ) ) } ; } List < InetAddress > inetAddresses = new ArrayList < > ( ) ; try { resolve ( host , inetAddresses ) ; } catch ( NamingException e ) { throw new UnknownHostException ( String . format ( "Cannot resolve %s to a hostname because of %s" , host , e ) ) ; } if ( inetAddresses . isEmpty ( ) ) { throw new UnknownHostException ( String . format ( "Cannot resolve %s to a hostname" , host ) ) ; } return inetAddresses . toArray ( new InetAddress [ inetAddresses . size ( ) ] ) ; |
public class SIPFramer { /** * Helper function that checks whether or not the data could be a SIP message . It is a very
* basic check but if it doesn ' t go through it definitely is not a SIP message .
* @ param data
* @ return */
public static boolean couldBeSipMessage ( final Buffer data ) throws IOException { } } | if ( data . getReadableBytes ( ) < 4 ) { return false ; } final byte a = data . getByte ( 0 ) ; final byte b = data . getByte ( 1 ) ; final byte c = data . getByte ( 2 ) ; final byte d = data . getByte ( 3 ) ; return a == 'S' && b == 'I' && c == 'P' || // response
a == 'I' && b == 'N' && c == 'V' && d == 'I' || // INVITE
a == 'A' && b == 'C' && c == 'K' || // ACK
a == 'B' && b == 'Y' && c == 'E' || // BYE
a == 'O' && b == 'P' && c == 'T' && d == 'I' || // OPTIONS
a == 'C' && b == 'A' && c == 'N' && d == 'C' || // CANCEL
a == 'M' && b == 'E' && c == 'S' && d == 'S' || // MESSAGE
a == 'R' && b == 'E' && c == 'G' && d == 'I' || // REGISTER
a == 'I' && b == 'N' && c == 'F' && d == 'O' || // INFO
a == 'P' && b == 'R' && c == 'A' && d == 'C' || // PRACK
a == 'S' && b == 'U' && c == 'B' && d == 'S' || // SUBSCRIBE
a == 'N' && b == 'O' && c == 'T' && d == 'I' || // NOTIFY
a == 'U' && b == 'P' && c == 'D' && d == 'A' || // UPDATE
a == 'R' && b == 'E' && c == 'F' && d == 'E' || // REFER
a == 'P' && b == 'U' && c == 'B' && d == 'L' ; // PUBLISH |
public class AppServiceEnvironmentsInner { /** * Get all worker pools of an App Service Environment .
* Get all worker pools of an App Service Environment .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; WorkerPoolResourceInner & gt ; object */
public Observable < Page < WorkerPoolResourceInner > > listWorkerPoolsNextAsync ( final String nextPageLink ) { } } | return listWorkerPoolsNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < WorkerPoolResourceInner > > , Page < WorkerPoolResourceInner > > ( ) { @ Override public Page < WorkerPoolResourceInner > call ( ServiceResponse < Page < WorkerPoolResourceInner > > response ) { return response . body ( ) ; } } ) ; |
public class TasksResult { /** * Returns the tags for the specified priority .
* @ param priority
* the priority
* @ return the tags for the specified priority */
public final String getTags ( final Priority priority ) { } } | if ( priority == Priority . HIGH ) { return highTags ; } else if ( priority == Priority . NORMAL ) { return normalTags ; } else { return lowTags ; } |
public class ConfigurationUtils { /** * Get a global integer property . This method will first try to get the value from an
* environment variable and if that does not exist it will look up a system property .
* @ param key Name of the variable
* @ param defaultValue Returned if neither env var nor system property are defined
* @ return String the value of the Environment or System Property if defined , the given
* default value otherwise */
public static Integer tryGetGlobalIntegerProperty ( String key , Integer defaultValue ) { } } | try { String value = System . getenv ( formatEnvironmentVariable ( key ) ) ; if ( value == null ) { return tryGetIntegerProperty ( key , defaultValue ) ; } else { return Integer . parseInt ( value ) ; } } catch ( SecurityException | NumberFormatException e ) { logger . error ( "Could not get value of global property {} due to SecurityManager. Using default value." , key , e ) ; return defaultValue ; } |
public class XMLEmitter { /** * XML text node .
* @ param aText
* The contained text array
* @ param nOfs
* Offset into the array where to start
* @ param nLen
* Number of chars to use , starting from the provided offset . */
public void onText ( @ Nonnull final char [ ] aText , @ Nonnegative final int nOfs , @ Nonnegative final int nLen ) { } } | onText ( aText , nOfs , nLen , true ) ; |
public class LazyGroupMember { /** * Attempts to load the lazy command from the command registry of the parent command group , but
* only if it hasn ' t already been loaded . */
private void loadIfNecessary ( ) { } } | if ( loadedMember != null ) { return ; } CommandRegistry commandRegistry = parentGroup . getCommandRegistry ( ) ; Assert . isTrue ( parentGroup . getCommandRegistry ( ) != null , "Command registry must be set for group '" + parentGroup . getId ( ) + "' in order to load lazy command '" + lazyCommandId + "'." ) ; if ( commandRegistry . containsCommandGroup ( lazyCommandId ) ) { CommandGroup group = commandRegistry . getCommandGroup ( lazyCommandId ) ; loadedMember = new SimpleGroupMember ( parentGroup , group ) ; } else if ( commandRegistry . containsActionCommand ( lazyCommandId ) ) { ActionCommand command = commandRegistry . getActionCommand ( lazyCommandId ) ; loadedMember = new SimpleGroupMember ( parentGroup , command ) ; } else { if ( logger . isWarnEnabled ( ) ) { logger . warn ( "Lazy command '" + lazyCommandId + "' was asked to display; however, no backing command instance exists in registry." ) ; } } if ( addedLazily && loadedMember != null ) { loadedMember . onAdded ( ) ; } |
public class AbstractSarlMojo { /** * Extract the dependencies that are declared for a Maven plugin .
* This function reads the list of the dependencies in the configuration
* resource file with { @ link MavenHelper # getConfig ( String ) } .
* The key given to { @ link MavenHelper # getConfig ( String ) } is
* < code > & lt ; configurationKeyPrefix & gt ; . dependencies < / code > .
* @ param configurationKeyPrefix the string that is the prefix in the configuration file .
* @ return the list of the dependencies .
* @ throws MojoExecutionException if something cannot be done when extracting the dependencies . */
protected Dependency [ ] getDependenciesFor ( String configurationKeyPrefix ) throws MojoExecutionException { } } | final List < Dependency > dependencies = new ArrayList < > ( ) ; final Pattern pattern = Pattern . compile ( "^[ \t\n\r]*([^: \t\n\t]+)[ \t\n\r]*:[ \t\n\r]*([^: \t\n\t]+)[ \t\n\r]*$" ) ; // $ NON - NLS - 1 $
final String rawDependencies = this . mavenHelper . getConfig ( configurationKeyPrefix + ".dependencies" ) ; // $ NON - NLS - 1 $
final Map < String , Dependency > pomDependencies = this . mavenHelper . getPluginDependencies ( ) ; for ( final String dependencyId : rawDependencies . split ( "\\s*[;|,]+\\s*" ) ) { // $ NON - NLS - 1 $
final Matcher matcher = pattern . matcher ( dependencyId ) ; if ( matcher != null && matcher . matches ( ) ) { final String dependencyGroupId = matcher . group ( 1 ) ; final String dependencyArtifactId = matcher . group ( 2 ) ; final String dependencyKey = ArtifactUtils . versionlessKey ( dependencyGroupId , dependencyArtifactId ) ; final Dependency dependencyObject = pomDependencies . get ( dependencyKey ) ; if ( dependencyObject == null ) { throw new MojoExecutionException ( MessageFormat . format ( Messages . AbstractSarlMojo_4 , dependencyKey ) ) ; } dependencies . add ( dependencyObject ) ; } } final Dependency [ ] dependencyArray = new Dependency [ dependencies . size ( ) ] ; dependencies . toArray ( dependencyArray ) ; return dependencyArray ; |
public class Log4JLogger { /** * Check whether the Log4j Logger used is enabled for < code > FATAL < / code >
* priority . */
public boolean isFatalEnabled ( ) { } } | if ( IS12 ) { return getLogger ( ) . isEnabledFor ( Level . FATAL ) ; } return getLogger ( ) . isEnabledFor ( Level . FATAL ) ; |
public class NotificationEffect { /** * ringtone */
public void ringtone ( NotificationEntry entry ) { } } | if ( ! mEnabled ) { Log . w ( TAG , "failed to play ringtone. effect disabled." ) ; return ; } if ( mRingtoneEnabled && mRingtoneAuto && entry . playRingtone && entry . ringtoneUri == null ) { // default ringtone
if ( DBG ) Log . d ( TAG , "[default] ringtone" ) ; entry . setRingtone ( mContext , mRingtoneRes ) ; } if ( entry . playRingtone ) { Uri ringtone = entry . ringtoneUri ; if ( ringtone == null ) { Log . e ( TAG , "ringtone uri not found." ) ; return ; } Ringtone r = null ; if ( mRingtone != null && mRingtoneUri != null && mRingtoneUri . equals ( ringtone ) ) { r = mRingtone ; } else { r = RingtoneManager . getRingtone ( mContext , ringtone ) ; mRingtone = r ; mRingtoneUri = ringtone ; } if ( r == null ) { Log . e ( TAG , "ringtone not found." ) ; return ; } if ( mAudioManager == null ) { mAudioManager = ( AudioManager ) mContext . getSystemService ( Context . AUDIO_SERVICE ) ; } if ( mAudioManager != null && mAudioManager . getStreamVolume ( r . getStreamType ( ) ) == 0 ) { Log . i ( TAG , "volume muted. won't play any ringtone." ) ; return ; } if ( DBG ) Log . d ( TAG , "ringtone - " + entry . ringtoneUri ) ; r . play ( ) ; } |
public class JavaEscapeUtil { /** * Perform an escape operation , based on String , according to the specified level . */
static String escape ( final String text , final JavaEscapeLevel escapeLevel ) { } } | if ( text == null ) { return null ; } final int level = escapeLevel . getEscapeLevel ( ) ; StringBuilder strBuilder = null ; final int offset = 0 ; final int max = text . length ( ) ; int readOffset = offset ; for ( int i = offset ; i < max ; i ++ ) { final int codepoint = Character . codePointAt ( text , i ) ; /* * Shortcut : most characters will be ASCII / Alphanumeric , and we won ' t need to do anything at
* all for them */
if ( codepoint <= ( ESCAPE_LEVELS_LEN - 2 ) && level < ESCAPE_LEVELS [ codepoint ] ) { continue ; } /* * Shortcut : we might not want to escape non - ASCII chars at all either . */
if ( codepoint > ( ESCAPE_LEVELS_LEN - 2 ) && level < ESCAPE_LEVELS [ ESCAPE_LEVELS_LEN - 1 ] ) { if ( Character . charCount ( codepoint ) > 1 ) { // This is to compensate that we are actually escaping two char [ ] positions with a single codepoint .
i ++ ; } continue ; } /* * At this point we know for sure we will need some kind of escape , so we
* can increase the offset and initialize the string builder if needed , along with
* copying to it all the contents pending up to this point . */
if ( strBuilder == null ) { strBuilder = new StringBuilder ( max + 20 ) ; } if ( i - readOffset > 0 ) { strBuilder . append ( text , readOffset , i ) ; } if ( Character . charCount ( codepoint ) > 1 ) { // This is to compensate that we are actually reading two char [ ] positions with a single codepoint .
i ++ ; } readOffset = i + 1 ; /* * Perform the real escape , attending the different combinations of SECs and UHEXA */
if ( codepoint < SEC_CHARS_LEN ) { // We will try to use a SEC
final char sec = SEC_CHARS [ codepoint ] ; if ( sec != SEC_CHARS_NO_SEC ) { // SEC found ! just write it and go for the next char
strBuilder . append ( ESCAPE_PREFIX ) ; strBuilder . append ( sec ) ; continue ; } } /* * No SEC - escape was possible , so we need uhexa escape . */
if ( Character . charCount ( codepoint ) > 1 ) { final char [ ] codepointChars = Character . toChars ( codepoint ) ; strBuilder . append ( ESCAPE_UHEXA_PREFIX ) ; strBuilder . append ( toUHexa ( codepointChars [ 0 ] ) ) ; strBuilder . append ( ESCAPE_UHEXA_PREFIX ) ; strBuilder . append ( toUHexa ( codepointChars [ 1 ] ) ) ; continue ; } strBuilder . append ( ESCAPE_UHEXA_PREFIX ) ; strBuilder . append ( toUHexa ( codepoint ) ) ; } /* * Final cleaning : return the original String object if no escape was actually needed . Otherwise
* append the remaining unescaped text to the string builder and return . */
if ( strBuilder == null ) { return text ; } if ( max - readOffset > 0 ) { strBuilder . append ( text , readOffset , max ) ; } return strBuilder . toString ( ) ; |
public class TopicDefinition { /** * / * ( non - Javadoc )
* @ see net . timewalker . ffmq4 . management . destination . AbstractDestinationDescriptor # initFromSettings ( net . timewalker . ffmq4 . utils . Settings ) */
@ Override protected void initFromSettings ( Settings settings ) { } } | super . initFromSettings ( settings ) ; this . subscriberFailurePolicy = settings . getIntProperty ( "subscriberFailurePolicy" , FFMQSubscriberPolicy . SUBSCRIBER_POLICY_LOG ) ; this . subscriberOverflowPolicy = settings . getIntProperty ( "subscriberOverflowPolicy" , FFMQSubscriberPolicy . SUBSCRIBER_POLICY_LOG ) ; String rawPartitionsKeysToIndex = settings . getStringProperty ( "partitionsKeysToIndex" ) ; if ( rawPartitionsKeysToIndex != null ) { StringTokenizer st = new StringTokenizer ( rawPartitionsKeysToIndex , ", " ) ; this . partitionsKeysToIndex = new String [ st . countTokens ( ) ] ; int pos = 0 ; while ( st . hasMoreTokens ( ) ) this . partitionsKeysToIndex [ pos ++ ] = st . nextToken ( ) ; } |
public class ContentRepository { /** * Removes the specified access control entry if the given principal name matches the principal associated with the
* entry .
* @ param acList
* the access control list to remove the entry from
* @ param acEntry
* the entry to be potentially removed
* @ param principalName
* the name of the principal to match . Use the ' * ' wildcard to match all principal .
* @ throws RepositoryException
* if the entry removal failed */
private void removeAccessControlEntry ( final AccessControlList acList , final AccessControlEntry acEntry , final String principalName ) throws RepositoryException { } } | if ( ANY_WILDCARD . equals ( principalName ) || acEntry . getPrincipal ( ) . getName ( ) . equals ( principalName ) ) { acList . removeAccessControlEntry ( acEntry ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.