signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ElemTemplateElement { /** * Given a namespace , get the corrisponding prefix .
* 9/15/00 : This had been iteratively examining the m _ declaredPrefixes
* field for this node and its parents . That makes life difficult for
* the compilation experiment , which doesn ' t have a static vector of
* local declarations . Replaced a recursive solution , which permits
* easier subclassing / overriding .
* @ param prefix non - null reference to prefix string , which should map
* to a namespace URL .
* @ return The namespace URL that the prefix maps to , or null if no
* mapping can be found . */
public String getNamespaceForPrefix ( String prefix ) { } } | // if ( null ! = prefix & & prefix . equals ( " xmlns " ) )
// return Constants . S _ XMLNAMESPACEURI ;
List nsDecls = m_declaredPrefixes ; if ( null != nsDecls ) { int n = nsDecls . size ( ) ; if ( prefix . equals ( Constants . ATTRVAL_DEFAULT_PREFIX ) ) { prefix = "" ; } for ( int i = 0 ; i < n ; i ++ ) { XMLNSDecl decl = ( XMLNSDecl ) nsDecls . get ( i ) ; if ( prefix . equals ( decl . getPrefix ( ) ) ) return decl . getURI ( ) ; } } // Not found ; ask our ancestors
if ( null != m_parentNode ) return m_parentNode . getNamespaceForPrefix ( prefix ) ; // JJK : No ancestors ; try implicit
// % REVIEW % Are there literals somewhere that we should use instead ?
// % REVIEW % Is this really the best place to patch ?
if ( "xml" . equals ( prefix ) ) return "http://www.w3.org/XML/1998/namespace" ; // No parent , so no definition
return null ; |
public class AddInstanceGroupsResult { /** * Instance group IDs of the newly created instance groups .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setInstanceGroupIds ( java . util . Collection ) } or { @ link # withInstanceGroupIds ( java . util . Collection ) } if you
* want to override the existing values .
* @ param instanceGroupIds
* Instance group IDs of the newly created instance groups .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AddInstanceGroupsResult withInstanceGroupIds ( String ... instanceGroupIds ) { } } | if ( this . instanceGroupIds == null ) { setInstanceGroupIds ( new com . amazonaws . internal . SdkInternalList < String > ( instanceGroupIds . length ) ) ; } for ( String ele : instanceGroupIds ) { this . instanceGroupIds . add ( ele ) ; } return this ; |
public class ComputationGraph { /** * Set the state of the RNN layer , for use in { @ link # rnnTimeStep ( INDArray . . . ) }
* @ param layerName The name of the layer .
* @ param state The state to set the specified layer to */
public void rnnSetPreviousState ( String layerName , Map < String , INDArray > state ) { } } | Layer l = verticesMap . get ( layerName ) . getLayer ( ) ; if ( l instanceof org . deeplearning4j . nn . layers . wrapper . BaseWrapperLayer ) { l = ( ( org . deeplearning4j . nn . layers . wrapper . BaseWrapperLayer ) l ) . getUnderlying ( ) ; } if ( l == null || ! ( l instanceof RecurrentLayer ) ) { throw new UnsupportedOperationException ( "Layer \"" + layerName + "\" is not a recurrent layer. Cannot set state" ) ; } ( ( RecurrentLayer ) l ) . rnnSetPreviousState ( state ) ; |
public class FilesImpl {
    /**
     * Deletes the specified file from the compute node, asynchronously.
     *
     * @param poolId   The ID of the pool that contains the compute node.
     * @param nodeId   The ID of the compute node from which you want to delete the file.
     * @param filePath The path to the file or directory that you want to delete.
     * @param recursive Whether to delete children of a directory. If the filePath parameter
     *                  represents a directory instead of a file, you can set recursive to true
     *                  to delete the directory and all of the files and subdirectories in it.
     *                  If recursive is false then the directory must be empty or deletion will fail.
     * @param fileDeleteFromComputeNodeOptions Additional parameters for the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteFromComputeNodeAsync(String poolId, String nodeId, String filePath, Boolean recursive, FileDeleteFromComputeNodeOptions fileDeleteFromComputeNodeOptions, final ServiceCallback<Void> serviceCallback) {
        // Delegate to the observable-based overload and adapt its header-only
        // response into a ServiceFuture that also drives the supplied callback.
        return ServiceFuture.fromHeaderResponse(deleteFromComputeNodeWithServiceResponseAsync(poolId, nodeId, filePath, recursive, fileDeleteFromComputeNodeOptions), serviceCallback);
    }
}
public class RMIRegistryManager { /** * Checks if rmiregistry is running on the specified port .
* @ param port
* @ return true if rmiregistry is running on the specified port , false
* otherwise */
public static boolean isRMIRegistryRunning ( Configuration configuration , int port ) { } } | try { final Registry registry = RegistryFinder . getInstance ( ) . getRegistry ( configuration , port ) ; registry . list ( ) ; return true ; } catch ( RemoteException ex ) { return false ; } catch ( Exception e ) { return false ; } |
public class OptionGroup { /** * Adds an option to the option group .
* @ param value Value , not null and not repeated in the group
* @ param text Text , not null
* @ return This group */
public OptionGroup addOption ( String value , String text ) { } } | return this . addOption ( new Option ( value , text ) ) ; |
public class SingleEvaluatedMoveCache { /** * Cache the given evaluation , discarding any previously cached evaluations .
* @ param move move applied to the current solution
* @ param evaluation evaluation of obtained neighbour */
@ Override public final void cacheMoveEvaluation ( Move < ? > move , Evaluation evaluation ) { } } | evaluatedMove = move ; this . evaluation = evaluation ; |
public class UnicodeSet { /** * Returns true if this set contains every character
* of the given range .
* @ param start first character , inclusive , of the range
* @ param end last character , inclusive , of the range
* @ return true if the test condition is met */
public boolean contains ( int start , int end ) { } } | if ( start < MIN_VALUE || start > MAX_VALUE ) { throw new IllegalArgumentException ( "Invalid code point U+" + Utility . hex ( start , 6 ) ) ; } if ( end < MIN_VALUE || end > MAX_VALUE ) { throw new IllegalArgumentException ( "Invalid code point U+" + Utility . hex ( end , 6 ) ) ; } // int i = - 1;
// while ( true ) {
// if ( start < list [ + + i ] ) break ;
int i = findCodePoint ( start ) ; return ( ( i & 1 ) != 0 && end < list [ i ] ) ; |
public class FactoryFinder {
    /**
     * Tries to locate a provider via the Jar Service Provider mechanism: reads
     * the first line of {@code META-INF/services/<factoryId>} from the classpath
     * and instantiates the class named there.
     *
     * @param factoryId the service/factory identifier (used as the resource name)
     * @return an instance of the provider class if found, or null otherwise
     * @throws ConfigurationError if instantiation of the named class fails
     */
    private static Object findJarServiceProvider(String factoryId) throws ConfigurationError {
        String serviceId = "META-INF/services/" + factoryId;
        InputStream is = null;
        // First try the context ClassLoader, then fall back to the loader of this class.
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        if (cl != null) {
            is = cl.getResourceAsStream(serviceId);
        }
        if (is == null) {
            cl = FactoryFinder.class.getClassLoader();
            is = cl.getResourceAsStream(serviceId);
        }
        if (is == null) {
            // No provider resource on either classloader.
            return null;
        }
        if (debug) debugPrintln("found jar resource=" + serviceId + " using ClassLoader: " + cl);
        BufferedReader rd;
        try {
            // Service files are specified to be UTF-8.
            rd = new BufferedReader(new InputStreamReader(is, "UTF-8"), DEFAULT_LINE_LENGTH);
        } catch (java.io.UnsupportedEncodingException e) {
            // UTF-8 is mandated by the JVM spec, so this fallback should be unreachable.
            rd = new BufferedReader(new InputStreamReader(is), DEFAULT_LINE_LENGTH);
        }
        String factoryClassName = null;
        try {
            // XXX Does not handle all possible input as specified by the
            // Jar Service Provider specification (only the first line is read;
            // comments and multiple entries are not supported).
            factoryClassName = rd.readLine();
        } catch (IOException x) {
            // Treat an unreadable resource as "no provider found".
            return null;
        } finally {
            IoUtils.closeQuietly(rd);
        }
        if (factoryClassName != null && !"".equals(factoryClassName)) {
            if (debug) debugPrintln("found in resource, value=" + factoryClassName);
            // Instantiate with the same loader that located the resource.
            return newInstance(factoryClassName, cl);
        }
        // Resource was empty: no provider found.
        return null;
    }
}
public class ActionContext { /** * Return a { @ link UrlPath } of this context .
* Note this method is used only by { @ link Router } for dynamic path
* matching .
* @ return a { @ link UrlPath } of this context */
public UrlPath urlPath ( ) { } } | if ( null == urlPath ) { urlPath = UrlPath . of ( null == processedUrl ? req ( ) . url ( ) : processedUrl ) ; } return urlPath ; |
public class QueryParser {
    /**
     * ANTLR-generated rule method for:
     * src/riemann/Query.g:69:1: value : ( String | t | f | nil | INT | FLOAT ) ;
     * Parses one literal value and returns its AST fragment. Do not hand-edit
     * beyond comments: structure and statement order are generator-defined.
     */
    public final QueryParser.value_return value() throws RecognitionException {
        QueryParser.value_return retval = new QueryParser.value_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token String78 = null;
        Token INT82 = null;
        Token FLOAT83 = null;
        QueryParser.t_return t79 = null;
        QueryParser.f_return f80 = null;
        QueryParser.nil_return nil81 = null;
        CommonTree String78_tree = null;
        CommonTree INT82_tree = null;
        CommonTree FLOAT83_tree = null;
        try {
            // src/riemann/Query.g:69:7: ( ( String | t | f | nil | INT | FLOAT ) )
            // src/riemann/Query.g:69:10: ( String | t | f | nil | INT | FLOAT )
            {
                root_0 = (CommonTree) adaptor.nil();
                // src/riemann/Query.g:69:10: ( String | t | f | nil | INT | FLOAT )
                // Predict the alternative from a single token of lookahead.
                int alt30 = 6;
                switch (input.LA(1)) {
                    case String: { alt30 = 1; } break;
                    case 27: { alt30 = 2; } break;
                    case 28: { alt30 = 3; } break;
                    case 29:
                    case 30: { alt30 = 4; } break;
                    case INT: { alt30 = 5; } break;
                    case FLOAT: { alt30 = 6; } break;
                    default:
                        // No alternative matches the lookahead token.
                        NoViableAltException nvae = new NoViableAltException("", 30, 0, input);
                        throw nvae;
                }
                switch (alt30) {
                    case 1:
                        // src/riemann/Query.g:69:11: String
                        {
                            String78 = (Token) match(input, String, FOLLOW_String_in_value523);
                            String78_tree = (CommonTree) adaptor.create(String78);
                            adaptor.addChild(root_0, String78_tree);
                        }
                        break;
                    case 2:
                        // src/riemann/Query.g:69:20: t
                        {
                            pushFollow(FOLLOW_t_in_value527);
                            t79 = t();
                            state._fsp--;
                            adaptor.addChild(root_0, t79.getTree());
                        }
                        break;
                    case 3:
                        // src/riemann/Query.g:69:24: f
                        {
                            pushFollow(FOLLOW_f_in_value531);
                            f80 = f();
                            state._fsp--;
                            adaptor.addChild(root_0, f80.getTree());
                        }
                        break;
                    case 4:
                        // src/riemann/Query.g:69:28: nil
                        {
                            pushFollow(FOLLOW_nil_in_value535);
                            nil81 = nil();
                            state._fsp--;
                            adaptor.addChild(root_0, nil81.getTree());
                        }
                        break;
                    case 5:
                        // src/riemann/Query.g:69:34: INT
                        {
                            INT82 = (Token) match(input, INT, FOLLOW_INT_in_value539);
                            INT82_tree = (CommonTree) adaptor.create(INT82);
                            adaptor.addChild(root_0, INT82_tree);
                        }
                        break;
                    case 6:
                        // src/riemann/Query.g:69:40: FLOAT
                        {
                            FLOAT83 = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_value543);
                            FLOAT83_tree = (CommonTree) adaptor.create(FLOAT83);
                            adaptor.addChild(root_0, FLOAT83_tree);
                        }
                        break;
                }
            }
            // Record token boundaries and finalize the subtree.
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, and emit an error node.
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
        }
        return retval;
    }
}
public class NonVoltDBBackend { /** * Returns all column names for the specified table , in the order defined
* in the DDL . */
protected List < String > getAllColumns ( String tableName ) { } } | List < String > columns = new ArrayList < String > ( ) ; try { // Lower - case table names are required for PostgreSQL ; we might need to
// alter this if we use another comparison database ( besides HSQL ) someday
ResultSet rs = dbconn . getMetaData ( ) . getColumns ( null , null , tableName . toLowerCase ( ) , null ) ; while ( rs . next ( ) ) { columns . add ( rs . getString ( 4 ) ) ; } } catch ( SQLException e ) { printCaughtException ( "In NonVoltDBBackend.getAllColumns, caught SQLException: " + e ) ; } return columns ; |
public class AnnotationUtil {
    /**
     * Returns the retention policy of an annotation type: SOURCE (discarded by
     * the compiler), CLASS (kept in the class file) or RUNTIME (available via
     * reflection). Defaults to CLASS when no {@link Retention} meta-annotation
     * is present, matching the JLS default.
     *
     * @param annotationType the annotation type to inspect
     * @return the retention policy of the annotation type
     */
    public static RetentionPolicy getRetentionPolicy(Class<? extends Annotation> annotationType) {
        Retention meta = annotationType.getAnnotation(Retention.class);
        return (meta == null) ? RetentionPolicy.CLASS : meta.value();
    }
}
public class ConnectionImpl { /** * ( non - Javadoc )
* @ see com . ibm . wsspi . sib . core . SICoreConnection # receiveNoWait ( com . ibm . wsspi . sib . core . SITransaction , com . ibm . websphere . sib . Reliability ,
* com . ibm . websphere . sib . SIDestinationAddress , com . ibm . wsspi . sib . core . DestinationType , java . lang . String , java . lang . String , com . ibm . websphere . sib . Reliability ,
* com . ibm . wsspi . sib . core . OrderingContext )
* Added M7 Core SPI */
@ Override public SIBusMessage receiveNoWait ( SITransaction tran , Reliability unrecoverableReliability , SIDestinationAddress destAddr , DestinationType destinationType , SelectionCriteria criteria , Reliability reliability , String alternateUser ) throws SIConnectionDroppedException , SIConnectionUnavailableException , SIConnectionLostException , SILimitExceededException , SINotAuthorizedException , SIDestinationLockedException , SITemporaryDestinationNotFoundException , SIResourceException , SIErrorException , SIIncorrectCallException , SINotPossibleInCurrentConfigurationException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . entry ( CoreSPIConnection . tc , "receiveNoWait" , new Object [ ] { this , tran , unrecoverableReliability , destAddr , destinationType , criteria , reliability , alternateUser } ) ; SIBusMessage msg = internalReceiveNoWait ( tran , unrecoverableReliability , destAddr , destinationType , criteria , reliability , alternateUser , false ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConnection . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConnection . tc , "receiveNoWait" , msg ) ; return msg ; |
public class StringUtil {
    /**
     * Skips any space characters (' ') at or after {@code start} and returns the
     * index of the first non-space character (or the string length if the rest
     * of the string is spaces).
     *
     * @param s     the string to scan
     * @param start the index to start scanning from
     * @return index of the first non-space character at or after start
     */
    public static int skipSpaces(String s, int start) {
        final int length = s.length();
        int index = start;
        // Advance while we are still inside the string and looking at a space.
        while (index < length && s.charAt(index) == ' ') {
            index++;
        }
        return index;
    }
}
public class Validators { /** * Creates and returns a validator , which allows to validate texts to ensure , that they
* represent valid IPv4 addresses . Empty texts are also accepted .
* @ param context
* The context , which should be used to retrieve the error message , as an instance of
* the class { @ link Context } . The context may not be null
* @ param resourceId
* The resource ID of the string resource , which contains the error message , which
* should be set , as an { @ link Integer } value . The resource ID must correspond to a
* valid string resource
* @ return The validator , which has been created , as an instance of the type { @ link Validator } */
public static Validator < CharSequence > iPv4Address ( @ NonNull final Context context , @ StringRes final int resourceId ) { } } | return new IPv4AddressValidator ( context , resourceId ) ; |
public class ContextAnalyzer {
    /**
     * Finds the tense closest to the given timex within its sentence.
     * Scans the tokens before the timex for the last tense-bearing verb and the
     * tokens after it for the next one, then returns whichever is closer (in
     * token distance) to the timex. Tense categories come from the language's
     * RePatternManager POS/word patterns.
     *
     * @param timex    timex construct to discover tense data for
     * @param jcas     the CAS holding sentence and token annotations
     * @param language language whose tense patterns should be used
     * @return "PRESENTFUTURE", "PAST", "FUTURE", or "" if no tense was found
     */
    public static String getClosestTense(Timex3 timex, JCas jcas, Language language) {
        RePatternManager rpm = RePatternManager.getInstance(language, false);
        String lastTense = "";
        String nextTense = "";
        int tokenCounter = 0;
        int lastid = 0;   // token index of the last tense-bearing token before the timex
        int nextid = 0;   // token index of the first tense-bearing token after the timex
        int tid = 0;      // token index at which the timex itself starts
        // Locate the sentence that covers the timex.
        FSIterator iterSentence = jcas.getAnnotationIndex(Sentence.type).iterator();
        Sentence s = new Sentence(jcas);
        while (iterSentence.hasNext()) {
            s = (Sentence) iterSentence.next();
            if ((s.getBegin() <= timex.getBegin()) && (s.getEnd() >= timex.getEnd())) {
                break;
            }
        }
        // Collect the sentence's tokens, keyed (and ordered) by their end offset.
        TreeMap<Integer, Token> tmToken = new TreeMap<Integer, Token>();
        FSIterator iterToken = jcas.getAnnotationIndex(Token.type).subiterator(s);
        while (iterToken.hasNext()) {
            Token token = (Token) iterToken.next();
            tmToken.put(token.getEnd(), token);
        }
        // Pass 1: find the last tense-bearing token BEFORE the timex, and record
        // where the timex starts (tid).
        for (Integer tokEnd : tmToken.keySet()) {
            tokenCounter++;
            if (tokEnd < timex.getBegin()) {
                Token token = tmToken.get(tokEnd);
                Logger.printDetail("GET LAST TENSE: string:" + token.getCoveredText() + " pos:" + token.getPos());
                Logger.printDetail("hmAllRePattern.containsKey(tensePos4PresentFuture):" + rpm.get("tensePos4PresentFuture"));
                Logger.printDetail("hmAllRePattern.containsKey(tensePos4Future):" + rpm.get("tensePos4Future"));
                Logger.printDetail("hmAllRePattern.containsKey(tensePos4Past):" + rpm.get("tensePos4Past"));
                Logger.printDetail("CHECK TOKEN:" + token.getPos());
                if (token.getPos() == null) {
                    // No POS tag: cannot classify this token.
                } else if ((rpm.containsKey("tensePos4PresentFuture")) && (token.getPos().matches(rpm.get("tensePos4PresentFuture")))) {
                    lastTense = "PRESENTFUTURE";
                    lastid = tokenCounter;
                } else if ((rpm.containsKey("tensePos4Past")) && (token.getPos().matches(rpm.get("tensePos4Past")))) {
                    lastTense = "PAST";
                    lastid = tokenCounter;
                } else if ((rpm.containsKey("tensePos4Future")) && (token.getPos().matches(rpm.get("tensePos4Future")))) {
                    // FUTURE additionally requires the covered text to match the future-word pattern.
                    if (token.getCoveredText().matches(rpm.get("tenseWord4Future"))) {
                        lastTense = "FUTURE";
                        lastid = tokenCounter;
                    }
                }
            } else {
                // First token at or past the timex: remember its index once.
                if (tid == 0) {
                    tid = tokenCounter;
                }
            }
        }
        // Pass 2: find the first tense-bearing token AFTER the timex.
        tokenCounter = 0;
        for (Integer tokEnd : tmToken.keySet()) {
            tokenCounter++;
            if (nextTense.equals("")) {
                if (tokEnd > timex.getEnd()) {
                    Token token = tmToken.get(tokEnd);
                    Logger.printDetail("GET NEXT TENSE: string:" + token.getCoveredText() + " pos:" + token.getPos());
                    Logger.printDetail("hmAllRePattern.containsKey(tensePos4PresentFuture):" + rpm.get("tensePos4PresentFuture"));
                    Logger.printDetail("hmAllRePattern.containsKey(tensePos4Future):" + rpm.get("tensePos4Future"));
                    Logger.printDetail("hmAllRePattern.containsKey(tensePos4Past):" + rpm.get("tensePos4Past"));
                    Logger.printDetail("CHECK TOKEN:" + token.getPos());
                    if (token.getPos() == null) {
                        // No POS tag: cannot classify this token.
                    } else if ((rpm.containsKey("tensePos4PresentFuture")) && (token.getPos().matches(rpm.get("tensePos4PresentFuture")))) {
                        nextTense = "PRESENTFUTURE";
                        nextid = tokenCounter;
                    } else if ((rpm.containsKey("tensePos4Past")) && (token.getPos().matches(rpm.get("tensePos4Past")))) {
                        nextTense = "PAST";
                        nextid = tokenCounter;
                    } else if ((rpm.containsKey("tensePos4Future")) && (token.getPos().matches(rpm.get("tensePos4Future")))) {
                        if (token.getCoveredText().matches(rpm.get("tenseWord4Future"))) {
                            nextTense = "FUTURE";
                            nextid = tokenCounter;
                        }
                    }
                }
            }
        }
        // Pick whichever tense is available; when both exist, prefer the closer one.
        if (lastTense.equals("")) {
            Logger.printDetail("TENSE: " + nextTense);
            return nextTense;
        } else if (nextTense.equals("")) {
            Logger.printDetail("TENSE: " + lastTense);
            return lastTense;
        } else {
            // If there is tense before and after the timex token,
            // return the closer one:
            if ((tid - lastid) > (nextid - tid)) {
                Logger.printDetail("TENSE: " + nextTense);
                return nextTense;
            } else {
                Logger.printDetail("TENSE: " + lastTense);
                return lastTense;
            }
        }
    }
}
public class RequestParams {
    /**
     * Checks whether the given HTTP request matches every field configured on
     * this instance: method, path, query parameters, headers and (optionally)
     * body content. Fails on the first mismatch. Both header values and query
     * parameter values are interpreted as regular expressions.
     *
     * @param req the incoming request to test
     * @return true if the request matches all configured fields
     */
    public boolean matches(Request req) {
        // HTTP method must match exactly.
        if (!method.equals(req.getMethod())) return false;
        // Path: regex match or exact equality, depending on configuration.
        if (useRegexForPath) {
            if (!req.getPath().getPath().matches(basePath)) return false;
        } else {
            if (!basePath.equals(req.getPath().getPath())) return false;
        }
        // Every query parameter present on the request must be configured here...
        if (!queries.keySet().containsAll(req.getQuery().keySet())) return false;
        // ...and every configured header must be present on the request.
        if (!req.getNames().containsAll(headers.keySet())) return false;
        try {
            // Optional body substring check; a read failure counts as a mismatch.
            if (!isEmpty(bodyMustContain) && !req.getContent().contains(bodyMustContain)) return false;
        } catch (IOException e) {
            return false;
        }
        // Each request query value must match its configured regex (the regex is
        // guaranteed non-null by the containsAll check above).
        for (Map.Entry<String, String> reqQuery : req.getQuery().entrySet()) {
            String respRegex = queries.get(reqQuery.getKey());
            if (!reqQuery.getValue().matches(respRegex)) return false;
        }
        // Each configured header-value regex must match the request's header value.
        // NOTE(review): req.getValue(...) is assumed non-null because presence was
        // checked via getNames() above — confirm both use the same key casing.
        for (Map.Entry<String, String> header : headers.entrySet()) {
            String headerValueRegex = header.getValue();
            if (!req.getValue(header.getKey()).matches(headerValueRegex)) return false;
        }
        return true;
    }
}
public class MiriamLink {
    /**
     * Converts a MIRIAM URN into its equivalent Identifiers.org URL.
     *
     * @see #getURI(String, String) - use this to get the URN
     * @see #getIdentifiersOrgURI(String, String) - preferred URI
     * @param urn an existing MIRIAM URN, e.g., "urn:miriam:obo.go:GO%3A0045202"
     * @return the Identifiers.org URL corresponding to the data URN, e.g.,
     *         "http://identifiers.org/obo.go/GO:0045202"
     * @deprecated URL-decoding the last part of the URN may not always work as
     *             expected (test yours!). Decoding is now explicitly UTF-8 rather
     *             than the platform default charset.
     */
    @Deprecated
    public static String convertUrn(String urn) {
        String[] tokens = urn.split(":");
        // Second-to-last token is the data collection; last token is the
        // (percent-encoded) entity identifier.
        String collection = tokens[tokens.length - 2];
        // Decode with an explicit charset; the original used the deprecated
        // one-arg URLDecoder.decode, whose result depends on the platform charset.
        String entityId = URLDecoder.decode(tokens[tokens.length - 1], StandardCharsets.UTF_8);
        return "http://identifiers.org/" + collection + "/" + entityId;
    }
}
public class CommonConfigUtils { /** * Returns the value for the configuration attribute matching the key provided . If the value does not exist or is empty , the
* resulting value will be { @ code null } and an error message will be logged . */
public String getRequiredConfigAttributeWithDefaultValue ( Map < String , Object > props , String key , String defaultValue ) { } } | return getRequiredConfigAttributeWithDefaultValueAndConfigId ( props , key , defaultValue , null ) ; |
public class ModuleImpl { /** * This accessor is provided for unit testing only */
protected String getCachedFileName ( String key ) throws InterruptedException { } } | String result = null ; if ( _moduleBuilds != null ) { CacheEntry bld = _moduleBuilds . get ( key ) ; for ( int i = 0 ; i < 5 ; i ++ ) { if ( bld . filename == null ) { Thread . sleep ( 500L ) ; } } result = _moduleBuilds . get ( key ) . fileName ( ) ; } return result ; |
public class AttachProvider { /** * Attaches to a Java virtual machine .
* A Java virtual machine can be described using a { @ link
* com . sun . tools . attach . VirtualMachineDescriptor VirtualMachineDescriptor } .
* This method invokes the descriptor ' s { @ link
* com . sun . tools . attach . VirtualMachineDescriptor # provider ( ) provider ( ) } method
* to check that it is equal to this provider . It then attempts to attach to the
* Java virtual machine .
* @ param vmd The virtual machine descriptor
* @ return VirtualMachine representing the target virtual machine .
* @ throws SecurityException If a security manager has been installed and it denies
* { @ link com . sun . tools . attach . AttachPermission AttachPermission }
* < tt > ( " attachVirtualMachine " ) < / tt > , or other permission
* required by the implementation .
* @ throws AttachNotSupportedException If the descriptor ' s { @ link
* com . sun . tools . attach . VirtualMachineDescriptor # provider ( ) provider ( ) } method
* returns a provider that is not this provider , or it does not correspond
* to a Java virtual machine to which this provider can attach .
* @ throws IOException If some other I / O error occurs
* @ throws NullPointerException If < code > vmd < / code > is < code > null < / code > */
public VirtualMachine attachVirtualMachine ( VirtualMachineDescriptor vmd ) throws AttachNotSupportedException , IOException { } } | if ( vmd . provider ( ) != this ) { throw new AttachNotSupportedException ( "provider mismatch" ) ; } return attachVirtualMachine ( vmd . id ( ) ) ; |
public class BoxFolder { /** * Adds a collaborator to this folder . An email will be sent to the collaborator if they don ' t already have a Box
* account .
* @ param email the email address of the collaborator to add .
* @ param role the role of the collaborator .
* @ param notify the user / group should receive email notification of the collaboration or not .
* @ param canViewPath the view path collaboration feature is enabled or not .
* View path collaborations allow the invitee to see the entire ancestral path to the associated
* folder . The user will not gain privileges in any ancestral folder .
* @ return info about the new collaboration . */
public BoxCollaboration . Info collaborate ( String email , BoxCollaboration . Role role , Boolean notify , Boolean canViewPath ) { } } | JsonObject accessibleByField = new JsonObject ( ) ; accessibleByField . add ( "login" , email ) ; accessibleByField . add ( "type" , "user" ) ; return this . collaborate ( accessibleByField , role , notify , canViewPath ) ; |
public class MpxjFilter { /** * Apply a filter to the list of all tasks , and show the results .
* @ param project project file
* @ param filter filter */
private static void processTaskFilter ( ProjectFile project , Filter filter ) { } } | for ( Task task : project . getTasks ( ) ) { if ( filter . evaluate ( task , null ) ) { System . out . println ( task . getID ( ) + "," + task . getUniqueID ( ) + "," + task . getName ( ) ) ; } } |
public class CacheProxy { /** * Returns a copy of the value if value - based caching is enabled .
* @ param object the object to be copied
* @ param < T > the type of object being copied
* @ return a copy of the object if storing by value or the same instance if by reference */
@ SuppressWarnings ( "NullAway" ) protected final @ NonNull < T > T copyOf ( @ Nullable T object ) { } } | if ( object == null ) { return null ; } T copy = copier . copy ( object , cacheManager . getClassLoader ( ) ) ; return requireNonNull ( copy ) ; |
public class ScanQueryEngine {
    /**
     * Computes the effective row limit for scanning the current segment.
     * For unordered queries, rows already counted on other segments are
     * subtracted from the query limit. When time-ordering is applied, each
     * segment scans through the first `limit` rows independently, ignoring the
     * number of rows already counted on other segments.
     */
    private long calculateLimit(ScanQuery query, Map<String, Object> responseContext) {
        if (query.getOrder().equals(ScanQuery.Order.NONE)) {
            // NOTE(review): assumes the runner has already stored CTX_COUNT in the
            // responseContext — a missing key would NPE when unboxing to long.
            return query.getLimit() - (long) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT);
        }
        return query.getLimit();
    }
}
public class Parameters { /** * Creates a new set of parameters with only those parameters in the specified namespace ( that is ,
* prefixed by " namespace . " . The namespace prefix and period will be removed from parameter names
* in the new { @ code Parameters } . The name space name should * not * have a
* trailing " . " . */
public Parameters copyNamespace ( final String requestedNamespace ) { } } | checkArgument ( ! requestedNamespace . isEmpty ( ) ) ; checkArgument ( ! requestedNamespace . endsWith ( DELIM ) ) ; final ImmutableMap . Builder < String , String > ret = ImmutableMap . builder ( ) ; final String dottedNamespace = requestedNamespace + DELIM ; for ( final Map . Entry < String , String > param : params . entrySet ( ) ) { if ( param . getKey ( ) . startsWith ( dottedNamespace ) ) { ret . put ( param . getKey ( ) . substring ( dottedNamespace . length ( ) ) , param . getValue ( ) ) ; } } final List < String > newNamespace = Lists . newArrayList ( ) ; newNamespace . addAll ( namespace ) ; newNamespace . add ( requestedNamespace ) ; final Parameters paramsRet = new Parameters ( ret . build ( ) , newNamespace ) ; // our children inherit our listeners
for ( final Listener listener : listeners ) { paramsRet . registerListener ( listener ) ; } return paramsRet ; |
public class GridFilesystem { /** * Opens an InputStream for reading from the given file .
* @ param file the file to open for reading
* @ return an InputStream for reading from the file
* @ throws FileNotFoundException if the file does not exist or is a directory */
public InputStream getInput ( File file ) throws FileNotFoundException { } } | return file != null ? getInput ( file . getPath ( ) ) : null ; |
public class FeatureUtilities { /** * Getter for attributes of a feature .
* < p > If the attribute is not found , checks are done in non
* case sensitive mode .
* @ param feature the feature from which to get the attribute .
* @ param field the name of the field .
* @ return the attribute or null if none found . */
public static Object getAttributeCaseChecked ( SimpleFeature feature , String field ) { } } | Object attribute = feature . getAttribute ( field ) ; if ( attribute == null ) { attribute = feature . getAttribute ( field . toLowerCase ( ) ) ; if ( attribute != null ) return attribute ; attribute = feature . getAttribute ( field . toUpperCase ( ) ) ; if ( attribute != null ) return attribute ; // alright , last try , search for it
SimpleFeatureType featureType = feature . getFeatureType ( ) ; field = findAttributeName ( featureType , field ) ; if ( field != null ) { return feature . getAttribute ( field ) ; } } return attribute ; |
public class CmsStaticExportManager {
    /**
     * Returns the key for the online, export and secure cache: the site root
     * with the URI appended.<p>
     *
     * @param siteRoot the site root of the resource
     * @param uri      the URI of the resource
     * @return a key for the cache
     */
    public String getCacheKey(String siteRoot, String uri) {
        // StringBuilder replaces the original synchronized StringBuffer: the
        // buffer is method-local, so synchronization bought nothing.
        return new StringBuilder(siteRoot).append(uri).toString();
    }
}
public class ExprTag {
    /**
     * Evaluates the tag's deferred attribute expressions (currently only
     * {@code escapeXml}) and stores the results in the corresponding fields.
     *
     * @throws JspException if expression evaluation fails
     */
    private void evaluateExpressions() throws JspException {
        /* Note: we don't check for type mismatches here; we assume
         * the expression evaluator will return the expected type
         * (by virtue of knowledge we give it about what that type is).
         * A ClassCastException here is truly unexpected, so we let it
         * propagate up. */
        if (escapeXml_ != null) {
            // Evaluate the raw attribute expression as a Boolean for the "out" tag.
            Boolean b = (Boolean) ExpressionUtil.evalNotNull("out", "escapeXml", escapeXml_, Boolean.class, this, pageContext);
            if (b == null) {
                // A null result defaults escaping to off.
                escapeXml = false;
            } else {
                escapeXml = b.booleanValue();
            }
        }
    }
}
public class VisibilityAlgorithm { /** * Add occlusion segment in isovist
* @ param p0 segment origin
* @ param p1 segment destination */
public void addSegment ( Coordinate p0 , Coordinate p1 ) { } } | if ( p0 . distance ( p1 ) < epsilon ) { return ; } addSegment ( originalSegments , p0 , p1 ) ; |
public class ResultSetIterator {
    /**
     * {@inheritDoc}
     *
     * Maps the current row via the configured row mapper and advances the row
     * counter. On any mapping failure the iterator is closed and a DaoException
     * is (re)thrown. Note the row counter is incremented in the finally block,
     * so it advances even when mapping fails.
     *
     * @throws IllegalStateException if the iterator has already been closed
     */
    @Override
    public T next() {
        if (closed) throw new IllegalStateException("This iterator has been closed!");
        try {
            return rowMapper.mapRow(resultSet, rowNum);
        } catch (Exception e) {
            // Release the underlying resources before propagating the failure.
            close();
            // Preserve DaoExceptions as-is; wrap anything else.
            throw e instanceof DaoException ? (DaoException) e : new DaoException(e);
        } finally {
            rowNum++;
        }
    }
}
public class JDBCCallableStatement { /** * # ifdef JAVA6 */
public synchronized void setSQLXML ( String parameterName , SQLXML xmlObject ) throws SQLException { } } | super . setSQLXML ( findParameterIndex ( parameterName ) , xmlObject ) ; |
public class RedisInner { /** * Retrieve a Redis cache ' s access keys . This operation requires write permission to the cache resource .
* @ param resourceGroupName The name of the resource group .
* @ param name The name of the Redis cache .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the RedisAccessKeysInner object */
public Observable < RedisAccessKeysInner > listKeysAsync ( String resourceGroupName , String name ) { } } | return listKeysWithServiceResponseAsync ( resourceGroupName , name ) . map ( new Func1 < ServiceResponse < RedisAccessKeysInner > , RedisAccessKeysInner > ( ) { @ Override public RedisAccessKeysInner call ( ServiceResponse < RedisAccessKeysInner > response ) { return response . body ( ) ; } } ) ; |
public class TypeAnnotationPosition { /** * Create a { @ code TypeAnnotationPosition } for a field .
* @ param location The type path .
* @ param onLambda The lambda for this variable .
* @ param pos The position from the associated tree node . */
public static TypeAnnotationPosition field ( final List < TypePathEntry > location , final JCLambda onLambda , final int pos ) { } } | return new TypeAnnotationPosition ( TargetType . FIELD , pos , Integer . MIN_VALUE , onLambda , Integer . MIN_VALUE , Integer . MIN_VALUE , location ) ; |
public class ConfigurationDocGenerator { /** * Writes property key to csv files .
* @ param defaultKeys Collection which is from PropertyKey DEFAULT _ KEYS _ MAP . values ( )
* @ param filePath path for csv files */
@ VisibleForTesting public static void writeCSVFile ( Collection < ? extends PropertyKey > defaultKeys , String filePath ) throws IOException { } } | if ( defaultKeys . size ( ) == 0 ) { return ; } FileWriter fileWriter ; Closer closer = Closer . create ( ) ; String [ ] fileNames = { "user-configuration.csv" , "master-configuration.csv" , "worker-configuration.csv" , "security-configuration.csv" , "common-configuration.csv" , "cluster-management-configuration.csv" } ; try { // HashMap for FileWriter per each category
Map < String , FileWriter > fileWriterMap = new HashMap < > ( ) ; for ( String fileName : fileNames ) { fileWriter = new FileWriter ( PathUtils . concatPath ( filePath , fileName ) ) ; // Write the CSV file header and line separator after the header
fileWriter . append ( CSV_FILE_HEADER + "\n" ) ; // put fileWriter
String key = fileName . substring ( 0 , fileName . indexOf ( "configuration" ) - 1 ) ; fileWriterMap . put ( key , fileWriter ) ; // register file writer
closer . register ( fileWriter ) ; } // Sort defaultKeys
List < PropertyKey > dfkeys = new ArrayList < > ( defaultKeys ) ; Collections . sort ( dfkeys ) ; for ( PropertyKey propertyKey : dfkeys ) { String pKey = propertyKey . toString ( ) ; String defaultDescription ; if ( propertyKey . getDefaultSupplier ( ) . get ( ) == null ) { defaultDescription = "" ; } else { defaultDescription = propertyKey . getDefaultSupplier ( ) . getDescription ( ) ; } // Quote the whole description to escape characters such as commas .
defaultDescription = String . format ( "\"%s\"" , defaultDescription ) ; // Write property key and default value to CSV
String keyValueStr = pKey + "," + defaultDescription + "\n" ; if ( pKey . startsWith ( "alluxio.user." ) ) { fileWriter = fileWriterMap . get ( "user" ) ; } else if ( pKey . startsWith ( "alluxio.master." ) ) { fileWriter = fileWriterMap . get ( "master" ) ; } else if ( pKey . startsWith ( "alluxio.worker." ) ) { fileWriter = fileWriterMap . get ( "worker" ) ; } else if ( pKey . startsWith ( "alluxio.security." ) ) { fileWriter = fileWriterMap . get ( "security" ) ; } else if ( pKey . startsWith ( "alluxio.keyvalue." ) ) { fileWriter = fileWriterMap . get ( "key-value" ) ; } else if ( pKey . startsWith ( "alluxio.integration" ) ) { fileWriter = fileWriterMap . get ( "cluster-management" ) ; } else { fileWriter = fileWriterMap . get ( "common" ) ; } fileWriter . append ( keyValueStr ) ; } LOG . info ( "Property Key CSV files were created successfully." ) ; } catch ( Exception e ) { throw closer . rethrow ( e ) ; } finally { try { closer . close ( ) ; } catch ( IOException e ) { LOG . error ( "Error while flushing/closing Property Key CSV FileWriter" , e ) ; } } |
public class AmazonRedshiftClient { /** * Deletes a specified Amazon Redshift parameter group .
* < note >
* You cannot delete a parameter group if it is associated with a cluster .
* < / note >
* @ param deleteClusterParameterGroupRequest
* @ return Result of the DeleteClusterParameterGroup operation returned by the service .
* @ throws InvalidClusterParameterGroupStateException
* The cluster parameter group action can not be completed because another task is in progress that involves
* the parameter group . Wait a few moments and try the operation again .
* @ throws ClusterParameterGroupNotFoundException
* The parameter group name does not refer to an existing parameter group .
* @ sample AmazonRedshift . DeleteClusterParameterGroup
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / DeleteClusterParameterGroup "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteClusterParameterGroupResult deleteClusterParameterGroup ( DeleteClusterParameterGroupRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteClusterParameterGroup ( request ) ; |
public class MusixMatch { /** * Handle the error response .
* @ param jsonResponse
* the jsonContent .
* @ throws MusixMatchException
* if any error occurs */
private void handleErrorResponse ( String jsonResponse ) throws MusixMatchException { } } | StatusCode statusCode ; Gson gson = new Gson ( ) ; System . out . println ( jsonResponse ) ; ErrorMessage errMessage = gson . fromJson ( jsonResponse , ErrorMessage . class ) ; int responseCode = errMessage . getMessageContainer ( ) . getHeader ( ) . getStatusCode ( ) ; switch ( responseCode ) { case 400 : statusCode = StatusCode . BAD_SYNTAX ; break ; case 401 : statusCode = StatusCode . AUTH_FAILED ; break ; case 402 : statusCode = StatusCode . LIMIT_REACHED ; break ; case 403 : statusCode = StatusCode . NOT_AUTHORIZED ; break ; case 404 : statusCode = StatusCode . RESOURCE_NOT_FOUND ; break ; case 405 : statusCode = StatusCode . METHOD_NOT_FOUND ; break ; default : statusCode = StatusCode . ERROR ; break ; } throw new MusixMatchException ( statusCode . getStatusMessage ( ) ) ; |
public class PluginResourceButton { /** * Returns the value of the button ' s caption text .
* @ return The button ' s caption text . */
public String getCaption ( ) { } } | return caption != null && caption . toLowerCase ( ) . startsWith ( "label:" ) ? StrUtil . getLabel ( caption . substring ( 6 ) ) : caption ; |
public class CmsADEConfigCacheState { /** * Helper method for getting the best matching sitemap configuration object for a given root path , ignoring the module
* configuration . < p >
* For example , if there are configurations available for the paths / a , / a / b / c , / a / b / x and / a / b / c / d / e , then
* the method will return the configuration object for / a / b / c when passed the path / a / b / c / d .
* If no configuration data is found for the path , null will be returned . < p >
* @ param path a root path
* @ return the configuration data for the given path , or null if none was found */
protected CmsADEConfigDataInternal getSiteConfigData ( String path ) { } } | if ( path == null ) { return null ; } List < String > prefixes = getSiteConfigPaths ( path ) ; if ( prefixes . size ( ) == 0 ) { return null ; } // for any two prefixes of a string , one is a prefix of the other . so the alphabetically last
// prefix is the longest prefix of all .
return m_siteConfigurationsByPath . get ( prefixes . get ( prefixes . size ( ) - 1 ) ) ; |
public class AbstractYarnClusterDescriptor { /** * Method to validate cluster specification before deploy it , it will throw
* an { @ link FlinkException } if the { @ link ClusterSpecification } is invalid .
* @ param clusterSpecification cluster specification to check against the configuration of the
* AbstractYarnClusterDescriptor
* @ throws FlinkException if the cluster cannot be started with the provided { @ link ClusterSpecification } */
private void validateClusterSpecification ( ClusterSpecification clusterSpecification ) throws FlinkException { } } | try { final long taskManagerMemorySize = clusterSpecification . getTaskManagerMemoryMB ( ) ; // We do the validation by calling the calculation methods here
// Internally these methods will check whether the cluster can be started with the provided
// ClusterSpecification and the configured memory requirements
final long cutoff = ContaineredTaskManagerParameters . calculateCutoffMB ( flinkConfiguration , taskManagerMemorySize ) ; TaskManagerServices . calculateHeapSizeMB ( taskManagerMemorySize - cutoff , flinkConfiguration ) ; } catch ( IllegalArgumentException iae ) { throw new FlinkException ( "Cannot fulfill the minimum memory requirements with the provided " + "cluster specification. Please increase the memory of the cluster." , iae ) ; } |
public class ConnectionTcp { /** * Wake a connection .
* The connection may be idle because it ' s received a close - read but
* HTTP is still processing . When the HTTP write completes , it will
* wake the read thread . */
@ Override public void requestWake ( ) { } } | try { _state = _state . toWake ( ) ; requestLoop ( ) ; } catch ( Exception e ) { log . log ( Level . WARNING , e . toString ( ) , e ) ; } /* if ( _ stateRef . get ( ) . toWake ( _ stateRef ) ) {
offer ( getConnectionTask ( ) ) ; */ |
public class ExpressionTree { /** * Get the root of the tree
* @ return the root of the tree or EMPTY _ SLOT if the tree is empty */
@ Nonnull public ExpressionTreeElement getRoot ( ) { } } | if ( last . isEmptySlot ( ) ) { return this . last ; } else { ExpressionTreeElement element = last ; while ( ! Thread . currentThread ( ) . isInterrupted ( ) ) { final ExpressionTreeElement next = element . getParent ( ) ; if ( next == null ) { return element ; } else { element = next ; } } } return ExpressionTreeElement . EMPTY_SLOT ; |
public class PopupMenuItemSiteNodeContainer { /** * Convenience method that extracts a { @ code SiteNode } from the given { @ code historyReference } . If no { @ code SiteNode } is
* found { @ code null } is returned .
* @ param historyReference the history reference
* @ return the { @ code SiteNode } or { @ code null } if not found
* @ see # isButtonEnabledForHistoryReference ( HistoryReference ) */
protected static SiteNode getSiteNode ( HistoryReference historyReference ) { } } | SiteNode sn = historyReference . getSiteNode ( ) ; if ( sn == null ) { sn = Model . getSingleton ( ) . getSession ( ) . getSiteTree ( ) . getSiteNode ( historyReference . getHistoryId ( ) ) ; } return sn ; |
public class Util { /** * helper method to get the color by attr ( if defined in the style ) or by resource .
* @ param ctx
* @ param attr attribute that defines the color
* @ param res color resource id
* @ return */
public static int getThemeColorFromAttrOrRes ( Context ctx , int attr , int res ) { } } | int color = getThemeColor ( ctx , attr ) ; // If this color is not styled , use the default from the resource
if ( color == 0 ) { color = ContextCompat . getColor ( ctx , res ) ; } return color ; |
public class AWSSimpleSystemsManagementClient { /** * Retrieves the current snapshot for the patch baseline the instance uses . This API is primarily used by the
* AWS - RunPatchBaseline Systems Manager document .
* @ param getDeployablePatchSnapshotForInstanceRequest
* @ return Result of the GetDeployablePatchSnapshotForInstance operation returned by the service .
* @ throws InternalServerErrorException
* An error occurred on the server side .
* @ throws UnsupportedOperatingSystemException
* The operating systems you specified is not supported , or the operation is not supported for the operating
* system . Valid operating systems include : Windows , AmazonLinux , RedhatEnterpriseLinux , and Ubuntu .
* @ sample AWSSimpleSystemsManagement . GetDeployablePatchSnapshotForInstance
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / GetDeployablePatchSnapshotForInstance "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public GetDeployablePatchSnapshotForInstanceResult getDeployablePatchSnapshotForInstance ( GetDeployablePatchSnapshotForInstanceRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetDeployablePatchSnapshotForInstance ( request ) ; |
public class RegistriesInner { /** * Creates a new build based on the request parameters and add it to the build queue .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param buildRequest The parameters of a build that needs to queued .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < BuildInner > queueBuildAsync ( String resourceGroupName , String registryName , QueueBuildRequest buildRequest ) { } } | return queueBuildWithServiceResponseAsync ( resourceGroupName , registryName , buildRequest ) . map ( new Func1 < ServiceResponse < BuildInner > , BuildInner > ( ) { @ Override public BuildInner call ( ServiceResponse < BuildInner > response ) { return response . body ( ) ; } } ) ; |
public class Segment { /** * Releases an entry from the segment .
* @ param index The index of the entry to release .
* @ return Indicates whether the entry was newly released from the segment .
* @ throws IllegalStateException if the segment is not open */
public boolean release ( long index ) { } } | assertSegmentOpen ( ) ; long offset = offsetIndex . find ( relativeOffset ( index ) ) ; return offset != - 1 && offsetPredicate . release ( offset ) ; |
public class DualMessageReceiver {

    /**
     * Add this message filter to this receive queue.
     * Also adds a message filter to the remote queue.
     *
     * @param messageFilter the message filter to add.
     */
    public void addMessageFilter(MessageFilter messageFilter) {
        // Temporarily clear the thin-target flag while registering: if this
        // filter is replicated to a server, the server needs to know that
        // this side is thick.
        boolean bThinTarget = ((BaseMessageFilter) messageFilter).isThinTarget();
        ((BaseMessageFilter) messageFilter).setThinTarget(false);
        super.addMessageFilter(messageFilter);
        // Restore the caller's original thin-target setting.
        ((BaseMessageFilter) messageFilter).setThinTarget(bThinTarget);
    }
}
public class PhpDependencyResolver { /** * collect children ' s recursively for each dependencyInfo object */
private void collectChildren ( DependencyInfo dependencyInfo , HashMap < String , DependencyInfo > packageDependencyMap , HashMap < DependencyInfo , Collection < String > > requireDependenciesMap ) { } } | Collection < String > requires = requireDependenciesMap . get ( dependencyInfo ) ; // check if dependencyInfo object already have children ' s
if ( dependencyInfo . getChildren ( ) . isEmpty ( ) ) { for ( String require : requires ) { DependencyInfo dependencyChild = packageDependencyMap . get ( require ) ; if ( dependencyChild != null ) { dependencyInfo . getChildren ( ) . add ( dependencyChild ) ; collectChildren ( dependencyChild , packageDependencyMap , requireDependenciesMap ) ; } } } |
public class LocalFileResource { /** * { @ inheritDoc } */
@ Override public void put ( ReadableByteChannel source ) throws IOException { } } | if ( source == null || ! source . isOpen ( ) ) return ; WritableByteChannel os = putChannel ( ) ; FileChannel fos = null ; try { if ( source instanceof FileChannel ) { fos = ( FileChannel ) os ; FileChannel fis = ( FileChannel ) source ; fis . transferTo ( 0 , fis . size ( ) , fos ) ; } else { ByteBuffer buf = ByteBuffer . allocate ( 1024 ) ; int rc ; while ( ( rc = source . read ( buf ) ) >= 0 ) { if ( rc == 0 ) continue ; buf . flip ( ) ; // flip ( position back to 0 , limit to amount read )
os . write ( buf ) ; // copy data into writable channel
buf . clear ( ) ; // prepare for new read
} } } finally { if ( fos != null ) fos . close ( ) ; else if ( os != null ) os . close ( ) ; } |
public class InstanceTypeFactory {

    /**
     * Constructs an {@link InstanceType} from the given parameters.
     *
     * @param identifier
     *            identifier for this instance type
     * @param numberOfComputeUnits
     *            number of computational units of this instance type
     * @param numberOfCores
     *            number of CPU cores of this instance type
     * @param memorySize
     *            amount of main memory of this instance type (in MB)
     * @param diskCapacity
     *            disk capacity of this instance type (in GB)
     * @param pricePerHour
     *            price per hour that is charged for running instances of this type
     * @return a new {@link InstanceType} with the given attributes
     */
    public static InstanceType construct(String identifier, int numberOfComputeUnits, int numberOfCores,
            int memorySize, int diskCapacity, int pricePerHour) {
        // Thin factory: parameters are passed straight through to the
        // (package-visible) InstanceType constructor.
        return new InstanceType(identifier, numberOfComputeUnits, numberOfCores, memorySize, diskCapacity,
            pricePerHour);
    }
}
public class Ranges { /** * Return the center of the specified range .
* @ param < C > range endpoint type
* @ param range range , must not be null
* @ return the center of the specified range */
public static < C extends Comparable > C center ( final Range < C > range ) { } } | checkNotNull ( range ) ; if ( ! range . hasLowerBound ( ) && ! range . hasUpperBound ( ) ) { throw new IllegalStateException ( "cannot find the center of a range without bounds" ) ; } if ( ! range . hasLowerBound ( ) ) { return range . upperEndpoint ( ) ; } if ( ! range . hasUpperBound ( ) ) { return range . lowerEndpoint ( ) ; } C lowerEndpoint = range . lowerEndpoint ( ) ; C upperEndpoint = range . upperEndpoint ( ) ; if ( upperEndpoint instanceof Integer ) { Integer upper = ( Integer ) upperEndpoint ; Integer lower = ( Integer ) lowerEndpoint ; return ( C ) Integer . valueOf ( ( upper . intValue ( ) + lower . intValue ( ) ) / 2 ) ; } if ( upperEndpoint instanceof Long ) { Long upper = ( Long ) upperEndpoint ; Long lower = ( Long ) lowerEndpoint ; return ( C ) Long . valueOf ( ( upper . longValue ( ) + lower . longValue ( ) ) / 2L ) ; } if ( upperEndpoint instanceof BigInteger ) { BigInteger upper = ( BigInteger ) upperEndpoint ; BigInteger lower = ( BigInteger ) lowerEndpoint ; BigInteger two = BigInteger . valueOf ( 2L ) ; return ( C ) upper . subtract ( lower ) . divide ( two ) ; } // todo : could potentially calculate the center of any range with a discrete domain
throw new IllegalStateException ( "cannot find the center of a range whose endpoint type is not Integer, Long, or BigInteger" ) ; |
public class GuildWars2 { /** * Set language for APIs that are locale aware < br / >
* Possible Value : english ( default ) , german , spanish , french , chinese
* @ param lang selected language */
public static void setLanguage ( LanguageSelect lang ) { } } | if ( lang == null ) GuildWars2 . lang = LanguageSelect . English ; else GuildWars2 . lang = lang ; |
public class LineWrapper { /** * Emit a newline character if the line will exceed it ' s limit , otherwise do nothing . */
void zeroWidthSpace ( int indentLevel ) throws IOException { } } | if ( closed ) throw new IllegalStateException ( "closed" ) ; if ( column == 0 ) return ; if ( this . nextFlush != null ) flush ( nextFlush ) ; this . nextFlush = FlushType . EMPTY ; this . indentLevel = indentLevel ; |
public class GVRShaderData { /** * Gets the name of the vertex attribute containing the texture
* coordinates for the named texture .
* @ param texName name of texture
* @ return name of texture coordinate vertex attribute */
public String getTexCoordAttr ( String texName ) { } } | GVRTexture tex = textures . get ( texName ) ; if ( tex != null ) { return tex . getTexCoordAttr ( ) ; } return null ; |
public class Iterators { /** * Combines multiple iterators into a single iterator . The returned iterator
* iterates across the elements of each iterator in { @ code inputs } . The input
* iterators are not polled until necessary .
* < p > The returned iterator supports { @ code remove ( ) } when the corresponding
* input iterator supports it .
* @ throws NullPointerException if any of the provided iterators is null */
public static < T > Iterator < T > concat ( Iterator < ? extends T > ... inputs ) { } } | for ( Iterator < ? extends T > input : checkNotNull ( inputs ) ) { checkNotNull ( input ) ; } return concat ( new ConsumingQueueIterator < Iterator < ? extends T > > ( inputs ) ) ; |
public class LazyReact { /** * Construct a FutureStream from specified Suppliers . Each Supplier is executed asyncrhonously ,
* and it ' s results provided to next phase of the Stream
* < pre >
* { @ code
* LazyReact . parallelBuilder ( )
* . react ( asList ( this : : load )
* . map ( list - > 1 + 2)
* . block ( ) ;
* < / pre >
* @ param actions Suppliers to execute
* @ return FutureStream
* @ see com . oath . cyclops . react . stream . BaseSimpleReact # react ( java . util . List ) */
public < U > FutureStream < U > react ( final Collection < Supplier < U > > actions ) { } } | final ReactiveSeq < Supplier < U > > seq = actions instanceof List ? ReactiveSeq . fromList ( ( List ) actions ) : ReactiveSeq . fromIterable ( actions ) ; return fromStreamAsync ( seq ) ; |
public class TileSetBundlerTask { /** * Returns the target path in which our bundler will write the tile set . */
protected String getTargetPath ( File fromDir , String path ) { } } | return path . substring ( 0 , path . length ( ) - 4 ) + ".jar" ; |
public class DateModifiedFollowingHandler {

    /**
     * Adjusts the given date using the "modified following" convention: step
     * day by day in the direction of {@code increment} while the date falls
     * on a non-working day; if stepping crosses a month boundary, reverse
     * direction so the result stays within the starting month.
     *
     * @param startDate the date to adjust
     * @param increment the initial day step (typically +1 or -1)
     * @param checker decides whether a given date is a non-working day
     * @return the adjusted working-day date
     */
    public Date adjustDate(final Date startDate, final int increment, final NonWorkingDayChecker<Date> checker) {
        // Clone so the calendar obtained from Utils is never mutated.
        final Calendar cal = (Calendar) Utils.getCal(startDate).clone();
        int step = increment;
        // Remember the starting month to detect month-boundary crossings.
        final int month = cal.get(Calendar.MONTH);
        while (checker.isNonWorkingDay(cal.getTime())) {
            cal.add(Calendar.DAY_OF_MONTH, step);
            if (month != cal.get(Calendar.MONTH)) {
                // switch direction and go back — the "modified" part of the
                // modified-following convention keeps the result in-month.
                step *= -1;
                cal.add(Calendar.DAY_OF_MONTH, step);
            }
        }
        return cal.getTime();
    }
}
public class GlobalUsersInner { /** * Starts an environment by starting all resources inside the environment . This operation can take a while to complete .
* @ param userName The name of the user .
* @ param environmentId The resourceId of the environment
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < Void > startEnvironmentAsync ( String userName , String environmentId ) { } } | return startEnvironmentWithServiceResponseAsync ( userName , environmentId ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class ServletResponse {

    /**
     * {@inheritDoc}
     *
     * Delegates directly to the wrapped (backing) response.
     */
    @Override
    public void setHeader(final String name, final String value) {
        // Pure pass-through to the backing response.
        backing.setHeader(name, value);
    }
}
public class ComponentEnhancer { /** * Check if annotation can be processed for the given class .
* @ param componentClass the class to check
* @ return true if annotation can be processed */
public static boolean canProcessAnnotation ( final Class < ? extends Component < ? > > componentClass ) { } } | final SkipAnnotation skip = ClassUtility . getLastClassAnnotation ( componentClass , SkipAnnotation . class ) ; // No annotation or annotation deactivated = = > skip annotation processing
return ! ( skip == null || skip . value ( ) ) ; |
public class ClassReader {

    /**
     * Include class corresponding to given class file in package,
     * unless (1) we already have one the same kind (.class or .java), or
     * (2) we have one of the other kind, and the given class file
     * is older.
     */
    protected void includeClassFile(PackageSymbol p, JavaFileObject file) {
        // Mark this package and every enclosing package as existing.
        if ((p.flags_field & EXISTS) == 0)
            for (Symbol q = p; q != null && q.kind == PCK; q = q.owner)
                q.flags_field |= EXISTS;
        JavaFileObject.Kind kind = file.getKind();
        // Flag recording whether a .class or .java was seen for this name.
        int seen;
        if (kind == JavaFileObject.Kind.CLASS)
            seen = CLASS_SEEN;
        else
            seen = SOURCE_SEEN;
        // Derive the simple class name from the inferred binary name.
        String binaryName = fileManager.inferBinaryName(currentLoc, file);
        int lastDot = binaryName.lastIndexOf(".");
        Name classname = names.fromString(binaryName.substring(lastDot + 1));
        // package-info lives on the package symbol, not in its member scope.
        boolean isPkgInfo = classname == names.package_info;
        ClassSymbol c = isPkgInfo
            ? p.package_info
            : (ClassSymbol) p.members_field.lookup(classname).sym;
        if (c == null) {
            c = enterClass(classname, p);
            if (c.classfile == null) // only update the file if 's it's newly created
                c.classfile = file;
            if (isPkgInfo) {
                p.package_info = c;
            } else {
                if (c.owner == p) // it might be an inner class
                    p.members_field.enter(c);
            }
        } else if (c.classfile != null && (c.flags_field & seen) == 0) {
            // if c.classfile == null, we are currently compiling this class
            // and no further action is necessary.
            // if (c.flags_field & seen) != 0, we have already encountered
            // a file of the same kind; again no further action is necessary.
            if ((c.flags_field & (CLASS_SEEN | SOURCE_SEEN)) != 0)
                // The other kind was seen before: keep the preferred file
                // object of the two.
                c.classfile = preferredFileObject(file, c.classfile);
        }
        c.flags_field |= seen;
    }
}
public class ArrayUtil { /** * Return count of true elements in array */
public static int countTrueElements ( boolean [ ] arra ) { } } | int count = 0 ; for ( int i = 0 ; i < arra . length ; i ++ ) { if ( arra [ i ] ) { count ++ ; } } return count ; |
public class ModifySnapshotScheduleResult { /** * A list of clusters associated with the schedule . A maximum of 100 clusters is returned .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAssociatedClusters ( java . util . Collection ) } or { @ link # withAssociatedClusters ( java . util . Collection ) } if
* you want to override the existing values .
* @ param associatedClusters
* A list of clusters associated with the schedule . A maximum of 100 clusters is returned .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ModifySnapshotScheduleResult withAssociatedClusters ( ClusterAssociatedToSchedule ... associatedClusters ) { } } | if ( this . associatedClusters == null ) { setAssociatedClusters ( new com . amazonaws . internal . SdkInternalList < ClusterAssociatedToSchedule > ( associatedClusters . length ) ) ; } for ( ClusterAssociatedToSchedule ele : associatedClusters ) { this . associatedClusters . add ( ele ) ; } return this ; |
public class CustomFunctions { /** * コレクションの値を結合する 。
* @ param collection 結合対象のコレクション
* @ param delimiter 区切り文字
* @ return 結合した文字列を返す 。 結合の対象のコレクションがnulの場合 、 空文字を返す 。 */
public static String join ( final Collection < ? > collection , final String delimiter ) { } } | if ( collection == null || collection . isEmpty ( ) ) { return "" ; } String value = collection . stream ( ) . map ( v -> v . toString ( ) ) . collect ( Collectors . joining ( defaultString ( delimiter ) ) ) ; return value ; |
public class StatusCodeDumper {

    /**
     * Impl of dumping the response status code into the result map.
     *
     * @param result A map you want to put dump information to
     */
    @Override
    public void dumpResponse(Map<String, Object> result) {
        // Capture the status code as text, then record it via the shared
        // dump helper.
        this.statusCodeResult = String.valueOf(exchange.getStatusCode());
        this.putDumpInfoTo(result);
    }
}
public class PdfTable {

    /**
     * Updates the table row additions in the underlying table object:
     * converts each logical Cell into a positioned PdfCell, tracking a
     * per-row vertical offset array so rowspans push later rows down, then
     * assigns every new cell its bottom coordinate.
     */
    private void updateRowAdditionsInternal() {
        // correct table: fill empty cells / parse table in table
        Row row;
        int prevRows = rows();
        int rowNumber = 0;
        int groupNumber = 0;
        boolean groupChange;
        // Rows before this index are header rows.
        int firstDataRow = table.getLastHeaderRow() + 1;
        Cell cell;
        PdfCell currentCell;
        ArrayList newCells = new ArrayList();
        int rows = table.size() + 1;
        // offsets[i] is the y-coordinate at which row i starts; initialized
        // to the current table bottom and lowered as cells are laid out.
        float[] offsets = new float[rows];
        for (int i = 0; i < rows; i++) {
            offsets[i] = getBottom();
        }
        // loop over all the rows
        for (Iterator rowIterator = table.iterator(); rowIterator.hasNext();) {
            groupChange = false;
            row = (Row) rowIterator.next();
            if (row.isEmpty()) {
                // An empty row collapses: the next row starts no lower than
                // this one.
                if (rowNumber < rows - 1 && offsets[rowNumber + 1] > offsets[rowNumber])
                    offsets[rowNumber + 1] = offsets[rowNumber];
            } else {
                for (int i = 0; i < row.getColumns(); i++) {
                    cell = (Cell) row.getCell(i);
                    if (cell != null) {
                        currentCell = new PdfCell(cell, rowNumber + prevRows, positions[i],
                            positions[i + cell.getColspan()], offsets[rowNumber], cellspacing(), cellpadding());
                        if (rowNumber < firstDataRow) {
                            currentCell.setHeader();
                            headercells.add(currentCell);
                            // Header cells of an already-added table are not
                            // re-added to the body.
                            if (!table.isNotAddedYet())
                                continue;
                        }
                        try {
                            // Push the row below this cell's span down if the
                            // cell is taller than the space reserved so far.
                            if (offsets[rowNumber] - currentCell.getHeight() - cellpadding()
                                    < offsets[rowNumber + currentCell.rowspan()]) {
                                offsets[rowNumber + currentCell.rowspan()] =
                                    offsets[rowNumber] - currentCell.getHeight() - cellpadding();
                            }
                        } catch (ArrayIndexOutOfBoundsException aioobe) {
                            // Rowspan reaches past the last row: clamp to the
                            // final offset slot instead.
                            if (offsets[rowNumber] - currentCell.getHeight() < offsets[rows - 1]) {
                                offsets[rows - 1] = offsets[rowNumber] - currentCell.getHeight();
                            }
                        }
                        currentCell.setGroupNumber(groupNumber);
                        groupChange |= cell.getGroupChange();
                        newCells.add(currentCell);
                    }
                }
            }
            rowNumber++;
            if (groupChange)
                groupNumber++;
        }
        // loop over all the cells
        int n = newCells.size();
        for (int i = 0; i < n; i++) {
            currentCell = (PdfCell) newCells.get(i);
            try {
                // The cell's bottom is the offset of the row just below its span.
                currentCell.setBottom(offsets[currentCell.rownumber() - prevRows + currentCell.rowspan()]);
            } catch (ArrayIndexOutOfBoundsException aioobe) {
                currentCell.setBottom(offsets[rows - 1]);
            }
        }
        cells.addAll(newCells);
        setBottom(offsets[rows - 1]);
    }
}
public class CancelRotateSecretRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CancelRotateSecretRequest cancelRotateSecretRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( cancelRotateSecretRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( cancelRotateSecretRequest . getSecretId ( ) , SECRETID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DOMUtils { /** * Parse the given input source and return the root Element .
* This uses the document builder associated with the current thread . */
public static Element parse ( InputSource source ) throws IOException { } } | try { Document doc ; DocumentBuilder builder = getDocumentBuilder ( ) ; synchronized ( builder ) // synchronize to prevent concurrent parsing on the same DocumentBuilder
{ doc = builder . parse ( source ) ; } return doc . getDocumentElement ( ) ; } catch ( SAXException se ) { throw new IOException ( se . toString ( ) ) ; } finally { InputStream is = source . getByteStream ( ) ; if ( is != null ) { is . close ( ) ; } Reader r = source . getCharacterStream ( ) ; if ( r != null ) { r . close ( ) ; } } |
public class TextMateGenerator2 { /** * Generate the rules for the primitive types .
* @ param primitiveTypes the primitive types .
* @ return the rules . */
protected List < Map < String , ? > > generatePrimitiveTypes ( Set < String > primitiveTypes ) { } } | final List < Map < String , ? > > list = new ArrayList < > ( ) ; if ( ! primitiveTypes . isEmpty ( ) ) { list . add ( pattern ( it -> { it . matches ( keywordRegex ( primitiveTypes ) + "(?:\\s*\\[\\s*\\])*" ) ; // $ NON - NLS - 1 $
it . style ( PRIMITIVE_TYPE_STYLE ) ; it . comment ( "Primitive types" ) ; // $ NON - NLS - 1 $
} ) ) ; } return list ; |
public class ManagementResource { /** * Upload an image for a template / application .
* If an image was already set , it is overridden by the new one .
* @ param name the name of the template / application .
* @ param qualifier the qualifier of the template , or { @ code null } for an application .
* @ param image the uploaded image .
* @ param fileDetail the image details .
* @ throws IllegalArgumentException if the image is too large , or is not supported .
* @ throws NoSuchElementException if the application / template cannot be found .
* @ throws IOException if the image cannot be stored . */
private void doSetImage ( final String name , final String qualifier , final InputStream image , final FormDataContentDisposition fileDetail ) throws IOException { } } | // Check image size and extension .
final long size = fileDetail . getSize ( ) ; final String extension = getFileExtension ( fileDetail . getFileName ( ) ) ; if ( size > MAX_IMAGE_SIZE ) throw new IllegalArgumentException ( "Image is too large: " + size ) ; if ( ! SUPPORTED_EXTENSIONS . contains ( extension ) ) throw new IllegalArgumentException ( "Unsupported image file extension: " + extension ) ; // Get the target directory .
File targetDir ; if ( qualifier != null ) { this . logger . fine ( "Request: set template image: " + name + '/' + qualifier + "." ) ; final ApplicationTemplate template = this . manager . applicationTemplateMngr ( ) . findTemplate ( name , qualifier ) ; if ( template == null ) throw new NoSuchElementException ( "Cannot find template: " + name + '/' + qualifier ) ; targetDir = new File ( template . getDirectory ( ) , Constants . PROJECT_DIR_DESC ) ; } else { this . logger . fine ( "Request: set application image: " + name + "." ) ; final Application application = this . manager . applicationMngr ( ) . findApplicationByName ( name ) ; if ( application == null ) throw new NoSuchElementException ( "Cannot find application: " + name ) ; targetDir = new File ( application . getDirectory ( ) , Constants . PROJECT_DIR_DESC ) ; } // First clean the previous " application . * " images , as they may be chosen instead of the one we ' re uploading .
for ( final String ext : SUPPORTED_EXTENSIONS ) { File f = new File ( targetDir , "application." + ext ) ; Utils . deleteFilesRecursivelyAndQuietly ( f ) ; } // Now store the image : rename it to application . X , so we get sure it is chosen as THE app / template icon .
// ( where X is the uploaded file extension ) .
Utils . copyStream ( image , new File ( targetDir , "application." + extension ) ) ; |
public class I18n { /** * Note , calling this method will < em > not < / em > trigger localization of the supplied internationalization class .
* @ param i18nClass The internalization class for which localization problem locales should be returned .
* @ return The locales for which localization problems were encountered while localizing the supplied internationalization
* class ; never < code > null < / code > . */
public static Set < Locale > getLocalizationProblemLocales ( Class < ? > i18nClass ) { } } | CheckArg . isNotNull ( i18nClass , "i18nClass" ) ; Set < Locale > locales = new HashSet < Locale > ( LOCALE_TO_CLASS_TO_PROBLEMS_MAP . size ( ) ) ; for ( Entry < Locale , Map < Class < ? > , Set < String > > > localeEntry : LOCALE_TO_CLASS_TO_PROBLEMS_MAP . entrySet ( ) ) { for ( Entry < Class < ? > , Set < String > > classEntry : localeEntry . getValue ( ) . entrySet ( ) ) { if ( ! classEntry . getValue ( ) . isEmpty ( ) ) { locales . add ( localeEntry . getKey ( ) ) ; break ; } } } return locales ; |
public class SymmetryAxes {

    /**
     * Get the first repeat index of each axis of a specified level.
     *
     * @param level level of the tree to cut at
     * @return List of first Repeats of each index, sorted in ascending order
     */
    public List<Integer> getFirstRepeats(int level) {
        List<Integer> firstRepeats = new ArrayList<Integer>();
        int m = getNumRepeats(level + 1); // size of the level
        int d = axes.get(level).getOrder(); // degree of this level
        int n = m * d; // number of repeats included in each axis
        // Every axis at this level covers a contiguous run of n repeats, so the
        // first repeat of each axis is simply every n-th index from 0.
        // NOTE(review): assumes getNumRepeats() is an exact multiple of n — TODO confirm.
        for (int firstRepeat = 0; firstRepeat < getNumRepeats(); firstRepeat += n)
            firstRepeats.add(firstRepeat);
        return firstRepeats;
    }
}
public class MetaTinyTypes {

    /**
     * Checks whether a class is a TinyType. A class is considered a TinyType if it
     * is a direct ancestor of a tech.anima.tinytypes.* TinyType, is not abstract
     * and provides a ctor matching super.
     *
     * @param candidate the class to be checked
     * @return true if the candidate is a TinyType, false otherwise
     */
    public static boolean isTinyType(Class<?> candidate) {
        // Delegates the actual checks to the registered meta descriptors;
        // the first one that recognizes the candidate wins.
        for (MetaTinyType meta : metas) {
            if (meta.isMetaOf(candidate)) {
                return true;
            }
        }
        return false;
    }
}
public class DefaultDocWorkUnitHandler { /** * Add any custom freemarker bindings discovered via custom javadoc tags . Subclasses can override this to
* provide additional custom bindings .
* @ param currentWorkUnit the work unit for the feature being documented */
protected void addCustomBindings ( final DocWorkUnit currentWorkUnit ) { } } | final String tagFilterPrefix = getTagPrefix ( ) ; Arrays . stream ( currentWorkUnit . getClassDoc ( ) . inlineTags ( ) ) . filter ( t -> t . name ( ) . startsWith ( tagFilterPrefix ) ) . forEach ( t -> currentWorkUnit . setProperty ( t . name ( ) . substring ( tagFilterPrefix . length ( ) ) , t . text ( ) ) ) ; |
public class AccountsInner {

    /**
     * Updates a Cognitive Services account. Blocks the calling thread until the
     * service call completes.
     *
     * @param resourceGroupName the name of the resource group within the user's subscription
     * @param accountName the name of the Cognitive Services account
     * @param sku the SKU of the resource
     * @param tags key/value pairs describing the resource (max 15 tags; key &le; 128
     *        chars, value &le; 256 chars)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CognitiveServicesAccountInner object if successful
     */
    public CognitiveServicesAccountInner update(String resourceGroupName, String accountName, Sku sku, Map<String, String> tags) {
        // Delegate to the async variant and block for the single expected response body.
        return updateWithServiceResponseAsync(resourceGroupName, accountName, sku, tags).toBlocking().single().body();
    }
}
public class AbstractHttpWriterBuilder {

    /**
     * For backward compatibility on how Fork creates a writer: also invoke
     * {@code fromState} when {@code writeTo} is called, so the builder picks up
     * the destination's properties.
     *
     * @param destination the write destination whose properties configure this builder
     * @return this builder, typed as the concrete subclass
     */
    @Override
    public B writeTo(Destination destination) {
        super.writeTo(destination);
        // Re-apply the destination's properties as builder state (legacy Fork behavior).
        fromState(destination.getProperties());
        return typedSelf();
    }
}
public class MapsInner {

    /**
     * Get the content callback url (asynchronous, callback-based overload).
     *
     * @param resourceGroupName the resource group name
     * @param integrationAccountName the integration account name
     * @param mapName the integration account map name
     * @param listContentCallbackUrl the GetCallbackUrlParameters value
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WorkflowTriggerCallbackUrlInner> listContentCallbackUrlAsync(String resourceGroupName, String integrationAccountName, String mapName, GetCallbackUrlParameters listContentCallbackUrl, final ServiceCallback<WorkflowTriggerCallbackUrlInner> serviceCallback) {
        // Adapt the observable-based implementation to the callback-style API.
        return ServiceFuture.fromResponse(listContentCallbackUrlWithServiceResponseAsync(resourceGroupName, integrationAccountName, mapName, listContentCallbackUrl), serviceCallback);
    }
}
public class ManagementClientImpl {

    /**
     * As {@code readProperty}, but collects all responses until the response
     * timeout is reached (the trailing {@code false} argument selects
     * collect-all behavior in the shared implementation).
     */
    List<byte[]> readProperty2(final Destination dst, final int objIndex, final int propertyId, final int start, final int elements) throws KNXTimeoutException, KNXRemoteException, KNXDisconnectException, KNXLinkClosedException, InterruptedException {
        return readProperty(dst, objIndex, propertyId, start, elements, false);
    }
}
public class CPSpecificationOptionPersistenceImpl {

    /**
     * Returns the last cp specification option in the ordered set where
     * uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching cp specification option
     * @throws NoSuchCPSpecificationOptionException if a matching cp specification option could not be found
     */
    @Override
    public CPSpecificationOption findByUuid_C_Last(String uuid, long companyId, OrderByComparator<CPSpecificationOption> orderByComparator) throws NoSuchCPSpecificationOptionException {
        CPSpecificationOption cpSpecificationOption = fetchByUuid_C_Last(uuid, companyId, orderByComparator);
        if (cpSpecificationOption != null) {
            return cpSpecificationOption;
        }
        // No match: build a descriptive "no such entity" message with the key values.
        StringBundler msg = new StringBundler(6);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("uuid=");
        msg.append(uuid);
        msg.append(", companyId=");
        msg.append(companyId);
        msg.append("}");
        throw new NoSuchCPSpecificationOptionException(msg.toString());
    }
}
public class EffectUtil {

    /**
     * Returns an image that can be used by effects as a temp image.
     * The shared scratch image is cleared to transparent and the graphics state
     * reset (SrcOver composite, white color) before being handed out.
     * NOTE(review): a single shared image is returned to every caller — this
     * looks non-thread-safe; confirm effects always run on one thread.
     *
     * @return the scratch image used for temporary operations
     */
    static public BufferedImage getScratchImage() {
        Graphics2D g = (Graphics2D) scratchImage.getGraphics();
        // Wipe any pixels left over from the previous effect.
        g.setComposite(AlphaComposite.Clear);
        g.fillRect(0, 0, GlyphPage.MAX_GLYPH_SIZE, GlyphPage.MAX_GLYPH_SIZE);
        // Restore the default painting state callers expect.
        g.setComposite(AlphaComposite.SrcOver);
        g.setColor(java.awt.Color.white);
        return scratchImage;
    }
}
public class PublicKeyExtensions {

    /**
     * Transform the given {@link PublicKey} to a base64 encoded {@link String} value.
     * Uses the JDK's {@code java.util.Base64} basic encoder (RFC 4648, no line
     * separators), which produces output identical to the previously used
     * commons-codec {@code Base64.encodeBase64String} while removing the
     * third-party dependency.
     *
     * @param publicKey the public key (its {@code getEncoded()} form is encoded)
     * @return the base64 encoded {@link String} value
     */
    public static String toBase64(final PublicKey publicKey) {
        final byte[] encoded = publicKey.getEncoded();
        return java.util.Base64.getEncoder().encodeToString(encoded);
    }
}
public class NetUtils { /** * 获取服务端错误消息
* @ param request { @ link HttpServletRequest }
* @ param e { @ link Exception }
* @ return { @ link Map }
* @ since 1.1.1 */
public static Map < String , String > getServerErrorMap ( HttpServletRequest request , Exception e ) { } } | Map < String , String > attributes = new HashMap < > ( 4 ) ; attributes . put ( "code" , "500" ) ; attributes . put ( "message" , e . getMessage ( ) ) ; String queryString = request . getQueryString ( ) ; attributes . put ( "url" , request . getRequestURI ( ) + ( Checker . isEmpty ( queryString ) ? "" : "?" + queryString ) ) ; return attributes ; |
public class ArrayConstructor {

    /**
     * Create a Java array object from pickled Python {@code array.array} raw bytes,
     * based on the array's typecode and the machine format code of the data.
     *
     * @param typecode the Python array typecode ('c','u','b','B','h','H','i','l','I','L','f','d')
     * @param machinecode the machine format code describing width/signedness/endianness
     * @param data the raw array bytes
     * @return the decoded Java array (char[], byte[], short[], int[], long[], float[] or double[])
     * @throws PickleException on an unknown/mismatched machine code, misaligned data or unknown typecode
     */
    public Object construct(char typecode, int machinecode, byte[] data) throws PickleException {
        // Machine format codes. Search for "enum machine_format_code" in CPython's
        // Modules/arraymodule.c to get the authoritative values:
        //  -1 UNKNOWN_FORMAT
        //   0 UNSIGNED_INT8         1 SIGNED_INT8
        //   2 UNSIGNED_INT16_LE     3 UNSIGNED_INT16_BE
        //   4 SIGNED_INT16_LE       5 SIGNED_INT16_BE
        //   6 UNSIGNED_INT32_LE     7 UNSIGNED_INT32_BE
        //   8 SIGNED_INT32_LE       9 SIGNED_INT32_BE
        //  10 UNSIGNED_INT64_LE    11 UNSIGNED_INT64_BE
        //  12 SIGNED_INT64_LE      13 SIGNED_INT64_BE
        //  14 IEEE_754_FLOAT_LE    15 IEEE_754_FLOAT_BE
        //  16 IEEE_754_DOUBLE_LE   17 IEEE_754_DOUBLE_BE
        //  18 UTF16_LE             19 UTF16_BE
        //  20 UTF32_LE             21 UTF32_BE
        if (machinecode < 0)
            throw new PickleException("unknown machine type format");
        switch (typecode) {
        case 'c': // character 1 -> char[]
        case 'u': // Unicode character 2 -> char[]
        {
            if (machinecode != 18 && machinecode != 19 && machinecode != 20 && machinecode != 21)
                throw new PickleException("for c/u type must be 18/19/20/21");
            if (machinecode == 18 || machinecode == 19) {
                // utf-16, 2 bytes per code unit
                if (data.length % 2 != 0)
                    throw new PickleException("data size alignment error");
                return constructCharArrayUTF16(machinecode, data);
            } else {
                // utf-32, 4 bytes per code point
                if (data.length % 4 != 0)
                    throw new PickleException("data size alignment error");
                return constructCharArrayUTF32(machinecode, data);
            }
        }
        case 'b': // signed integer 1 -> byte[]
        {
            if (machinecode != 1)
                throw new PickleException("for b type must be 1");
            // Raw bytes already match Java's signed byte semantics.
            return data;
        }
        case 'B': // unsigned integer 1 -> short[] (widened so 128..255 fit)
        {
            if (machinecode != 0)
                throw new PickleException("for B type must be 0");
            return constructShortArrayFromUByte(data);
        }
        case 'h': // signed integer 2 -> short[]
        {
            if (machinecode != 4 && machinecode != 5)
                throw new PickleException("for h type must be 4/5");
            if (data.length % 2 != 0)
                throw new PickleException("data size alignment error");
            return constructShortArraySigned(machinecode, data);
        }
        case 'H': // unsigned integer 2 -> int[] (widened)
        {
            if (machinecode != 2 && machinecode != 3)
                throw new PickleException("for H type must be 2/3");
            if (data.length % 2 != 0)
                throw new PickleException("data size alignment error");
            return constructIntArrayFromUShort(machinecode, data);
        }
        case 'i': // signed integer 4 -> int[]
        {
            if (machinecode != 8 && machinecode != 9)
                throw new PickleException("for i type must be 8/9");
            if (data.length % 4 != 0)
                throw new PickleException("data size alignment error");
            return constructIntArrayFromInt32(machinecode, data);
        }
        case 'l': // signed integer 4/8 -> int[] or long[] (platform long may be 32 or 64 bits)
        {
            if (machinecode != 8 && machinecode != 9 && machinecode != 12 && machinecode != 13)
                throw new PickleException("for l type must be 8/9/12/13");
            if ((machinecode == 8 || machinecode == 9) && (data.length % 4 != 0))
                throw new PickleException("data size alignment error");
            if ((machinecode == 12 || machinecode == 13) && (data.length % 8 != 0))
                throw new PickleException("data size alignment error");
            if (machinecode == 8 || machinecode == 9) {
                // 32 bits
                return constructIntArrayFromInt32(machinecode, data);
            } else {
                // 64 bits
                return constructLongArrayFromInt64(machinecode, data);
            }
        }
        case 'I': // unsigned integer 4 -> long[] (widened)
        {
            if (machinecode != 6 && machinecode != 7)
                throw new PickleException("for I type must be 6/7");
            if (data.length % 4 != 0)
                throw new PickleException("data size alignment error");
            return constructLongArrayFromUInt32(machinecode, data);
        }
        case 'L': // unsigned integer 4/8 -> long[]
        {
            if (machinecode != 6 && machinecode != 7 && machinecode != 10 && machinecode != 11)
                throw new PickleException("for L type must be 6/7/10/11");
            if ((machinecode == 6 || machinecode == 7) && (data.length % 4 != 0))
                throw new PickleException("data size alignment error");
            if ((machinecode == 10 || machinecode == 11) && (data.length % 8 != 0))
                throw new PickleException("data size alignment error");
            if (machinecode == 6 || machinecode == 7) {
                // 32 bits
                return constructLongArrayFromUInt32(machinecode, data);
            } else {
                // 64 bits
                return constructLongArrayFromUInt64(machinecode, data);
            }
        }
        case 'f': // floating point 4 -> float[]
        {
            if (machinecode != 14 && machinecode != 15)
                throw new PickleException("for f type must be 14/15");
            if (data.length % 4 != 0)
                throw new PickleException("data size alignment error");
            return constructFloatArray(machinecode, data);
        }
        case 'd': // floating point 8 -> double[]
        {
            if (machinecode != 16 && machinecode != 17)
                throw new PickleException("for d type must be 16/17");
            if (data.length % 8 != 0)
                throw new PickleException("data size alignment error");
            return constructDoubleArray(machinecode, data);
        }
        default:
            throw new PickleException("invalid array typecode: " + typecode);
        }
    }
}
public class ZoneExtension {

    /**
     * Add system controller filters: registers one filter chain per
     * {@link SystemRequest} value on the parent zone.
     */
    public void addSysControllerFilters() {
        for (SystemRequest rq : SystemRequest.values()) {
            // Each request type gets its own chain holding a single base filter.
            ISystemFilterChain filterChain = new SysControllerFilterChain();
            filterChain.addFilter("EzyFoxFilterChain#" + rq, new BaseSysControllerFilter(appContext(), rq));
            getParentZone().setFilterChain(rq, filterChain);
        }
    }
}
public class PathComputer {

    /**
     * Given an installation directory, finds the root directory of the nunaliit2
     * project. This makes sense only in the context that the command-line tool is
     * run from a development environment.
     *
     * @param installDir computed install directory where the command-line is run
     * @return root directory where the nunaliit2 project is located, or null if not found
     */
    static public File computeNunaliitDir(File installDir) {
        // Walk upward from the install directory toward the file-system root.
        for (File dir = installDir; dir != null; dir = dir.getParentFile()) {
            // The root of the nunaliit2 project contains "nunaliit2-couch-command",
            // "nunaliit2-couch-sdk" and "nunaliit2-js".
            boolean looksLikeProjectRoot =
                new File(dir, "nunaliit2-couch-command").exists()
                && new File(dir, "nunaliit2-couch-sdk").exists()
                && new File(dir, "nunaliit2-js").exists();
            if (looksLikeProjectRoot) {
                return dir;
            }
        }
        // Reached the file-system root without finding the markers.
        return null;
    }
}
public class DefaultNamespaceService {

    /**
     * Creates a new namespace.
     *
     * @param namespace the namespace to create; cannot be null
     * @return the updated namespace object having the ID field populated
     * @throws SystemException if the qualifier is invalid or a duplicate namespace exists
     */
    @Override
    public Namespace createNamespace(Namespace namespace) {
        requireNotDisposed();
        requireArgument(namespace != null, "null namespace cannot be created.");
        // Reject qualifiers containing illegal characters before hitting the store.
        if (!_validateQualifier(namespace.getQualifier())) {
            throw new SystemException(new IllegalArgumentException("Illegal characters found while generating namespace. Cannot generate a namespace with this qualifier."));
        }
        // Enforce uniqueness on the qualifier.
        if (Namespace.findByQualifier(emf.get(), namespace.getQualifier()) != null) {
            throw new SystemException(new IllegalArgumentException("Namespace already exists. Please try a different namespace."));
        }
        // Persist via the update path, which assigns the ID.
        namespace = updateNamespace(namespace);
        _logger.debug("Generated namespace {}.", namespace);
        return namespace;
    }
}
public class ExampleConvolution {

    /**
     * Convolves a 2D kernel over the input image and shows the result.
     *
     * @param gray the 8-bit input image to convolve
     */
    private static void convolve2D(GrayU8 gray) {
        // By default 2D kernels will be centered around width/2.
        Kernel2D_S32 kernel = new Kernel2D_S32(3);
        // An edge-like kernel: +2 on the top/right, -2 on the left/bottom.
        kernel.set(1, 0, 2);
        kernel.set(2, 1, 2);
        kernel.set(0, 1, -2);
        kernel.set(1, 2, -2);
        // Output needs to handle the increased value range after convolution,
        // so it can't be 8-bit; use a signed 16-bit image instead.
        GrayS16 output = new GrayS16(gray.width, gray.height);
        ImageBorder<GrayU8> border = FactoryImageBorder.wrap(BorderType.EXTENDED, gray);
        GConvolveImageOps.convolve(kernel, gray, output, border);
        panel.addImage(VisualizeImageData.standard(output, null), "2D Kernel");
    }
}
public class M2tsSettings { /** * Specify the packet identifiers ( PIDs ) for any elementary audio streams you include in this output . Specify
* multiple PIDs as a JSON array . Default is the range 482-492.
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAudioPids ( java . util . Collection ) } or { @ link # withAudioPids ( java . util . Collection ) } if you want to
* override the existing values .
* @ param audioPids
* Specify the packet identifiers ( PIDs ) for any elementary audio streams you include in this output . Specify
* multiple PIDs as a JSON array . Default is the range 482-492.
* @ return Returns a reference to this object so that method calls can be chained together . */
public M2tsSettings withAudioPids ( Integer ... audioPids ) { } } | if ( this . audioPids == null ) { setAudioPids ( new java . util . ArrayList < Integer > ( audioPids . length ) ) ; } for ( Integer ele : audioPids ) { this . audioPids . add ( ele ) ; } return this ; |
public class MarkerUtil {

    /**
     * Fish IMarkers out of the given selection.
     *
     * @param selection the selection
     * @return the selected IMarkers; empty (never null) if the selection is not
     *         structured or contains no markers
     */
    public static Set<IMarker> getMarkerFromSelection(ISelection selection) {
        Set<IMarker> markers = new HashSet<>();
        // Only structured selections can carry marker-bearing elements.
        if (!(selection instanceof IStructuredSelection)) {
            return markers;
        }
        IStructuredSelection sSelection = (IStructuredSelection) selection;
        // Collect the markers contributed by each selected element.
        for (Iterator<?> iter = sSelection.iterator(); iter.hasNext();) {
            Object next = iter.next();
            markers.addAll(getMarkers(next));
        }
        return markers;
    }
}
public class FileMixedMetaManager {

    /**
     * Helper: resolves the meta data file for the given destination, creating the
     * destination's meta directory on first use.
     *
     * @param destination the destination name (used as a sub-directory of dataDir)
     * @return the data file inside the destination's meta directory
     * @throws CanalMetaManagerException if the directory cannot be created
     */
    private File getDataFile(String destination) {
        File destinationMetaDir = new File(dataDir, destination);
        if (!destinationMetaDir.exists()) {
            try {
                // Create the whole directory chain if missing.
                FileUtils.forceMkdir(destinationMetaDir);
            } catch (IOException e) {
                throw new CanalMetaManagerException(e);
            }
        }
        return new File(destinationMetaDir, dataFileName);
    }
}
public class Vector2f { /** * / * ( non - Javadoc )
* @ see org . joml . Vector2fc # fma ( org . joml . Vector2fc , org . joml . Vector2fc , org . joml . Vector2f ) */
public Vector2f fma ( Vector2fc a , Vector2fc b , Vector2f dest ) { } } | dest . x = x + a . x ( ) * b . x ( ) ; dest . y = y + a . y ( ) * b . y ( ) ; return dest ; |
public class TapConsumerImpl {

    /**
     * {@inheritDoc}
     * Parses the TAP stream into a TestSet, wrapping parse failures in a
     * TapConsumerException that preserves the original cause.
     */
    @Override
    public TestSet load(Readable tapStream) {
        try {
            this.testSet = this.parser.parseTapStream(tapStream);
        } catch (ParserException e) {
            throw new TapConsumerException("Failed to parse TAP Stream " + tapStream + ": " + e.getMessage(), e);
        }
        return this.testSet;
    }
}
public class ObjectUtils {

    /**
     * Get all declared fields on the leaf class and all superclasses.
     * Leaf class fields are included first.
     *
     * @param leafClass the leaf class
     * @return all declared fields
     * @see ReflectionUtils#getAllDeclaredMethods(Class) since it is the same approach as this one
     */
    public static Field[] getAllDeclaredFields(Class<?> leafClass) {
        Assert.notNull(leafClass, "leafClass");
        // Pre-size generously; a class hierarchy rarely exceeds 32 fields.
        final List<Field> fields = new ArrayList<Field>(32);
        // Spring's walker visits the leaf class first, then each superclass.
        ReflectionUtils.doWithFields(leafClass, new ReflectionUtils.FieldCallback() {
            @Override
            public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
                fields.add(field);
            }
        });
        return fields.toArray(new Field[fields.size()]);
    }
}
public class AbstractFormComponentInterceptor { /** * Check for JScrollPane .
* @ param component
* @ return the component itself , or the inner component if it was a
* JScrollPane . */
protected JComponent getInnerComponent ( JComponent component ) { } } | if ( component instanceof JScrollPane ) { return getInnerComponent ( ( JComponent ) ( ( JScrollPane ) component ) . getViewport ( ) . getView ( ) ) ; } if ( component instanceof HasInnerComponent ) { return getInnerComponent ( ( ( HasInnerComponent ) component ) . getInnerComponent ( ) ) ; } return component ; |
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the EClass for IfcProductsOfCombustionProperties, lazily resolved
     * from the registered Ifc2x3tc1 EPackage (classifier index 389).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcProductsOfCombustionProperties() {
        // Lazy lookup; cached after the first call. NOTE(review): not synchronized —
        // presumably only accessed from a single thread, as is usual for EMF; confirm.
        if (ifcProductsOfCombustionPropertiesEClass == null) {
            ifcProductsOfCombustionPropertiesEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(389);
        }
        return ifcProductsOfCombustionPropertiesEClass;
    }
}
public class UniqueId { /** * Finds the ID associated with a given name or creates it .
* < strong > This method is blocking . < / strong > Its use within OpenTSDB itself
* is discouraged , please use { @ link # getOrCreateIdAsync } instead .
* The length of the byte array is fixed in advance by the implementation .
* @ param name The name to lookup in the table or to assign an ID to .
* @ throws HBaseException if there is a problem communicating with HBase .
* @ throws IllegalStateException if all possible IDs are already assigned .
* @ throws IllegalStateException if the ID found in HBase is encoded on the
* wrong number of bytes . */
public byte [ ] getOrCreateId ( final String name ) throws HBaseException { } } | try { return getIdAsync ( name ) . joinUninterruptibly ( ) ; } catch ( NoSuchUniqueName e ) { if ( tsdb != null && tsdb . getUidFilter ( ) != null && tsdb . getUidFilter ( ) . fillterUIDAssignments ( ) ) { try { if ( ! tsdb . getUidFilter ( ) . allowUIDAssignment ( type , name , null , null ) . join ( ) ) { rejected_assignments ++ ; throw new FailedToAssignUniqueIdException ( new String ( kind ) , name , 0 , "Blocked by UID filter." ) ; } } catch ( FailedToAssignUniqueIdException e1 ) { throw e1 ; } catch ( InterruptedException e1 ) { LOG . error ( "Interrupted" , e1 ) ; Thread . currentThread ( ) . interrupt ( ) ; } catch ( Exception e1 ) { throw new RuntimeException ( "Should never be here" , e1 ) ; } } Deferred < byte [ ] > assignment = null ; boolean pending = false ; synchronized ( pending_assignments ) { assignment = pending_assignments . get ( name ) ; if ( assignment == null ) { // to prevent UID leaks that can be caused when multiple time
// series for the same metric or tags arrive , we need to write a
// deferred to the pending map as quickly as possible . Then we can
// start the assignment process after we ' ve stashed the deferred
// and released the lock
assignment = new Deferred < byte [ ] > ( ) ; pending_assignments . put ( name , assignment ) ; } else { pending = true ; } } if ( pending ) { LOG . info ( "Already waiting for UID assignment: " + name ) ; try { return assignment . joinUninterruptibly ( ) ; } catch ( Exception e1 ) { throw new RuntimeException ( "Should never be here" , e1 ) ; } } // start the assignment dance after stashing the deferred
byte [ ] uid = null ; try { uid = new UniqueIdAllocator ( name , assignment ) . tryAllocate ( ) . joinUninterruptibly ( ) ; } catch ( RuntimeException e1 ) { throw e1 ; } catch ( Exception e1 ) { throw new RuntimeException ( "Should never be here" , e ) ; } finally { synchronized ( pending_assignments ) { if ( pending_assignments . remove ( name ) != null ) { LOG . info ( "Completed pending assignment for: " + name ) ; } } } return uid ; } catch ( Exception e ) { throw new RuntimeException ( "Should never be here" , e ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.