signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class VoldemortBuildAndPushJob { /** * Get the schema for the Avro Record from the object container file */ public String getRecordSchema ( ) throws IOException { } }
Schema schema = getInputPathAvroSchema ( ) ; String recSchema = schema . toString ( ) ; return recSchema ;
public class DefaultJobCatalogListenerImpl { /** * { @ inheritDoc } */ @ Override public void onUpdateJob ( JobSpec updatedJob ) { } }
if ( _log . isPresent ( ) ) { _log . get ( ) . info ( "JobSpec changed: " + updatedJob . toShortString ( ) ) ; }
public class GBSInsertHeight { /** * Restore the height balance of a tree following an insert . * @ param stack The NodeStack used to do the insert . * @ param q The root of the newly added fringe . */ void balance ( NodeStack stack , GBSNode q ) { } }
GBSNode p ; int bpidx = stack . balancePointIndex ( ) ; int x = bpidx ; GBSNode bpoint = stack . node ( x ) ; GBSNode bfather = stack . node ( x - 1 ) ; /* Adjust balance factors in intervening nodes */ if ( bpoint . leftChild ( ) == stack . node ( x + 1 ) ) p = bpoint . leftChild ( ) ; else p = bpoint . rightChild ( ) ; x ++ ; while ( p != q ) { if ( p . leftChild ( ) == stack . node ( x + 1 ) ) { /* We followed and added to left path */ p . setBalance ( - 1 ) ; /* It is now left heavy */ p = p . leftChild ( ) ; } else /* We followed and added to right path */ { p . setBalance ( 1 ) ; /* It is now right heavy */ p = p . rightChild ( ) ; } x ++ ; } /* Adjust the balance factor at the balance point . */ /* Re - balance if necessary . */ if ( bpoint . leftChild ( ) == stack . node ( bpidx + 1 ) ) { /* Added to left side */ int bpb = bpoint . balance ( ) ; switch ( bpb ) { case 0 : bpoint . setBalance ( - 1 ) ; break ; case 1 : bpoint . clearBalance ( ) ; break ; case - 1 : rotateLeft ( bfather , bpoint ) ; break ; default : String zzz1 = "Help1 !, bpb = " + bpb ; throw new RuntimeException ( zzz1 ) ; } } else /* Added to right side */ { int bpb = bpoint . balance ( ) ; switch ( bpb ) { case 0 : bpoint . setBalance ( 1 ) ; break ; case - 1 : bpoint . clearBalance ( ) ; break ; case 1 : rotateRight ( bfather , bpoint ) ; break ; default : String zzz2 = "Help2 !, bpb = " + bpb ; throw new RuntimeException ( zzz2 ) ; } }
public class SeleniumController { /** * This method starts the selenium remote control using the parameters * informed by testng . xml file * @ param parameters * @ throws Exception */ @ BeforeSuite ( alwaysRun = true ) @ Parameters ( value = { } }
"parameters" } ) public static void startSelenium ( String parameters ) { parametersMap = parameterScanner ( parameters ) ; parametersInfo ( ) ; String browserName = parametersMap . get ( "browser" ) , profile = parametersMap . get ( "profile" ) , chromeDriverBin = parametersMap . get ( "chromeDriverBin" ) , ieDriverBin = parametersMap . get ( "ieDriverBin" ) , chromeBin = parametersMap . get ( "chromeBin" ) , languages = parametersMap . get ( "languages" ) ; if ( browserName == null ) { throw new IllegalArgumentException ( String . format ( ErrorMessages . ERROR_TEMPLATE_VARIABLE_NULL , "browser" ) ) ; } if ( driver == null ) { if ( BrowsersList . FIREFOX . equalsString ( browserName ) ) { FirefoxProfile fp = new FirefoxProfile ( ) ; fp . setPreference ( "dom.max_script_run_time" , 0 ) ; fp . setPreference ( "dom.max_chrome_script_run_time" , 0 ) ; if ( profile != null && ! profile . isEmpty ( ) ) { fp . setPreference ( "webdriver.firefox.profile" , profile ) ; } if ( languages != null && ! languages . isEmpty ( ) ) { fp . setPreference ( "intl.accept_languages" , languages ) ; } driver = new WebDriverAdapter ( new FirefoxDriver ( fp ) ) ; } else if ( BrowsersList . CHROME . equalsString ( browserName ) ) { if ( chromeBin == null ) { throw new IllegalArgumentException ( String . format ( ErrorMessages . ERROR_TEMPLATE_VARIABLE_NULL , "chromeBin" ) ) ; } // Optional , if not specified , WebDriver will search your path for chromedriver // in the system environment . ( OBS : To evade problems , webdriver . chrome . driver MUST have a value . if ( System . getProperty ( "webdriver.chrome.driver" ) == null || System . getProperty ( "webdriver.chrome.driver" ) . isEmpty ( ) ) { if ( chromeDriverBin == null ) { throw new IllegalArgumentException ( String . format ( ErrorMessages . ERROR_TEMPLATE_VARIABLE_NULL , "chromeDriverBin" ) ) ; } System . 
setProperty ( "webdriver.chrome.driver" , chromeDriverBin ) ; } ChromeOptions co = new ChromeOptions ( ) ; // Get the chrome binary directory path from System Envionment . co . setBinary ( new File ( chromeBin ) ) ; driver = new WebDriverAdapter ( new ChromeDriver ( co ) ) ; } else if ( BrowsersList . IE . equalsString ( browserName ) ) { if ( ieDriverBin == null ) { throw new IllegalArgumentException ( String . format ( ErrorMessages . ERROR_TEMPLATE_VARIABLE_NULL , "ieDriverBin" ) ) ; } System . setProperty ( "webdriver.ie.driver" , ieDriverBin ) ; driver = new WebDriverAdapter ( new InternetExplorerDriver ( ) ) ; } else if ( BrowsersList . HTML_UNIT . equalsString ( browserName ) ) { driver = new HtmlUnitDriver ( true ) ; } else { throw new IllegalArgumentException ( ErrorMessages . ERROR_BROWSER_INVALID ) ; } } /* Sets to all driver methods the global timeout of 1 second . * To tests , Timeouts must be specified on the components . */ SeleniumController . driver . manage ( ) . timeouts ( ) . implicitlyWait ( 1 , TimeUnit . SECONDS ) ; SeleniumController . builder = new SeleniumBuilder ( driver ) ; SeleniumController . browser = new SeleniumBrowser ( ) ; ListenerGateway . setWebDriver ( driver ) ; ListenerGateway . setParameters ( parametersMap ) ;
public class PluginDefaultGroovyMethods { /** * Support the range subscript operator for StringBuilder . * Index values are treated as characters within the builder . * @ param self a StringBuilder * @ param range a Range * @ param value the object that ' s toString ( ) will be inserted */ public static void putAt ( StringBuilder self , IntRange range , Object value ) { } }
RangeInfo info = subListBorders ( self . length ( ) , range ) ; self . replace ( info . from , info . to , value . toString ( ) ) ;
public class ADAuthenticator { /** * Checks if is authenticed . * @ param host the host * @ param port the port * @ param userName the user name * @ param password the password * @ return true , if is authenticed * @ throws NamingException the naming exception */ public static boolean isAuthenticed ( String host , int port , String userName , String password ) throws NamingException { } }
log . info ( "isAuthenticed" ) ; // Set up the environment for creating the initial context Hashtable < String , String > env = new Hashtable < String , String > ( ) ; env . put ( Context . INITIAL_CONTEXT_FACTORY , "com.sun.jndi.ldap.LdapCtxFactory" ) ; env . put ( Context . PROVIDER_URL , "ldap://" + host + ":" + port ) ; env . put ( Context . SECURITY_AUTHENTICATION , "simple" ) ; env . put ( Context . SECURITY_PRINCIPAL , userName + "@" + host ) ; log . info ( env . toString ( ) ) ; env . put ( Context . SECURITY_CREDENTIALS , password ) ; // Create the initial context DirContext ctx = new InitialDirContext ( env ) ; log . info ( "DirContext Init Succ" ) ; boolean result = ctx != null ; if ( ctx != null ) { log . info ( "Closing DirContext" ) ; ctx . close ( ) ; } return result ;
public class ConfluenceGreenPepper { /** * < p > isCredentialsValid . < / p > * @ param username a { @ link java . lang . String } object . * @ param password a { @ link java . lang . String } object . * @ return a boolean . */ public boolean isCredentialsValid ( String username , String password ) { } }
try { String token = getTokenAuthenticationManager ( ) . login ( StringUtil . toEmptyIfNull ( username ) , StringUtil . toEmptyIfNull ( password ) ) ; getTokenAuthenticationManager ( ) . logout ( token ) ; return true ; } catch ( Exception ex ) { return false ; }
public class StreamingJsonBuilder { /** * The empty args call will create a key whose value will be an empty JSON object : * < pre class = " groovyTestCase " > * new StringWriter ( ) . with { w { @ code - > } * def json = new groovy . json . StreamingJsonBuilder ( w ) * json . person ( ) * assert w . toString ( ) = = ' { " person " : { } } ' * < / pre > * @ param name The name of the empty object to create * @ throws IOException */ public void call ( String name ) throws IOException { } }
writer . write ( generator . toJson ( Collections . singletonMap ( name , Collections . emptyMap ( ) ) ) ) ;
public class AbstractCasView { /** * Gets model attributes . * @ param model the model * @ return the model attributes */ protected Map < String , Object > getModelAttributes ( final Map < String , Object > model ) { } }
return ( Map < String , Object > ) model . get ( CasProtocolConstants . VALIDATION_CAS_MODEL_ATTRIBUTE_NAME_ATTRIBUTES ) ;
public class DockerUtils { /** * Finds an image by ID or by tag . * @ param name an image ID or a tag name ( can be null ) * @ param dockerClient a Docker client ( not null ) * @ return an image , or null if none matched */ public static Image findImageByIdOrByTag ( String name , DockerClient dockerClient ) { } }
Image image = null ; if ( ! Utils . isEmptyOrWhitespaces ( name ) ) { Logger logger = Logger . getLogger ( DockerUtils . class . getName ( ) ) ; List < Image > images = dockerClient . listImagesCmd ( ) . exec ( ) ; if ( ( image = DockerUtils . findImageById ( name , images ) ) != null ) logger . fine ( "Found a Docker image with ID " + name ) ; else if ( ( image = DockerUtils . findImageByTag ( name , images ) ) != null ) logger . fine ( "Found a Docker image with tag " + name ) ; } return image ;
public class ZipUtils { /** * Zips a number of collections into one list of tuples * @ return zipped list */ public static < T1 , T2 > List < Tuple2 < T1 , T2 > > zip ( Collection < T1 > col1 , Collection < T2 > col2 ) { } }
int resultSize = _max ( col1 . size ( ) , col2 . size ( ) ) ; List < Tuple2 < T1 , T2 > > result = new ArrayList < Tuple2 < T1 , T2 > > ( resultSize ) ; Iterator < T1 > it1 = col1 . iterator ( ) ; Iterator < T2 > it2 = col2 . iterator ( ) ; while ( resultSize -- > 0 ) { result . add ( tuple ( next ( it1 ) , next ( it2 ) ) ) ; } return result ;
public class WBeanComponent { /** * Sets the bean associated with this WBeanComponent . This method of bean association is discouraged , as the bean * will be stored in the user ' s session . A better alternative is to provide a BeanProvider and a Bean Id . * @ param bean the bean to associate */ @ Override public void setBean ( final Object bean ) { } }
BeanAndProviderBoundComponentModel model = getOrCreateComponentModel ( ) ; model . setBean ( bean ) ; if ( getBeanProperty ( ) == null ) { setBeanProperty ( "." ) ; } // Remove values in scratch map removeBeanFromScratchMap ( ) ;
public class InstancesDistributor { /** * http : / / stackoverflow . com / questions / 23903113 / mapreduce - error - usergroupinformation - priviledgedactionexception */ protected static String getInstancesFolder ( FileSystem fS , Configuration conf ) throws IOException { } }
return conf . get ( HDFS_TMP_FOLDER_CONF , fS . equals ( FileSystem . getLocal ( conf ) ) ? DEFAULT_LOCAL_TMP_FOLDER_CONF_VALUE : DEFAULT_HDFS_TMP_FOLDER_CONF_VALUE ) ;
public class DocumentRunner { /** * { @ inheritDoc } */ public void run ( String input , String output ) { } }
Report report = null ; try { report = reportGenerator . openReport ( output ) ; monitor . testRunning ( input ) ; final Timer total = new Timer ( ) . start ( ) ; documentRepository . setSpecificationDialect ( dialect ) ; Document document = documentRepository . loadDocument ( input ) ; document . setSections ( sections ) ; document . addFilter ( new CommentTableFilter ( ) ) ; document . addFilter ( new SectionsTableFilter ( sections ) ) ; document . addFilter ( new GreenPepperTableFilter ( lazy ) ) ; final Timer execution = new Timer ( ) . start ( ) ; systemUnderDevelopment . onStartDocument ( document ) ; document . execute ( newInterpreterSelector ( systemUnderDevelopment ) ) ; systemUnderDevelopment . onEndDocument ( document ) ; execution . stop ( ) ; document . done ( ) ; total . stop ( ) ; document . getTimeStatistics ( ) . tally ( total . elapse ( ) , execution . elapse ( ) ) ; report . generate ( document ) ; Statistics stats = document . getStatistics ( ) ; monitor . testDone ( stats . rightCount ( ) , stats . wrongCount ( ) , stats . exceptionCount ( ) , stats . ignoredCount ( ) ) ; } catch ( Exception e ) { LOGGER . error ( "Failed to run the specification " + input , e ) ; if ( report != null ) report . renderException ( e ) ; monitor . exceptionOccured ( e ) ; } finally { closeReport ( report ) ; }
public class Assert { /** * Asserts that two booleans are equal . If they are not * an AssertionFailedError is thrown with the given message . */ static public void assertEquals ( String message , boolean expected , boolean actual ) { } }
assertEquals ( message , Boolean . valueOf ( expected ) , Boolean . valueOf ( actual ) ) ;
public class AbstractIndex { /** * Closes this index , releasing all held resources . */ synchronized void close ( ) { } }
releaseWriterAndReaders ( ) ; if ( directory != null ) { try { directory . close ( ) ; } catch ( IOException e ) { directory = null ; } }
public class Iterate { /** * Returns the first element of the iterable that evaluates to true for the specified predicate2 and parameter , * or returns the result ifNone if no element evaluates to true . */ public static < T , P > T detectWithIfNone ( Iterable < T > iterable , Predicate2 < ? super T , ? super P > predicate , P parameter , T ifNone ) { } }
T result = Iterate . detectWith ( iterable , predicate , parameter ) ; return result == null ? ifNone : result ;
public class Dcs_sqr { /** * / * compute nnz ( V ) = S - > lnz , S - > pinv , S - > leftmost , S - > m2 from A and S - > parent */ private static boolean cs_vcount ( Dcs A , Dcss S ) { } }
int i , k , p , pa , n = A . n , m = A . m , Ap [ ] = A . p , Ai [ ] = A . i , next [ ] , head [ ] , tail [ ] , nque [ ] , pinv [ ] , leftmost [ ] , w [ ] , parent [ ] = S . parent ; S . pinv = pinv = new int [ m + n ] ; /* allocate pinv , */ S . leftmost = leftmost = new int [ m ] ; /* and leftmost */ w = new int [ m + 3 * n ] ; /* get workspace */ next = w ; head = w ; int head_offset = m ; tail = w ; int tail_offset = m + n ; nque = w ; int nque_offset = m + 2 * n ; for ( k = 0 ; k < n ; k ++ ) head [ head_offset + k ] = - 1 ; /* queue k is empty */ for ( k = 0 ; k < n ; k ++ ) tail [ tail_offset + k ] = - 1 ; for ( k = 0 ; k < n ; k ++ ) nque [ nque_offset + k ] = 0 ; for ( i = 0 ; i < m ; i ++ ) leftmost [ i ] = - 1 ; for ( k = n - 1 ; k >= 0 ; k -- ) { for ( p = Ap [ k ] ; p < Ap [ k + 1 ] ; p ++ ) { leftmost [ Ai [ p ] ] = k ; /* leftmost [ i ] = min ( find ( A ( i , : ) ) ) */ } } for ( i = m - 1 ; i >= 0 ; i -- ) /* scan rows in reverse order */ { pinv [ i ] = - 1 ; /* row i is not yet ordered */ k = leftmost [ i ] ; if ( k == - 1 ) continue ; /* row i is empty */ if ( nque [ nque_offset + k ] ++ == 0 ) tail [ tail_offset + k ] = i ; /* first row in queue k */ next [ i ] = head [ head_offset + k ] ; /* put i at head of queue k */ head [ head_offset + k ] = i ; } S . lnz = 0 ; S . m2 = m ; for ( k = 0 ; k < n ; k ++ ) /* find row permutation and nnz ( V ) */ { i = head [ head_offset + k ] ; /* remove row i from queue k */ S . lnz ++ ; /* count V ( k , k ) as nonzero */ if ( i < 0 ) i = S . m2 ++ ; /* add a fictitious row */ pinv [ i ] = k ; /* associate row i with V ( : , k ) */ if ( -- nque [ nque_offset + k ] <= 0 ) continue ; /* skip if V ( k + 1 : m , k ) is empty */ S . 
lnz += nque [ nque_offset + k ] ; /* nque [ nque _ offset + k ] is nnz ( V ( k + 1 : m , k ) ) */ if ( ( pa = parent [ k ] ) != - 1 ) /* move all rows to parent of k */ { if ( nque [ nque_offset + pa ] == 0 ) tail [ tail_offset + pa ] = tail [ tail_offset + k ] ; next [ tail [ tail_offset + k ] ] = head [ head_offset + pa ] ; head [ head_offset + pa ] = next [ i ] ; nque [ nque_offset + pa ] += nque [ nque_offset + k ] ; } } for ( i = 0 ; i < m ; i ++ ) if ( pinv [ i ] < 0 ) pinv [ i ] = k ++ ; w = null ; return ( true ) ;
public class StringUtil { /** * Checks that the given value ' s normalized length is in the allowed range . * @ param name the name of the property being checked , for error reporting . * @ param value the value . * @ param maxLen the maximum length allowed . * @ return the normalized value . * @ throws IllegalArgumentException if the normalized value is null , * zero - length , or exceeds the maximum length specified . */ public static String validate ( String name , String value , int maxLen ) throws IllegalArgumentException { } }
String normVal = normalize ( value ) ; if ( normVal == null ) { throw new IllegalArgumentException ( "A value must be specified for" + " '" + name + "'" ) ; } else if ( normVal . length ( ) > maxLen ) { throw new IllegalArgumentException ( "The value specified for" + " '" + name + "' was too long. It must not exceed" + " " + maxLen + " characters." ) ; } return normVal ;
public class AbstractMetamodelDeclarationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setEPackage ( EPackage newEPackage ) { } }
EPackage oldEPackage = ePackage ; ePackage = newEPackage ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XtextPackage . ABSTRACT_METAMODEL_DECLARATION__EPACKAGE , oldEPackage , ePackage ) ) ;
public class Comparators { /** * code to freak out ; I don ' t entirely understand why */ @ ReplacedBy ( "com.google.common.collect.Ordering.natural().nullsLast()" ) public static final < T extends Comparable < ? > > Comparator < T > comparable ( ) { } }
@ SuppressWarnings ( "unchecked" ) Comparator < T > comp = ( Comparator < T > ) COMPARABLE ; return comp ;
public class Skew { /** * { @ inheritDoc } * Additional constraints enforced by Karkkainen - Sanders algorithm : * < ul > * < li > non - negative ( & gt ; 0 ) symbols in the input ( because of radix sort ) < / li > * < li > < code > input . length < / code > & gt ; = < code > start + length + 3 < / code > ( to simplify * border cases ) < / li > * < li > length & gt ; = 2 < / li > * < / ul > * If the input contains zero or negative values , or has no extra trailing cells , * adapters can be used in the following way : * < pre > * return new { @ link DensePositiveDecorator } ( * new { @ link ExtraTrailingCellsDecorator } ( * new { @ link Skew } ( ) , 3 ) ) ; * < / pre > * @ see ExtraTrailingCellsDecorator * @ see DensePositiveDecorator */ @ Override public int [ ] buildSuffixArray ( int [ ] input , int start , int length ) { } }
Tools . assertAlways ( input != null , "input must not be null" ) ; Tools . assertAlways ( length >= 2 , "input length must be >= 2" ) ; Tools . assertAlways ( input . length >= start + length + 3 , "no extra space after input end" ) ; assert Tools . allPositive ( input , start , length ) ; final int alphabetSize = Tools . max ( input , start , length ) ; final int [ ] SA = new int [ length + 3 ] ; // Preserve the tail of the input ( destroyed when constructing the array ) . final int [ ] tail = new int [ 3 ] ; System . arraycopy ( input , start + length , tail , 0 , 3 ) ; Arrays . fill ( input , start + length , start + length + 3 , 0 ) ; suffixArray ( input , SA , length , alphabetSize , start , new int [ alphabetSize + 2 ] ) ; // Reconstruct the input ' s tail . System . arraycopy ( tail , 0 , input , start + length , 3 ) ; return SA ;
public class GroupedMap { /** * 指定分组中是否包含指定key * @ param group 分组 * @ param key 键 * @ return 是否包含key */ public boolean containsKey ( String group , String key ) { } }
group = StrUtil . nullToEmpty ( group ) . trim ( ) ; readLock . lock ( ) ; try { final LinkedHashMap < String , String > valueMap = this . get ( group ) ; if ( MapUtil . isNotEmpty ( valueMap ) ) { return valueMap . containsKey ( key ) ; } } finally { readLock . unlock ( ) ; } return false ;
public class CmsWorkplace { /** * Returns the current used macro resolver instance . < p > * @ return the macro resolver */ public CmsMacroResolver getMacroResolver ( ) { } }
if ( m_macroResolver == null ) { // create a new macro resolver " with everything we got " m_macroResolver = CmsMacroResolver . newInstance ( ) // initialize resolver with the objects available . setCmsObject ( m_cms ) . setMessages ( getMessages ( ) ) . setJspPageContext ( ( m_jsp == null ) ? null : m_jsp . getJspContext ( ) ) ; m_macroResolver . setParameterMap ( m_parameterMap ) ; } return m_macroResolver ;
public class PluginManager { /** * Get method object for a class / method name * @ param className name of class * @ param methodName name of method * @ return Method * @ throws Exception exception */ public com . groupon . odo . proxylib . models . Method getMethod ( String className , String methodName ) throws Exception { } }
// TODO : fix this so it returns the right override ID com . groupon . odo . proxylib . models . Method m = null ; // calls getClass first in case the loaded class needs to be invalidated Class < ? > gottenClass = getClass ( className ) ; ClassInformation classInfo = classInformation . get ( className ) ; String fullName = className + "." + methodName ; if ( methodInformation . containsKey ( fullName ) ) { m = methodInformation . get ( fullName ) ; } else { logger . info ( "Getting method info: {}" , fullName ) ; // Make a new classpool with the system classpath URLS // We create a new classpool each time since we want to reload plugin information in case it has changed . // Once a method is loaded this should not get called so the extra expense is not always taken as a hit ClassPool classPool = new ClassPool ( ) ; ClassLoader sysClassLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; // Get the URLs URL [ ] urls = ( ( URLClassLoader ) sysClassLoader ) . getURLs ( ) ; for ( int i = 0 ; i < urls . length ; i ++ ) { try { // insert all classpaths into the javassist classpool File f1 = new File ( urls [ i ] . getFile ( ) ) ; if ( f1 . exists ( ) ) { // only add if file exists to avoid an uncatchable exception in Java 8 classPool . insertClassPath ( urls [ i ] . getFile ( ) ) ; } } catch ( NotFoundException e ) { e . printStackTrace ( ) ; } } classPool . insertClassPath ( classInfo . pluginPath ) ; // load method information Method [ ] methods = gottenClass . getDeclaredMethods ( ) ; for ( Method method : methods ) { if ( method . getName ( ) . compareTo ( methodName ) != 0 ) { continue ; } try { // get annotation information Annotation [ ] annotations = method . getAnnotations ( ) ; for ( Annotation annotation : annotations ) { com . groupon . odo . proxylib . models . Method newMethod = new com . groupon . odo . proxylib . models . Method ( ) ; newMethod . setClassName ( className ) ; newMethod . setMethodName ( methodName ) ; newMethod . 
setMethod ( method ) ; newMethod . setMethodType ( annotation . annotationType ( ) . toString ( ) ) ; String [ ] argNames = null ; String [ ] defaultArgs = null ; String description = null ; // Convert to the right type and get annotation information if ( annotation . annotationType ( ) . toString ( ) . endsWith ( Constants . PLUGIN_RESPONSE_OVERRIDE_CLASS ) ) { ResponseOverride roAnnotation = ( ResponseOverride ) annotation ; newMethod . setHttpCode ( roAnnotation . httpCode ( ) ) ; description = roAnnotation . description ( ) ; argNames = roAnnotation . parameters ( ) ; defaultArgs = new String [ 0 ] ; newMethod . setOverrideVersion ( 1 ) ; } else if ( annotation . annotationType ( ) . toString ( ) . endsWith ( Constants . PLUGIN_RESPONSE_OVERRIDE_V2_CLASS ) ) { com . groupon . odo . plugin . v2 . ResponseOverride roAnnotation = ( com . groupon . odo . plugin . v2 . ResponseOverride ) annotation ; description = roAnnotation . description ( ) ; argNames = roAnnotation . parameters ( ) ; defaultArgs = roAnnotation . argDefaults ( ) ; newMethod . setBlockRequest ( roAnnotation . blockRequest ( ) ) ; newMethod . setOverrideVersion ( 2 ) ; } else { continue ; } // identify arguments // first arg is always a reserved that we skip ArrayList < String > params = new ArrayList < String > ( ) ; if ( method . getParameterTypes ( ) . length > 1 ) { for ( int x = 1 ; x < method . getParameterTypes ( ) . length ; x ++ ) { params . add ( method . getParameterTypes ( ) [ x ] . getName ( ) ) ; } } newMethod . setMethodArguments ( params . toArray ( new Object [ 0 ] ) ) ; newMethod . setMethodArgumentNames ( argNames ) ; newMethod . setMethodDefaultArguments ( defaultArgs ) ; newMethod . setDescription ( description ) ; newMethod . setIdString ( className + "." + methodName ) ; methodInformation . put ( fullName , newMethod ) ; m = newMethod ; } break ; } catch ( Exception e ) { // in this case we just return null since the method would be unuseable return null ; } } } return m ;
public class ConfigServerImpl { /** * private static long _ countx ( Mapping mapping ) { PCLCollection pcl = * ( ( MappingImpl ) mapping ) . getPCLCollection ( ) ; return pcl = = null ? 0 : pcl . count ( ) ; } */ @ Override public Cluster createClusterScope ( ) throws PageException { } }
Cluster cluster = null ; try { if ( Reflector . isInstaneOf ( getClusterClass ( ) , Cluster . class ) ) { cluster = ( Cluster ) ClassUtil . loadInstance ( getClusterClass ( ) , ArrayUtil . OBJECT_EMPTY ) ; cluster . init ( this ) ; } else if ( Reflector . isInstaneOf ( getClusterClass ( ) , ClusterRemote . class ) ) { ClusterRemote cb = ( ClusterRemote ) ClassUtil . loadInstance ( getClusterClass ( ) , ArrayUtil . OBJECT_EMPTY ) ; cluster = new ClusterWrap ( this , cb ) ; // cluster . init ( cs ) ; } } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } return cluster ;
public class MovementBounds { /** * Restricts x & amp ; y coordinates to current bounds ( as calculated in { @ link # set ( State ) } ) . * @ param x X coordinate * @ param y Y coordinate * @ param extraX Extra area bounds ( horizontal ) * @ param extraY Extra area bounds ( vertical ) * @ param out Output rectangle */ public void restrict ( float x , float y , float extraX , float extraY , PointF out ) { } }
tmpPointArr [ 0 ] = x ; tmpPointArr [ 1 ] = y ; if ( boundsRotation != 0f ) { // Rotating given point so we can apply rectangular bounds . tmpMatrix . setRotate ( - boundsRotation , boundsPivotX , boundsPivotY ) ; tmpMatrix . mapPoints ( tmpPointArr ) ; } // Applying restrictions tmpPointArr [ 0 ] = MathUtils . restrict ( tmpPointArr [ 0 ] , bounds . left - extraX , bounds . right + extraX ) ; tmpPointArr [ 1 ] = MathUtils . restrict ( tmpPointArr [ 1 ] , bounds . top - extraY , bounds . bottom + extraY ) ; if ( boundsRotation != 0f ) { // Rotating restricted point back to original coordinates tmpMatrix . setRotate ( boundsRotation , boundsPivotX , boundsPivotY ) ; tmpMatrix . mapPoints ( tmpPointArr ) ; } out . set ( tmpPointArr [ 0 ] , tmpPointArr [ 1 ] ) ;
public class RuntimeMojoSupport { /** * Resolves the base directory for the current execution . */ private File resolveBasedir ( ) throws IOException { } }
String path = null ; if ( project != null ) { File file = project . getBasedir ( ) ; if ( file != null ) { path = file . getAbsolutePath ( ) ; } } if ( path == null ) { path = session . getExecutionRootDirectory ( ) ; } if ( path == null ) { path = System . getProperty ( "user.dir" ) ; } return new File ( path ) . getCanonicalFile ( ) ;
public class Configuration { /** * Get the literal representation of the given constant * @ param o object * @ return literal representation */ @ SuppressWarnings ( "unchecked" ) public String asLiteral ( Object o ) { } }
if ( o == null || o instanceof Null ) { return "null" ; } else { Type type = javaTypeMapping . getType ( o . getClass ( ) ) ; if ( type != null ) { return templates . serialize ( type . getLiteral ( o ) , type . getSQLTypes ( ) [ 0 ] ) ; } else { throw new IllegalArgumentException ( "Unsupported literal type " + o . getClass ( ) . getName ( ) ) ; } }
public class StreamDescriptionSummary { /** * Represents the current enhanced monitoring settings of the stream . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setEnhancedMonitoring ( java . util . Collection ) } or { @ link # withEnhancedMonitoring ( java . util . Collection ) } if * you want to override the existing values . * @ param enhancedMonitoring * Represents the current enhanced monitoring settings of the stream . * @ return Returns a reference to this object so that method calls can be chained together . */ public StreamDescriptionSummary withEnhancedMonitoring ( EnhancedMetrics ... enhancedMonitoring ) { } }
if ( this . enhancedMonitoring == null ) { setEnhancedMonitoring ( new com . amazonaws . internal . SdkInternalList < EnhancedMetrics > ( enhancedMonitoring . length ) ) ; } for ( EnhancedMetrics ele : enhancedMonitoring ) { this . enhancedMonitoring . add ( ele ) ; } return this ;
public class AttachmentStateChangeMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AttachmentStateChange attachmentStateChange , ProtocolMarshaller protocolMarshaller ) { } }
if ( attachmentStateChange == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( attachmentStateChange . getAttachmentArn ( ) , ATTACHMENTARN_BINDING ) ; protocolMarshaller . marshall ( attachmentStateChange . getStatus ( ) , STATUS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AbstractMaterialDialogBuilder { /** * Obtains the üadding from a specific theme . * @ param themeResourceId * The resource id of the theme , the padding should be obtained from , as an { @ link * Integer } value */ private void obtainPadding ( @ StyleRes final int themeResourceId ) { } }
TypedArray typedArray = getContext ( ) . getTheme ( ) . obtainStyledAttributes ( themeResourceId , new int [ ] { R . attr . materialDialogPaddingLeft , R . attr . materialDialogPaddingTop , R . attr . materialDialogPaddingRight , R . attr . materialDialogPaddingBottom } ) ; int defaultLeftPadding = getContext ( ) . getResources ( ) . getDimensionPixelSize ( R . dimen . dialog_left_padding ) ; int defaultTopPadding = getContext ( ) . getResources ( ) . getDimensionPixelSize ( R . dimen . dialog_top_padding ) ; int defaultRightPadding = getContext ( ) . getResources ( ) . getDimensionPixelSize ( R . dimen . dialog_right_padding ) ; int defaultBottomPadding = getContext ( ) . getResources ( ) . getDimensionPixelSize ( R . dimen . dialog_bottom_padding ) ; int left = typedArray . getDimensionPixelSize ( 0 , defaultLeftPadding ) ; int top = typedArray . getDimensionPixelSize ( 1 , defaultTopPadding ) ; int right = typedArray . getDimensionPixelSize ( 2 , defaultRightPadding ) ; int bottom = typedArray . getDimensionPixelSize ( 3 , defaultBottomPadding ) ; setPadding ( left , top , right , bottom ) ;
public class JsonPolicyWriter { /** * Writes an array along with its values to the JSONGenerator . * @ param arrayName * name of the JSON array . * @ param values * values of the JSON array . */ private void writeJsonArray ( String arrayName , List < String > values ) throws JsonGenerationException , IOException { } }
writeJsonArrayStart ( arrayName ) ; for ( String value : values ) generator . writeString ( value ) ; writeJsonArrayEnd ( ) ;
public class AndroidUtil { /** * Utility method to create a standard tile renderer layer ( without hillshading ) . * @ param tileCache the cache * @ param mapViewPosition the position * @ param mapFile the map file * @ param renderTheme the render theme to use * @ return the layer */ public static TileRendererLayer createTileRendererLayer ( TileCache tileCache , IMapViewPosition mapViewPosition , MapDataStore mapFile , XmlRenderTheme renderTheme ) { } }
TileRendererLayer tileRendererLayer = new TileRendererLayer ( tileCache , mapFile , mapViewPosition , AndroidGraphicFactory . INSTANCE ) ; tileRendererLayer . setXmlRenderTheme ( renderTheme ) ; return tileRendererLayer ;
public class JavaSourceUtils {
    /**
     * Merges two method declarations into a single one.
     *
     * <p>If both are null, null is returned. If only one is non-null, that one is
     * returned unchanged. If both are non-null, a fresh declaration is assembled
     * field by field from the two inputs.
     *
     * @param one the first declaration, may be null
     * @param two the second declaration, may be null
     * @return the merged declaration, or null when both inputs are null
     */
    public static MethodDeclaration mergeMethod ( MethodDeclaration one , MethodDeclaration two ) { } }
if ( isAllNull ( one , two ) ) return null ;
MethodDeclaration md = null ;
if ( isAllNotNull ( one , two ) ) {
    md = new MethodDeclaration ( ) ;
    // Name and array count come from the first declaration only; callers are
    // presumably expected to pass declarations with matching names — TODO confirm.
    md . setName ( one . getName ( ) ) ;
    // "Selective" merges pick one value of the two (semantics defined by the
    // project helper mergeSelective); list merges drop duplicates.
    md . setType ( mergeSelective ( one . getType ( ) , two . getType ( ) ) ) ;
    md . setParameters ( mergeParameters ( one . getParameters ( ) , two . getParameters ( ) ) ) ;
    md . setTypeParameters ( findFirstNotNull ( one . getTypeParameters ( ) , two . getTypeParameters ( ) ) ) ;
    md . setThrows ( mergeListNoDuplicate ( one . getThrows ( ) , two . getThrows ( ) ) ) ;
    md . setAnnotations ( mergeListNoDuplicate ( one . getAnnotations ( ) , two . getAnnotations ( ) ) ) ;
    md . setArrayCount ( one . getArrayCount ( ) ) ;
    md . setModifiers ( mergeModifiers ( one . getModifiers ( ) , two . getModifiers ( ) ) ) ;
    md . setBody ( mergeBlock ( one . getBody ( ) , two . getBody ( ) ) ) ;
    md . setJavaDoc ( mergeSelective ( one . getJavaDoc ( ) , two . getJavaDoc ( ) ) ) ;
    LOG . info ( "merge MethodDeclaration --> {}" , md . getName ( ) ) ;
} else {
    // Exactly one side is non-null: adopt it as-is.
    md = findFirstNotNull ( one , two ) ;
    LOG . info ( "add MethodDeclaration --> {}" , md . getName ( ) ) ;
}
return md ;
public class MSPDIReader {
    /**
     * Read a single calendar exception.
     *
     * @param bc parent calendar
     * @param exception exception data
     */
    private void readException ( ProjectCalendar bc , Project . Calendars . Calendar . Exceptions . Exception exception ) { } }
Date fromDate = exception . getTimePeriod ( ) . getFromDate ( ) ;
Date toDate = exception . getTimePeriod ( ) . getToDate ( ) ;
// Vico Schedule Planner seems to write start and end dates to FromTime and ToTime
// rather than FromDate and ToDate. This is plain wrong, and appears to be ignored by MS Project
// so we will ignore it too! (Exceptions with null dates are therefore skipped entirely.)
if ( fromDate != null && toDate != null ) {
    ProjectCalendarException bce = bc . addCalendarException ( fromDate , toDate ) ;
    bce . setName ( exception . getName ( ) ) ;
    readRecurringData ( bce , exception ) ;
    Project . Calendars . Calendar . Exceptions . Exception . WorkingTimes times = exception . getWorkingTimes ( ) ;
    if ( times != null ) {
        List < Project . Calendars . Calendar . Exceptions . Exception . WorkingTimes . WorkingTime > time = times . getWorkingTime ( ) ;
        for ( Project . Calendars . Calendar . Exceptions . Exception . WorkingTimes . WorkingTime period : time ) {
            Date startTime = period . getFromTime ( ) ;
            Date endTime = period . getToTime ( ) ;
            if ( startTime != null && endTime != null ) {
                // A range ending at or before its start is assumed to wrap past
                // midnight, so the end is shifted one day forward.
                if ( startTime . getTime ( ) >= endTime . getTime ( ) ) {
                    endTime = DateHelper . addDays ( endTime , 1 ) ;
                }
                bce . addRange ( new DateRange ( startTime , endTime ) ) ;
            }
        }
    }
}
public class CoreEncoders {
    /**
     * Encodes multipart/form-data where the body content must be an instance of the {@link MultipartContent} class. Individual parts will be
     * encoded using the encoders available to the {@link ChainedHttpConfig} object.
     *
     * @param config the chained configuration object
     * @param ts the server adapter
     */
    public static void multipart ( final ChainedHttpConfig config , final ToServer ts ) { } }
try {
    final ChainedHttpConfig . ChainedRequest request = config . getChainedRequest ( ) ;
    final Object body = request . actualBody ( ) ;
    if ( ! ( body instanceof MultipartContent ) ) {
        throw new IllegalArgumentException ( "Multipart body content must be MultipartContent." ) ;
    }
    // Both form-data and mixed content types are accepted, although the error
    // message below only mentions form-data.
    final String contentType = request . actualContentType ( ) ;
    if ( ! ( contentType . equals ( MULTIPART_FORMDATA . getAt ( 0 ) ) || contentType . equals ( MULTIPART_MIXED . getAt ( 0 ) ) ) ) {
        throw new IllegalArgumentException ( "Multipart body content must be multipart/form-data." ) ;
    }
    final MimeMultipart mimeMultipart = new MimeMultipart ( ) ;
    for ( final MultipartContent . MultipartPart mpe : ( ( MultipartContent ) body ) . parts ( ) ) {
        mimeMultipart . addBodyPart ( part ( config , mpe ) ) ;
    }
    // The multipart implementation generates the boundary, so the request's
    // content type must be replaced with the one it produced.
    request . setContentType ( mimeMultipart . getContentType ( ) ) ;
    final ByteArrayOutputStream bytesOut = new ByteArrayOutputStream ( ) ;
    mimeMultipart . writeTo ( bytesOut ) ;
    ts . toServer ( new ByteArrayInputStream ( bytesOut . toByteArray ( ) ) ) ;
} catch ( IOException | MessagingException ioe ) {
    // NOTE(review): the exception is swallowed and only printed to stderr, so a
    // failed encode silently sends nothing to the server. Consider rethrowing as
    // a runtime exception instead of printStackTrace().
    ioe . printStackTrace ( ) ;
}
public class ConfigValidator { /** * Answer the definition of an attribute . Answer null if a definition * is not available . * @ param registryEntry The registry entry from which to retrieve the attribute definition . * @ param attributeName The name of the attribute to locate . * @ return The definition of the attribute . Null if no definition is available for the attribute . */ private ExtendedAttributeDefinition getAttributeDefinition ( RegistryEntry registryEntry , String attributeName ) { } }
if ( registryEntry == null ) { return null ; } ExtendedObjectClassDefinition definition = registryEntry . getObjectClassDefinition ( ) ; if ( definition == null ) { return null ; } Map < String , ExtendedAttributeDefinition > attributeMap = definition . getAttributeMap ( ) ; ExtendedAttributeDefinition attributeDefinition = attributeMap . get ( attributeName ) ; return attributeDefinition ;
public class TagAttributesImpl { /** * Get all TagAttributes for the passed namespace * @ param namespace * namespace to search * @ return a non - null array of TagAttributes */ public TagAttribute [ ] getAll ( String namespace ) { } }
int idx = 0 ; if ( namespace == null ) { idx = Arrays . binarySearch ( _namespaces , "" ) ; } else { idx = Arrays . binarySearch ( _namespaces , namespace ) ; } if ( idx >= 0 ) { return _nsattrs . get ( idx ) ; } return EMPTY ;
public class BatchedDataPoints { /** * Computes the proper offset to reach qualifier * @ param i * @ return */ private int qualifierOffset ( final int i ) { } }
int offset = 0 ; for ( int j = 0 ; j < i ; j ++ ) { offset += Internal . getQualifierLength ( batched_qualifier , offset ) ; } return offset ;
public class MolecularFormulaManipulator {
    /**
     * The parenthesis convention is used to show a quantity by which a formula is multiplied.
     * For example: (C12H20O11)2 really means that a C24H40O22 unit.
     *
     * <p>Expands the innermost parenthesised group, then recurses until no
     * opening parenthesis remains.
     *
     * @param formula Formula to correct
     * @return Formula with the correction
     */
    private static String breakExtractor ( String formula ) { } }
// finalBreak flips to true once the matching ')' has been consumed; from then
// on we collect the trailing multiplier digits.
boolean finalBreak = false ;
// lastIndexOf("(") finds the innermost opening bracket (nothing nested follows it).
int innerMostBracket = formula . lastIndexOf ( "(" ) ;
if ( innerMostBracket < 0 ) return formula ;
String finalformula = formula . substring ( 0 , innerMostBracket ) ;
String multipliedformula = "" ;
String formulaEnd = "" ;
String multiple = "" ;
for ( int f = innerMostBracket + 1 ; f < formula . length ( ) ; f ++ ) {
    char thisChar = formula . charAt ( f ) ;
    if ( finalBreak ) {
        if ( isDigit ( thisChar ) ) {
            multiple += thisChar ;
        } else {
            // First non-digit after the ')': everything from here on is kept verbatim.
            formulaEnd = formula . substring ( f , formula . length ( ) ) ;
            break ;
        }
    } else {
        if ( thisChar == ')' ) {
            finalBreak = true ;
        } else multipliedformula += thisChar ;
    }
}
// A group with no explicit count, e.g. "(OH)", defaults to a multiplier of 1.
// NOTE: "muliplier" is the (misspelled) name of the project helper that expands
// the group; it cannot be renamed from here.
finalformula += muliplier ( multipliedformula , multiple . isEmpty ( ) ? 1 : Integer . valueOf ( multiple ) ) + formulaEnd ;
if ( finalformula . contains ( "(" ) ) return breakExtractor ( finalformula ) ;
else return finalformula ;
public class CommerceWishListUtil {
    /**
     * Returns all the commerce wish lists where userId = &#63; and createDate &lt; &#63;.
     *
     * <p>Pure delegation to the persistence layer; no caching or filtering happens here.
     *
     * @param userId the user ID
     * @param createDate the create date (exclusive upper bound)
     * @return the matching commerce wish lists
     */
    public static List < CommerceWishList > findByU_LtC ( long userId , Date createDate ) { } }
return getPersistence ( ) . findByU_LtC ( userId , createDate ) ;
public class BundleMatcher {
    /**
     * Inspect the mapping between permutations of locale fields and bundle identifiers.
     *
     * <p>Produces a human-readable, two-column table of every key in
     * {@code availableBundlesMap} and the bundle it resolves to, with keys
     * emitted in sorted order.
     *
     * @return the formatted table as a string
     */
    public String dump ( ) { } }
StringBuilder buf = new StringBuilder ( ) ;
buf . append ( "              LOCALE  MATCHED BUNDLE\n" ) ;
buf . append ( "              ======  ==============\n" ) ;
// NOTE(review): the keys are cast to MetaLocale for sorting but iterated as
// CLDR.Locale below — presumably MetaLocale is a CLDR.Locale subtype; confirm.
List < MetaLocale > keys = availableBundlesMap . keySet ( ) . stream ( ) . map ( k -> ( MetaLocale ) k ) . collect ( Collectors . toList ( ) ) ;
Collections . sort ( keys ) ;
for ( CLDR . Locale key : keys ) {
    buf . append ( String . format ( "%20s -> %s\n" , key , availableBundlesMap . get ( key ) ) ) ;
}
return buf . toString ( ) ;
public class Refactorization {
    /**
     * Generic method to rename a SymbolDefinition variable/parameter.
     *
     * <p>The rename is refused (returns false) when {@code newName} already
     * exists in the symbol's scope. Otherwise every recorded usage is rewritten
     * in the AST; when the new name would be shadowed at a usage site the
     * reference is qualified as {@code this.newName} instead.
     *
     * @param n variable to rename.
     * @param newName new name to set.
     * @return if the rename procedure has been applied successfully.
     */
    public boolean refactorVariable ( SymbolDefinition n , final String newName ) { } }
Map < String , SymbolDefinition > scope = n . getVariableDefinitions ( ) ;
if ( ! scope . containsKey ( newName ) ) {
    if ( n . getUsages ( ) != null ) {
        // Copy the usage list first: rewriting nodes may mutate the original collection.
        List < SymbolReference > usages = new LinkedList < SymbolReference > ( n . getUsages ( ) ) ;
        VoidVisitorAdapter < ? > visitor = new VoidVisitorAdapter < Object > ( ) {
            @ Override public void visit ( NameExpr nexpr , Object ctx ) {
                Map < String , SymbolDefinition > innerScope = nexpr . getVariableDefinitions ( ) ;
                if ( innerScope . containsKey ( newName ) ) {
                    // newName is shadowed here: qualify the reference with "this.".
                    nexpr . getParentNode ( ) . replaceChildNode ( nexpr , new FieldAccessExpr ( new ThisExpr ( ) , newName ) ) ;
                } else {
                    nexpr . getParentNode ( ) . replaceChildNode ( nexpr , new NameExpr ( newName ) ) ;
                }
            }
            @ Override public void visit ( FieldAccessExpr nexpr , Object ctx ) {
                // Field accesses keep their scope/type arguments, only the name changes.
                nexpr . getParentNode ( ) . replaceChildNode ( nexpr , new FieldAccessExpr ( nexpr . getScope ( ) , nexpr . getTypeArgs ( ) , newName ) ) ;
            }
        } ;
        for ( SymbolReference usage : usages ) {
            Node aux = ( Node ) usage ;
            aux . accept ( visitor , null ) ;
        }
    }
    // Note: also returns true when the symbol has no usages at all.
    return true ;
}
return false ;
public class DockPaneTargetDragAdapter { /** * { @ inheritDoc } */ @ Override public void dragOver ( final DragEvent dragEvent ) { } }
if ( dragEvent . getSource ( ) instanceof Region ) { if ( dragEvent . getX ( ) > 100 && dragEvent . getX ( ) < ( ( Region ) dragEvent . getSource ( ) ) . getWidth ( ) - 100 || dragEvent . getY ( ) > 100 && dragEvent . getY ( ) < ( ( Region ) dragEvent . getSource ( ) ) . getHeight ( ) - 100 ) { System . out . println ( "drag OVER on => " + controller ( ) . model ( ) . key ( ) ) ; controller ( ) . view ( ) . drawMarker ( dragEvent . getX ( ) , dragEvent . getY ( ) ) ; dragEvent . consume ( ) ; } else { controller ( ) . view ( ) . removeMarker ( ) ; } }
public class Model { /** * Copies all attribute values ( except for ID , created _ at and updated _ at ) from other instance to this one . * @ param other source model . */ public void copyFrom ( Model other ) { } }
if ( ! metaModelLocal . getTableName ( ) . equals ( other . metaModelLocal . getTableName ( ) ) ) { throw new IllegalArgumentException ( "can only copy between the same types" ) ; } Map < String , Object > otherAttributes = other . getAttributes ( ) ; for ( String name : metaModelLocal . getAttributeNamesSkipId ( ) ) { attributes . put ( name , otherAttributes . get ( name ) ) ; dirtyAttributeNames . add ( name ) ; // Why not use setRaw ( ) here ? Does the same and avoids duplication of code . . . ( Garagoth ) // other . setRaw ( name , getRaw ( name ) ) ; }
public class ResponseStatusDetails { /** * Helper method to convert a { @ link ByteBuf } input into the details . * It will NOT release the buffer . */ public static ResponseStatusDetails convert ( final ByteBuf input ) { } }
if ( input . readableBytes ( ) <= 0 ) { return null ; } try { byte [ ] inputBytes = new byte [ input . readableBytes ( ) ] ; input . readBytes ( inputBytes ) ; HashMap < String , HashMap < String , String > > result = DefaultObjectMapper . readValue ( inputBytes , JACKSON_TYPEREF ) ; HashMap < String , String > errorMap = result . get ( "error" ) ; if ( errorMap == null ) { LOGGER . warn ( "Exception while converting ResponseStatusDetails (no error json object), ignoring." ) ; return null ; } return new ResponseStatusDetails ( errorMap . get ( "ref" ) , errorMap . get ( "context" ) ) ; } catch ( Exception ex ) { LOGGER . warn ( "Exception while converting ResponseStatusDetails, ignoring." , ex ) ; return null ; }
public class CopyCriteria {
    /**
     * Get the set of Column objects that represent those columns referenced by the visitable object.
     *
     * @param visitable the object to be visited
     * @return the set of Column objects, with column names that always are the string-form of the {@link Column#getPropertyName()
     * property name}; never null
     */
    public static Set < Column > getColumnsReferencedBy ( Visitable visitable ) { } }
if ( visitable == null ) return Collections . emptySet ( ) ;
final Set < Column > symbols = new HashSet < Column > ( ) ;
// Walk the entire structure, so only supply a StrategyVisitor (that does no navigation)...
Visitors . visitAll ( visitable , new AbstractVisitor ( ) {
    // Helper: record a synthetic column whose name and alias are both the property name.
    protected void addColumnFor ( SelectorName selectorName , String property ) {
        symbols . add ( new Column ( selectorName , property , property ) ) ;
    }
    @ Override public void visit ( Column column ) {
        symbols . add ( column ) ;
    }
    @ Override public void visit ( EquiJoinCondition joinCondition ) {
        // Both sides of an equi-join reference a column.
        addColumnFor ( joinCondition . selector1Name ( ) , joinCondition . getProperty1Name ( ) ) ;
        addColumnFor ( joinCondition . selector2Name ( ) , joinCondition . getProperty2Name ( ) ) ;
    }
    @ Override public void visit ( PropertyExistence prop ) {
        addColumnFor ( prop . selectorName ( ) , prop . getPropertyName ( ) ) ;
    }
    @ Override public void visit ( PropertyValue prop ) {
        addColumnFor ( prop . selectorName ( ) , prop . getPropertyName ( ) ) ;
    }
    @ Override public void visit ( ReferenceValue ref ) {
        // A reference value may omit the property name, meaning "any reference property".
        String propertyName = ref . getPropertyName ( ) ;
        if ( propertyName != null ) {
            addColumnFor ( ref . selectorName ( ) , propertyName ) ;
        }
    }
} ) ;
return symbols ;
public class XmlParser { /** * Parse an enumeration . * < pre > * [ 59 ] Enumeration : : = ' ( ' S ? Nmtoken ( S ? ' | ' S ? Nmtoken ) * S ? ' ) ' * < / pre > * NOTE : the ' ( ' has already been read . */ private void parseEnumeration ( boolean isNames ) throws Exception { } }
dataBufferAppend ( '(' ) ; // Read the first token . skipWhitespace ( ) ; dataBufferAppend ( readNmtoken ( isNames ) ) ; // Read the remaining tokens . skipWhitespace ( ) ; while ( ! tryRead ( ')' ) ) { require ( '|' ) ; dataBufferAppend ( '|' ) ; skipWhitespace ( ) ; dataBufferAppend ( readNmtoken ( isNames ) ) ; skipWhitespace ( ) ; } dataBufferAppend ( ')' ) ;
public class ConnectionThrottleFilter { /** * Method responsible for deciding if a connection is OK * to continue * @ param session * The new session that will be verified * @ return * True if the session meets the criteria , otherwise false */ protected boolean isConnectionOk ( IoSession session ) { } }
SocketAddress remoteAddress = session . getRemoteAddress ( ) ; if ( remoteAddress instanceof InetSocketAddress ) { InetSocketAddress addr = ( InetSocketAddress ) remoteAddress ; long now = System . currentTimeMillis ( ) ; if ( clients . containsKey ( addr . getAddress ( ) . getHostAddress ( ) ) ) { LOGGER . debug ( "This is not a new client" ) ; Long lastConnTime = clients . get ( addr . getAddress ( ) . getHostAddress ( ) ) ; clients . put ( addr . getAddress ( ) . getHostAddress ( ) , now ) ; // if the interval between now and the last connection is // less than the allowed interval , return false if ( now - lastConnTime < allowedInterval ) { LOGGER . warn ( "Session connection interval too short" ) ; return false ; } return true ; } clients . put ( addr . getAddress ( ) . getHostAddress ( ) , now ) ; return true ; } return false ;
public class KeyManagementServiceClient { /** * Create a new [ CryptoKey ] [ google . cloud . kms . v1 . CryptoKey ] within a * [ KeyRing ] [ google . cloud . kms . v1 . KeyRing ] . * < p > [ CryptoKey . purpose ] [ google . cloud . kms . v1 . CryptoKey . purpose ] and * [ CryptoKey . version _ template . algorithm ] [ google . cloud . kms . v1 . CryptoKeyVersionTemplate . algorithm ] * are required . * < p > Sample code : * < pre > < code > * try ( KeyManagementServiceClient keyManagementServiceClient = KeyManagementServiceClient . create ( ) ) { * KeyRingName parent = KeyRingName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ KEY _ RING ] " ) ; * String cryptoKeyId = " my - app - key " ; * CryptoKey . CryptoKeyPurpose purpose = CryptoKey . CryptoKeyPurpose . ENCRYPT _ DECRYPT ; * long seconds = 2147483647L ; * Timestamp nextRotationTime = Timestamp . newBuilder ( ) * . setSeconds ( seconds ) * . build ( ) ; * long seconds2 = 604800L ; * Duration rotationPeriod = Duration . newBuilder ( ) * . setSeconds ( seconds2) * . build ( ) ; * CryptoKey cryptoKey = CryptoKey . newBuilder ( ) * . setPurpose ( purpose ) * . setNextRotationTime ( nextRotationTime ) * . setRotationPeriod ( rotationPeriod ) * . build ( ) ; * CryptoKey response = keyManagementServiceClient . createCryptoKey ( parent , cryptoKeyId , cryptoKey ) ; * < / code > < / pre > * @ param parent Required . The [ name ] [ google . cloud . kms . v1 . KeyRing . name ] of the KeyRing associated * with the [ CryptoKeys ] [ google . cloud . kms . v1 . CryptoKey ] . * @ param cryptoKeyId Required . It must be unique within a KeyRing and match the regular * expression ` [ a - zA - Z0-9 _ - ] { 1,63 } ` * @ param cryptoKey A [ CryptoKey ] [ google . cloud . kms . v1 . CryptoKey ] with initial field values . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final CryptoKey createCryptoKey ( KeyRingName parent , String cryptoKeyId , CryptoKey cryptoKey ) { } }
CreateCryptoKeyRequest request = CreateCryptoKeyRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setCryptoKeyId ( cryptoKeyId ) . setCryptoKey ( cryptoKey ) . build ( ) ; return createCryptoKey ( request ) ;
public class CustomHttpPattern {
    /**
     * <pre>
     * The name of this custom HTTP verb.
     * </pre>
     *
     * <code>string kind = 1;</code>
     *
     * <p>Standard protobuf lazy string decode: the field holds either a String
     * or a ByteString; on first access the ByteString is decoded to UTF-8 and
     * cached back into the field.
     */
    public java . lang . String getKind ( ) { } }
java . lang . Object ref = kind_ ;
if ( ref instanceof java . lang . String ) {
    // Already decoded (or set directly as a String).
    return ( java . lang . String ) ref ;
} else {
    com . google . protobuf . ByteString bs = ( com . google . protobuf . ByteString ) ref ;
    java . lang . String s = bs . toStringUtf8 ( ) ;
    // Cache the decoded form so subsequent calls skip the conversion.
    kind_ = s ;
    return s ;
}
public class CommonExpectations {
    /**
     * Sets expectations that will check:
     * <ol>
     * <li>Successfully reached the specified URL
     * <li>Response text includes JWT cookie and principal information
     * </ol>
     *
     * <p>Convenience overload that delegates with the default issuer regex.
     */
    public static Expectations successfullyReachedProtectedResourceWithJwtCookie ( String testAction , String protectedUrl , String username ) { } }
return successfullyReachedProtectedResourceWithJwtCookie ( testAction , protectedUrl , username , JwtFatConstants . DEFAULT_ISS_REGEX ) ;
public class ResolverRuleConfigMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * <p>Standard generated marshaller: each field is written through its
     * pre-built binding; any failure is wrapped in a client-side exception.
     *
     * @throws SdkClientException if the input is null or marshalling fails
     */
    public void marshall ( ResolverRuleConfig resolverRuleConfig , ProtocolMarshaller protocolMarshaller ) { } }
if ( resolverRuleConfig == null ) {
    throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
}
try {
    protocolMarshaller . marshall ( resolverRuleConfig . getName ( ) , NAME_BINDING ) ;
    protocolMarshaller . marshall ( resolverRuleConfig . getTargetIps ( ) , TARGETIPS_BINDING ) ;
    protocolMarshaller . marshall ( resolverRuleConfig . getResolverEndpointId ( ) , RESOLVERENDPOINTID_BINDING ) ;
} catch ( Exception e ) {
    // Preserve the cause so callers can see what actually failed.
    throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
}
public class DatabaseDAODefaultImpl { public String get_info ( Database database ) throws DevFailed { } }
if ( ! database . isAccess_checked ( ) ) checkAccess ( database ) ; // Query info from database DeviceData argOut = command_inout ( database , "DbInfo" ) ; String [ ] info = argOut . extractStringArray ( ) ; // format result as string return stringArray2String ( info ) ;
public class AOStream {
    /**
     * Start the stream, i.e., start sending data and control messages.
     *
     * <p>All state transitions happen under the stream lock; the trailing trace
     * exit is issued outside it.
     */
    public void start ( ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "start" ) ;
synchronized ( this ) {
    active = true ;
    // start the liveness timer for sending ControlRequestHighestGeneratedTick, if needed
    if ( ! completedTicksInitialized ) {
        // Lazily allocate a unique request id the first time we start un-initialized.
        if ( initRequestId == - 1 ) initRequestId = parent . generateUniqueValue ( ) ;
        requestHighestGeneratedTickTimer = new RequestHighestGeneratedTick ( ) ;
        // we call the alarm directly (fires immediately instead of waiting one period)
        requestHighestGeneratedTickTimer . alarm ( null ) ;
    }
    if ( parent . getCardinalityOne ( ) && ! isFlushed ) {
        inactivityTimer = am . create ( mp . getCustomProperties ( ) . get_remote_consumer_cardinality_inactivity_interval ( ) , inactivityHandler ) ;
    }
    if ( resetRequestAckSender != null ) {
        // start() returns true when the sender has nothing left to do and can be dropped.
        boolean done = resetRequestAckSender . start ( ) ;
        if ( done ) resetRequestAckSender = null ;
    }
    // start all the liveness timers for value ticks
    dem . startTimer ( ) ;
    if ( imeRestorationHandler != null ) imeRestorationHandler . startTimer ( ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "start" ) ;
public class BadgeRenderer { /** * This methods generates the HTML code of the current b : badge . * @ param context * the FacesContext . * @ param component * the current b : badge . * @ throws IOException * thrown if something goes wrong when writing the HTML code . */ @ Override public void encodeBegin ( FacesContext context , UIComponent component ) throws IOException { } }
if ( ! component . isRendered ( ) ) { return ; } Badge badge = ( Badge ) component ; ResponseWriter rw = context . getResponseWriter ( ) ; String clientId = badge . getClientId ( ) ; if ( ! component . isRendered ( ) ) { return ; } String styleClass = badge . getStyleClass ( ) ; String style = badge . getStyle ( ) ; String val = getValue2Render ( context , badge ) ; generateBadge ( context , badge , rw , clientId , styleClass , style , val , null ) ;
public class Channel { /** * Not public */ Collection < LifecycleQueryInstalledChaincodesProposalResponse > lifecycleQueryInstalledChaincodes ( LifecycleQueryInstalledChaincodesRequest lifecycleQueryInstalledChaincodesRequest , Collection < Peer > peers ) throws InvalidArgumentException , ProposalException { } }
logger . trace ( "LifecycleQueryInstalledChaincodes" ) ; if ( null == lifecycleQueryInstalledChaincodesRequest ) { throw new InvalidArgumentException ( "The lifecycleQueryInstalledChaincodesRequest parameter can not be null." ) ; } checkPeers ( peers ) ; if ( ! isSystemChannel ( ) ) { throw new InvalidArgumentException ( "LifecycleQueryInstalledChaincodes should only be invoked on system channel." ) ; } try { TransactionContext context = getTransactionContext ( lifecycleQueryInstalledChaincodesRequest ) ; FabricProposal . Proposal proposalBuilder = LifecycleQueryInstalledChaincodesBuilder . newBuilder ( ) . context ( context ) . build ( ) ; SignedProposal qProposal = getSignedProposal ( context , proposalBuilder ) ; return sendProposalToPeers ( peers , qProposal , context , LifecycleQueryInstalledChaincodesProposalResponse . class ) ; } catch ( ProposalException e ) { throw e ; } catch ( Exception e ) { throw new ProposalException ( format ( "Query for peer %s channels failed. " + e . getMessage ( ) , name ) , e ) ; }
import java.time.DateTimeException;
import java.time.LocalDate;

class VerifyDate {

    /**
     * Confirms whether a date in the Gregorian calendar is valid.
     *
     * <pre>
     * verifyDate(12, 25, 2020) -&gt; true
     * verifyDate(2, 29, 2019)  -&gt; false   (2019 is not a leap year)
     * </pre>
     *
     * @param month the month of the year, 1-12
     * @param day   the day of the month
     * @param year  the year
     * @return {@code true} if the combination denotes an existing calendar date
     */
    public static Boolean verifyDate(int month, int day, int year) {
        try {
            LocalDate.of(year, month, day);
            return true;
        } catch (DateTimeException e) {
            // Fix: narrowed from a blanket catch (Exception) — LocalDate.of only
            // throws DateTimeException for out-of-range field values, and the
            // broad catch could hide unrelated programming errors.
            return false;
        }
    }
}
public class LineReadingIterator { /** * Loads next line into lookahead spot * @ return whether any item was loaded into next field */ protected boolean lookahead ( ) { } }
try { next = this . reader . readLine ( ) ; if ( next == null ) { // TODO : make this close - on - exhaust optional ? reader . close ( ) ; } return ( next != null ) ; } catch ( IOException e ) { logger . warning ( e . toString ( ) ) ; return false ; }
public class AbstractExternalHighlightingFragment2 { /** * Replies the simple name of the language . * @ return the name . */ protected String getLanguageSimpleName ( ) { } }
final String name = getGrammar ( ) . getName ( ) ; final int index = name . lastIndexOf ( '.' ) ; if ( index > 0 ) { return name . substring ( index + 1 ) ; } return name ;
public class Area { /** * the method check up the array size and necessarily increases it . */ private static double [ ] adjustSize ( double [ ] array , int newSize ) { } }
if ( newSize <= array . length ) { return array ; } double [ ] newArray = new double [ 2 * newSize ] ; System . arraycopy ( array , 0 , newArray , 0 , array . length ) ; return newArray ;
public class CmsContentService {
    /**
     * Writes the xml content to the vfs and re-initializes the member variables.<p>
     *
     * @param cms the cms context
     * @param file the file to write to
     * @param content the content
     * @param encoding the file encoding
     * @return the content
     * @throws CmsException if writing the file fails
     */
    private CmsXmlContent writeContent ( CmsObject cms , CmsFile file , CmsXmlContent content , String encoding ) throws CmsException { } }
String decodedContent = content . toString ( ) ;
try {
    file . setContents ( decodedContent . getBytes ( encoding ) ) ;
} catch ( UnsupportedEncodingException e ) {
    // An unknown encoding is surfaced as a localized CmsException for the editor.
    throw new CmsException ( org . opencms . workplace . editors . Messages . get ( ) . container ( org . opencms . workplace . editors . Messages . ERR_INVALID_CONTENT_ENC_1 , file . getRootPath ( ) ) , e ) ;
}
// the file content might have been modified during the write operation
// The attribute flags the write as editor-initiated for listeners; it must be
// removed again even when writeFile throws, hence the finally.
cms . getRequestContext ( ) . setAttribute ( ATTR_EDITOR_SAVING , "true" ) ;
try {
    file = cms . writeFile ( file ) ;
} finally {
    cms . getRequestContext ( ) . removeAttribute ( ATTR_EDITOR_SAVING ) ;
}
return CmsXmlContentFactory . unmarshal ( cms , file ) ;
public class AttributeTransformationDescription { /** * Checks attributes for rejection * @ param rejectedAttributes gathers information about failed attributes * @ param attributeValue the attribute value */ void rejectAttributes ( RejectedAttributesLogContext rejectedAttributes , ModelNode attributeValue ) { } }
for ( RejectAttributeChecker checker : checks ) { rejectedAttributes . checkAttribute ( checker , name , attributeValue ) ; }
public class EventImpl {
    /**
     * @see com.ibm.websphere.event.EventHandle#getProperties()
     *
     * <p>Read-only property maps are shared directly; mutable ones are wrapped
     * in a defensive copy so callers cannot alter this event's state.
     * (Note: "isReadyOnly" is the spelling of the underlying API method.)
     */
    public final MapDictionary < String , Object > getProperties ( ) { } }
if ( this . properties . isReadyOnly ( ) ) {
    return this . properties ;
}
return new MapDictionary < String , Object > ( this . properties ) ;
public class CmsToolBar {
    /**
     * Creates the user info drop down.<p>
     *
     * <p>The popup's minimized state renders the current user's small icon as the
     * drop-down button; opening it builds a CmsUserInfo panel wired to this
     * toolbar's upload handling.
     *
     * @return the drop down component
     */
    private Component createUserInfoDropDown ( ) { } }
PopupView pv = new PopupView ( new PopupView . Content ( ) {
    private static final long serialVersionUID = 1L ;
    // Shown while the popup is collapsed: the user's avatar as a button.
    public String getMinimizedValueAsHTML ( ) {
        CmsObject cms = A_CmsUI . getCmsObject ( ) ;
        return getDropDownButtonHtml ( new ExternalResource ( OpenCms . getWorkplaceAppManager ( ) . getUserIconHelper ( ) . getSmallIconPath ( cms , cms . getRequestContext ( ) . getCurrentUser ( ) ) ) ) ;
    }
    // Built lazily each time the popup is opened.
    public Component getPopupComponent ( ) {
        return new CmsUserInfo ( new I_UploadListener ( ) {
            public void onUploadFinished ( List < String > uploadedFiles ) {
                handleUpload ( uploadedFiles ) ;
            }
        } , getDialogContext ( ) ) ;
    }
} ) ;
pv . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_USER_INFO_TITLE_0 ) ) ;
pv . addStyleName ( OpenCmsTheme . NAVIGATOR_DROPDOWN ) ;
// Keep the popup open while the mouse leaves it (it contains interactive controls).
pv . setHideOnMouseOut ( false ) ;
pv . addStyleName ( OpenCmsTheme . USER_INFO ) ;
return pv ;
public class DeploymentOperationsImpl { /** * Lets wait until there are enough Ready pods of the given Deployment */ private void waitUntilDeploymentIsScaled ( final int count ) { } }
final BlockingQueue < Object > queue = new ArrayBlockingQueue < > ( 1 ) ; final AtomicReference < Integer > replicasRef = new AtomicReference < > ( 0 ) ; final String name = checkName ( getItem ( ) ) ; final String namespace = checkNamespace ( getItem ( ) ) ; final Runnable deploymentPoller = ( ) -> { try { Deployment deployment = get ( ) ; // If the deployment is gone , we shouldn ' t wait . if ( deployment == null ) { if ( count == 0 ) { queue . put ( true ) ; return ; } else { queue . put ( new IllegalStateException ( "Can't wait for Deployment: " + checkName ( getItem ( ) ) + " in namespace: " + checkName ( getItem ( ) ) + " to scale. Resource is no longer available." ) ) ; return ; } } replicasRef . set ( deployment . getStatus ( ) . getReplicas ( ) ) ; int currentReplicas = deployment . getStatus ( ) . getReplicas ( ) != null ? deployment . getStatus ( ) . getReplicas ( ) : 0 ; long generation = deployment . getMetadata ( ) . getGeneration ( ) != null ? deployment . getMetadata ( ) . getGeneration ( ) : 0 ; long observedGeneration = deployment . getStatus ( ) != null && deployment . getStatus ( ) . getObservedGeneration ( ) != null ? deployment . getStatus ( ) . getObservedGeneration ( ) : - 1 ; if ( observedGeneration >= generation && Objects . equals ( deployment . getSpec ( ) . getReplicas ( ) , currentReplicas ) ) { queue . put ( true ) ; } else { LOG . debug ( "Only {}/{} pods scheduled for Deployment: {} in namespace: {} seconds so waiting..." , deployment . getStatus ( ) . getReplicas ( ) , deployment . getSpec ( ) . getReplicas ( ) , deployment . getMetadata ( ) . getName ( ) , namespace ) ; } } catch ( Throwable t ) { LOG . error ( "Error while waiting for Deployment to be scaled." , t ) ; } } ; ScheduledExecutorService executor = Executors . newSingleThreadScheduledExecutor ( ) ; ScheduledFuture poller = executor . scheduleWithFixedDelay ( deploymentPoller , 0 , POLL_INTERVAL_MS , TimeUnit . MILLISECONDS ) ; try { if ( Utils . 
waitUntilReady ( queue , getConfig ( ) . getScaleTimeout ( ) , TimeUnit . MILLISECONDS ) ) { LOG . debug ( "{}/{} pod(s) ready for Deployment: {} in namespace: {}." , replicasRef . get ( ) , count , name , namespace ) ; } else { LOG . error ( "{}/{} pod(s) ready for Deployment: {} in namespace: {} after waiting for {} seconds so giving up" , replicasRef . get ( ) , count , name , namespace , TimeUnit . MILLISECONDS . toSeconds ( getConfig ( ) . getScaleTimeout ( ) ) ) ; } } finally { poller . cancel ( true ) ; executor . shutdown ( ) ; }
public class NormOps_DDRM { /** * Normalizes the matrix such that the Frobenius norm is equal to one . * @ param A The matrix that is to be normalized . */ public static void normalizeF ( DMatrixRMaj A ) { } }
double val = normF ( A ) ; if ( val == 0 ) return ; int size = A . getNumElements ( ) ; for ( int i = 0 ; i < size ; i ++ ) { A . div ( i , val ) ; }
public class SwingFrontend { /** * As the current subject can be different for each Swing Window this method * ensure that no wrong subject stays as current subject * @ param r a runnable ( normally a lambda is used ) */ public static void runWithContext ( Runnable r ) { } }
try { browserStack . push ( SwingFrame . getActiveWindow ( ) . getVisibleTab ( ) ) ; Subject . setCurrent ( SwingFrame . getActiveWindow ( ) . getSubject ( ) ) ; r . run ( ) ; } finally { Subject . setCurrent ( null ) ; browserStack . pop ( ) ; }
public class ParserUtil { /** * Returns the string which was actually parsed with all the substitutions performed */ public static String parseOperationRequest ( String commandLine , final CommandLineParser . CallbackHandler handler ) throws CommandFormatException { } }
SubstitutedLine sl = parseOperationRequestLine ( commandLine , handler , null ) ; return sl == null ? null : sl . getSubstitued ( ) ;
public class ExcelFunctions { /** * Returns the absolute value of a number */ public static BigDecimal abs ( EvaluationContext ctx , Object number ) { } }
return Conversions . toDecimal ( number , ctx ) . abs ( ) ;
public class VodClient { /** * List the properties of all media resource managed by VOD service . * recommend use marker mode to get high performance * The caller < i > must < / i > authenticate with a valid BCE Access Key / Private Key pair . * @ param pageNo The pageNo need to list , must be greater than 0 * @ param pageSize The pageSize , must in range [ LIST _ MIN _ PAGESIZE , LIST _ MAX _ PAGESIZE ] * @ param status The media status , can be null * @ param begin The media create date after begin * @ param end The media create date before end * @ param title The media title , use prefix search * @ return The properties of all specific media resources */ public ListMediaResourceResponse listMediaResources ( int pageNo , int pageSize , String status , Date begin , Date end , String title ) { } }
ListMediaResourceRequest request = new ListMediaResourceRequest ( ) . withPageNo ( pageNo ) . withPageSize ( pageSize ) . withStatus ( status ) . withBegin ( begin ) . withEnd ( end ) . withTitle ( title ) ; return listMediaResources ( request ) ;
public class Page {
    /**
     * <p>Returns the Wikipedia article as plain text using the SwebleParser with
     * a SimpleWikiConfiguration and the PlainTextConverter.<br>
     * If you have different needs regarding the plain text, you can use
     * getParsedPage(Visitor v) and provide your own Sweble-Visitor. Examples
     * are in the <code>de.tudarmstadt.ukp.wikipedia.api.sweble</code> package
     * or on http://www.sweble.org</p>
     *
     * <p>Alternatively, use Page.getText() to return the Wikipedia article
     * with all Wiki markup, which can then be processed with the old JWPL
     * MediaWiki parser (project <code>de.tudarmstad.ukp.wikipedia.parser</code>).</p>
     *
     * @return The plain text of a Wikipedia article
     * @throws WikiApiException Thrown if errors occurred.
     */
    public String getPlainText ( ) throws WikiApiException {
        // Configure the PlainTextConverter for plain text parsing (no wrapping).
        PlainTextConverter converter = new PlainTextConverter ( this . wiki . getWikConfig ( ) , false , Integer . MAX_VALUE ) ;
        return ( String ) parsePage ( converter ) ;
    }
}
public class ReflectionUtils { /** * This helper method facilitates creation of { @ link DefaultCustomTypeTest } arrays and pre - populates them with the * set of String values provided . * @ param type * A { @ link DefaultCustomType } object that represents the type of the array to be instantiated . * @ param values * A { @ link String } array that represents the set of values that should be used to pre - populate the * newly constructed array . * @ return An array of type { @ link DefaultCustomType } */ public static Object instantiateDefaultCustomTypeArray ( DefaultCustomType type , String [ ] values ) { } }
logger . entering ( new Object [ ] { type , values } ) ; checkArgument ( type != null , "type cannot be null." ) ; checkArgument ( ( values != null && values . length != 0 ) , "The values cannot be null (or) empty." ) ; Object arrayToReturn = Array . newInstance ( type . getCustomTypeClass ( ) , values . length ) ; for ( int i = 0 ; i < values . length ; i ++ ) { Array . set ( arrayToReturn , i , type . instantiateObject ( values [ i ] ) ) ; } logger . exiting ( arrayToReturn ) ; return arrayToReturn ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PointPropertyType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link PointPropertyType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "centerOf" ) public JAXBElement < PointPropertyType > createCenterOf ( PointPropertyType value ) { } }
return new JAXBElement < PointPropertyType > ( _CenterOf_QNAME , PointPropertyType . class , null , value ) ;
public class PathParamSerializers { /** * Create a PathParamSerializer for Set parameters . */ public static < Param > PathParamSerializer < Set < Param > > set ( String name , Function < PSequence < String > , Param > deserialize , Function < Param , PSequence < String > > serialize ) { } }
return new NamedPathParamSerializer < Set < Param > > ( "Set(" + name + ")" ) { @ Override public PSequence < String > serialize ( Set < Param > parameter ) { Set < String > serializedParams = serializeCollection ( parameter , serialize ) . collect ( Collectors . toSet ( ) ) ; return TreePVector . from ( serializedParams ) ; } @ Override public Set < Param > deserialize ( PSequence < String > parameters ) { return deserializeCollection ( parameters , deserialize ) . collect ( Collectors . toSet ( ) ) ; } } ;
public class CloudWatchLoggingOptionUpdateMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CloudWatchLoggingOptionUpdate cloudWatchLoggingOptionUpdate , ProtocolMarshaller protocolMarshaller ) { } }
if ( cloudWatchLoggingOptionUpdate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( cloudWatchLoggingOptionUpdate . getCloudWatchLoggingOptionId ( ) , CLOUDWATCHLOGGINGOPTIONID_BINDING ) ; protocolMarshaller . marshall ( cloudWatchLoggingOptionUpdate . getLogStreamARNUpdate ( ) , LOGSTREAMARNUPDATE_BINDING ) ; protocolMarshaller . marshall ( cloudWatchLoggingOptionUpdate . getRoleARNUpdate ( ) , ROLEARNUPDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Cache { /** * Adds a prefabricated value to the cache for the given type . * @ param tag A description of the type . Takes generics into account . * @ param red A " red " value for the given type . * @ param black A " black " value for the given type . * @ param redCopy A shallow copy of the given red value . */ public < T > void put ( TypeTag tag , T red , T black , T redCopy ) { } }
// Store the three prefabricated values under the type tag; a later put for the
// same tag replaces the previously cached tuple.
cache . put ( tag , new Tuple < > ( red , black , redCopy ) ) ;
public class DBNumBase { /** * { @ inheritDoc } * < p > コンストラクタで指定したロケール ( 言語 ) で許可するかどうか判定する 。 */ @ Override public boolean isApplicable ( final Locale locale ) { } }
if ( locale == null ) { return false ; } final String language = locale . getLanguage ( ) . toLowerCase ( ) ; return allowedLanguages . contains ( language ) ;
public class InitiatorMailbox { /** * Records that the previous partition leader has drained its txns , updating * this site ' s MigratePartitionLeader state machine accordingly . */ private void setMigratePartitionLeaderStatus ( MigratePartitionLeaderMessage message ) { } }
// State machine transitions (order matters):
//   reset message        -> NONE (old leader's host went down; abandon migration)
//   NONE                 -> TXN_DRAINED (drain notice arrived before promotion)
//   TXN_RESTART          -> NONE (this site already promoted; stop restarting txns)
// The host with old partition leader is down .
if ( message . isStatusReset ( ) ) { m_migratePartitionLeaderStatus = MigratePartitionLeaderStatus . NONE ; return ; } if ( m_migratePartitionLeaderStatus == MigratePartitionLeaderStatus . NONE ) { // txn draining notification from the old leader arrives before this site is promoted m_migratePartitionLeaderStatus = MigratePartitionLeaderStatus . TXN_DRAINED ; } else if ( m_migratePartitionLeaderStatus == MigratePartitionLeaderStatus . TXN_RESTART ) { // if the new leader has been promoted , stop restarting txns . m_migratePartitionLeaderStatus = MigratePartitionLeaderStatus . NONE ; }
// Log the resulting status together with both leaders' HSIDs for debugging.
tmLog . info ( "MigratePartitionLeader new leader " + CoreUtils . hsIdToString ( m_hsId ) + " is notified by previous leader " + CoreUtils . hsIdToString ( message . getPriorLeaderHSID ( ) ) + ". status:" + m_migratePartitionLeaderStatus ) ;
public class IndexReader { /** * Locates the { @ link TableBucket } s for the given Key Hashes in the given Segment ' s Extended Attribute Index . * @ param segment A { @ link DirectSegmentAccess } providing access to the Segment to look into . * @ param keyHashes A Collection of Key Hashes to look up { @ link TableBucket } s for . * @ param timer Timer for the operation . * @ return A CompletableFuture that , when completed , will contain the requested Bucket information . */ CompletableFuture < Map < UUID , TableBucket > > locateBuckets ( DirectSegmentAccess segment , Collection < UUID > keyHashes , TimeoutTimer timer ) { } }
return segment . getAttributes ( keyHashes , false , timer . getRemaining ( ) ) . thenApply ( attributes -> attributes . entrySet ( ) . stream ( ) . collect ( Collectors . toMap ( Map . Entry :: getKey , e -> new TableBucket ( e . getKey ( ) , e . getValue ( ) ) ) ) ) ;
public class ContentKeyAuthorizationPolicy { /** * Create an operation that will retrieve the given content key * authorization policy * @ param contentKeyAuthorizationPolicyId * id of content key authorization policy to retrieve * @ return the operation */ public static EntityGetOperation < ContentKeyAuthorizationPolicyInfo > get ( String contentKeyAuthorizationPolicyId ) { } }
return new DefaultGetOperation < ContentKeyAuthorizationPolicyInfo > ( ENTITY_SET , contentKeyAuthorizationPolicyId , ContentKeyAuthorizationPolicyInfo . class ) ;
public class DeclarationTransformerImpl { /** * Converts term into TermLength and stores values and types in maps * @ param < T > * CSSProperty * @ param term * Term to be parsed * @ param propertyName * How to store colorIdentificiton * @ param lengthIdentification * What to store under propertyName * @ param properties * Map to store property types * @ param values * Map to store property values * @ return < code > true < / code > in case of success , < code > false < / code > * otherwise */ protected < T extends CSSProperty > boolean genericTermLength ( Term < ? > term , String propertyName , T lengthIdentification , ValueRange range , Map < String , CSSProperty > properties , Map < String , Term < ? > > values ) { } }
if ( term instanceof TermInteger && ( ( TermInteger ) term ) . getUnit ( ) . equals ( TermNumber . Unit . none ) ) { if ( CSSFactory . getImplyPixelLength ( ) || ( ( TermInteger ) term ) . getValue ( ) == 0 ) { // 0 is always allowed with no units // convert to length with units of px TermLength tl = tf . createLength ( ( ( TermInteger ) term ) . getValue ( ) , TermNumber . Unit . px ) ; return genericTerm ( TermLength . class , tl , propertyName , lengthIdentification , range , properties , values ) ; } else { return false ; } } else if ( term instanceof TermLength ) { return genericTerm ( TermLength . class , term , propertyName , lengthIdentification , range , properties , values ) ; } return false ;
public class VirtualCdj { /** * < p > Nudge the playback position by the specified number of milliseconds , to support synchronization with an * external clock . Positive values move playback forward in time , while negative values jump back . If we are * sending beat packets , notify the beat sender that the timeline has changed . < / p > * < p > If the shift would put us back before beat one , we will jump forward a bar to correct that . It is thus not * safe to jump backwards more than a bar ' s worth of time . < / p > * @ param ms the number of millisecond to shift the simulated playback position */ public void adjustPlaybackPosition ( int ms ) { } }
if ( ms != 0 ) { metronome . adjustStart ( - ms ) ; if ( metronome . getBeat ( ) < 1 ) { metronome . adjustStart ( Math . round ( Metronome . beatsToMilliseconds ( metronome . getBeatsPerBar ( ) , metronome . getTempo ( ) ) ) ) ; } notifyBeatSenderOfChange ( ) ; }
public class KerasBatchNormalization { /** * Get BatchNormalization axis from Keras layer configuration . Currently unused . * @ param layerConfig dictionary containing Keras layer configuration * @ return batchnorm axis * @ throws InvalidKerasConfigurationException Invalid Keras config */ private int getBatchNormAxis ( Map < String , Object > layerConfig ) throws InvalidKerasConfigurationException { } }
Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; return ( int ) innerConfig . get ( LAYER_FIELD_AXIS ) ;
public class PageHelper { /** * Typical use is to simulate hitting ESCAPE or ENTER . * @ param driver * @ param keys * @ see Actions # sendKeys ( CharSequence . . . ) */ public static void hitKeys ( WebDriver driver , CharSequence keys ) { } }
new Actions ( driver ) . sendKeys ( keys ) . perform ( ) ;
public class Util { /** * Reconstructs a number that is represented by more than one byte in a network packet in little - endian order , for * the very few protocol values that are sent in this quirky way . * @ param buffer the byte array containing the packet data * @ param start the index of the first byte containing a numeric value * @ param length the number of bytes making up the value * @ return the reconstructed number */ @ SuppressWarnings ( "SameParameterValue" ) public static long bytesToNumberLittleEndian ( byte [ ] buffer , int start , int length ) { } }
long result = 0 ; for ( int index = start + length - 1 ; index >= start ; index -- ) { result = ( result << 8 ) + unsign ( buffer [ index ] ) ; } return result ;
public class ParticleGame { /** * Clear the particle system held in this canvas * @ param additive * True if the particle system should be set to additive */ public void clearSystem ( boolean additive ) { } }
system = new ParticleSystem ( "org/newdawn/slick/data/particle.tga" , 2000 ) ; if ( additive ) { system . setBlendingMode ( ParticleSystem . BLEND_ADDITIVE ) ; } system . setRemoveCompletedEmitters ( false ) ;
public class RedisPubSubClient { /** * - - - CONNECT - - - */ public final void connect ( ) { } }
super . connect ( ) ; List < RedisURI > redisURIs = parseURLs ( urls , password , secure ) ; StatefulRedisPubSubConnection < byte [ ] , byte [ ] > connection ; ByteArrayCodec codec = new ByteArrayCodec ( ) ; if ( urls . length > 1 ) { // Clustered client connection = RedisClusterClient . create ( resources , redisURIs ) . connectPubSub ( codec ) ; } else { // Single connection connection = RedisClient . create ( resources , redisURIs . get ( 0 ) ) . connectPubSub ( codec ) ; } // Add listener if ( listener != null ) { connection . addListener ( listener ) ; } client = connection . async ( ) ;
public class Config { /** * Find configuration object children with requested names . * @ param name one or more child names . * @ return configuration object children with requested names , possible empty . * @ throws IllegalArgumentException if < code > name < / code > list does not contains at least one item . */ public List < Config > findChildren ( String ... name ) { } }
Params . notNullOrEmpty ( name , "Children names" ) ; List < String > names = Arrays . asList ( name ) ; List < Config > results = new ArrayList < Config > ( ) ; for ( Config child : children ) { if ( names . contains ( child . name ) ) { results . add ( child ) ; } } return results ;
public class HikariConfig { /** * Set a MetricRegistry instance to use for registration of metrics used by HikariCP . * @ param metricRegistry the MetricRegistry instance to use */ public void setMetricRegistry ( Object metricRegistry ) { } }
if ( metricsTrackerFactory != null ) { throw new IllegalStateException ( "cannot use setMetricRegistry() and setMetricsTrackerFactory() together" ) ; } if ( metricRegistry != null ) { metricRegistry = getObjectOrPerformJndiLookup ( metricRegistry ) ; if ( ! safeIsAssignableFrom ( metricRegistry , "com.codahale.metrics.MetricRegistry" ) && ! ( safeIsAssignableFrom ( metricRegistry , "io.micrometer.core.instrument.MeterRegistry" ) ) ) { throw new IllegalArgumentException ( "Class must be instance of com.codahale.metrics.MetricRegistry or io.micrometer.core.instrument.MeterRegistry" ) ; } } this . metricRegistry = metricRegistry ;
public class LogUtils { /** * Updates a logger with a given name to the given level . * @ param loggerName * @ param level */ public static LoggerConfig [ ] setLogLevel ( String loggerName , String level ) { } }
if ( StringUtils . isBlank ( loggerName ) ) { loggerName = ch . qos . logback . classic . Logger . ROOT_LOGGER_NAME ; } LoggerContext context = ( LoggerContext ) LoggerFactory . getILoggerFactory ( ) ; log . debug ( "Setting {} to level {}" , loggerName , level ) ; ch . qos . logback . classic . Logger logger = null ; try { logger = context . getLogger ( loggerName ) ; if ( logger != null ) { if ( level . equals ( "null" ) || level . equals ( "none" ) ) { logger . setLevel ( null ) ; } else { logger . setLevel ( Level . toLevel ( level ) ) ; } logger = context . getLogger ( loggerName ) ; return new LoggerConfig [ ] { new LoggerConfig ( logger . getName ( ) , logger . getLevel ( ) + "" ) } ; } return new LoggerConfig [ ] { } ; } catch ( Throwable t ) { log . warn ( "Failed to change log level for logger " + loggerName + " to level " + level , t ) ; return new LoggerConfig [ ] { } ; }
public class ClassFileMetaData { /** * Checks if the constant pool contains the provided double constant , which implies the constant is used somewhere in the code . * NB : compile - time constant expressions are evaluated at compile time . */ public boolean containsDouble ( double value ) { } }
boolean isNan = Double . isNaN ( value ) ; for ( int i = 1 ; i < maxPoolSize ; i ++ ) { if ( types [ i ] == DOUBLE ) { double d = readDouble ( i ) ; if ( d == value || ( isNan && Double . isNaN ( d ) ) ) return true ; } } return false ;
public class AWSShieldClient { /** * Updates the details of the list of email addresses that the DRT can use to contact you during a suspected attack . * @ param updateEmergencyContactSettingsRequest * @ return Result of the UpdateEmergencyContactSettings operation returned by the service . * @ throws InternalErrorException * Exception that indicates that a problem occurred with the service infrastructure . You can retry the * request . * @ throws InvalidParameterException * Exception that indicates that the parameters passed to the API are invalid . * @ throws OptimisticLockException * Exception that indicates that the protection state has been modified by another client . You can retry the * request . * @ throws ResourceNotFoundException * Exception indicating the specified resource does not exist . * @ sample AWSShield . UpdateEmergencyContactSettings * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / shield - 2016-06-02 / UpdateEmergencyContactSettings " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateEmergencyContactSettingsResult updateEmergencyContactSettings ( UpdateEmergencyContactSettingsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateEmergencyContactSettings ( request ) ;
public class SqlHelper { /** * Builds the WHERE clause over the entity's primary - key columns . * @ param entityClass entity class whose PK columns are used * @ param entityName parameter name prefix used in the generated placeholders * @ param useVersion whether to append the optimistic - lock version condition * @ return the generated { @ code < where > } XML fragment */ public static String wherePKColumns ( Class < ? > entityClass , String entityName , boolean useVersion ) { } }
StringBuilder sql = new StringBuilder ( ) ; boolean hasLogicDelete = hasLogicDeleteColumn ( entityClass ) ; sql . append ( "<where>" ) ;
// Fetch all primary-key columns.
Set < EntityColumn > columnSet = EntityHelper . getPKColumns ( entityClass ) ;
// When a column has a primary-key generation strategy, there is no need to check
// whether its property is null: if it is null, the strategy will generate a value.
for ( EntityColumn column : columnSet ) { sql . append ( " AND " ) . append ( column . getColumnEqualsHolder ( entityName ) ) ; }
// Optionally constrain by the optimistic-lock version column.
if ( useVersion ) { sql . append ( whereVersion ( entityClass ) ) ; }
// Exclude logically deleted rows when the entity declares a logic-delete column.
if ( hasLogicDelete ) { sql . append ( whereLogicDelete ( entityClass , false ) ) ; }
sql . append ( "</where>" ) ; return sql . toString ( ) ;
public class Component { /** * Adds the given key and value to this component ' s own parameter . * If the provided key is < tt > null < / tt > nothing happens . If the provided value is < tt > null < / tt > * any existing parameter with the given key name is removed . * @ param key * the key of the new parameter to add . * @ param value * the value assoicated with the key . */ public void addParameter ( String key , Object value ) { } }
if ( key != null ) { Map < String , Object > params = getParameters ( ) ; if ( value == null ) params . remove ( key ) ; else params . put ( key , value ) ; }
public class CmsADEManager { /** * Adds a wait handle for the next cache update to a formatter configuration . < p > * @ param online true if we want to add a wait handle to the online cache , else the offline cache * @ return the wait handle that has been added */ public CmsWaitHandle addFormatterCacheWaitHandle ( boolean online ) { } }
CmsWaitHandle handle = new CmsWaitHandle ( true ) ; // single use wait handle CmsFormatterConfigurationCache cache = online ? m_onlineFormatterCache : m_offlineFormatterCache ; cache . addWaitHandle ( handle ) ; return handle ;
public class SimonCallbacks { /** * Search callback by type in list of callbacks * @ param callbacks List of callback * @ param callbackType Callback type * @ return Callback matching type , or { @ code null } if none found */ private static < T extends Callback > T getCallbackByType ( Iterable < Callback > callbacks , Class < T > callbackType ) { } }
// Scans the callbacks in order, unwrapping Delegating wrappers and recursing into
// CompositeCallbacks, until the first callback assignable to callbackType is found.
T foundCallback = null ; Iterator < Callback > callbackIterator = callbacks . iterator ( ) ; while ( foundCallback == null && callbackIterator . hasNext ( ) ) { Callback callback = callbackIterator . next ( ) ;
// Remove callback wrappers: peel Delegating layers until the target type is
// reached or the innermost delegate is exposed.
while ( ( callback instanceof Delegating ) && ( ! callbackType . isInstance ( callback ) ) ) { callback = ( ( Delegating < Callback > ) callback ) . getDelegate ( ) ; } if ( callbackType . isInstance ( callback ) ) { // Callback found foundCallback = callbackType . cast ( callback ) ; } else if ( callback instanceof CompositeCallback ) { // Visit the composite callback: search its children depth-first.
foundCallback = getCallbackByType ( ( ( CompositeCallback ) callback ) . callbacks ( ) , callbackType ) ; } } return foundCallback ;
public class AmazonCloudFrontClient { /** * Get the field - level encryption configuration information . * @ param getFieldLevelEncryptionConfigRequest * @ return Result of the GetFieldLevelEncryptionConfig operation returned by the service . * @ throws AccessDeniedException * Access denied . * @ throws NoSuchFieldLevelEncryptionConfigException * The specified configuration for field - level encryption doesn ' t exist . * @ sample AmazonCloudFront . GetFieldLevelEncryptionConfig * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / GetFieldLevelEncryptionConfig " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetFieldLevelEncryptionConfigResult getFieldLevelEncryptionConfig ( GetFieldLevelEncryptionConfigRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetFieldLevelEncryptionConfig ( request ) ;
public class HThriftClient { /** * { @ inheritDoc } */ public boolean isOpen ( ) { } }
boolean open = false ; if ( transport != null ) { open = transport . isOpen ( ) ; } if ( log . isTraceEnabled ( ) ) { log . trace ( "Transport open status {} for client {}" , open , this ) ; } return open ;