signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Binding {

    /**
     * Creates a Binding for an exported value that is not a module namespace object.
     *
     * @param boundExport the export backing this binding
     * @param sourceNode the AST node the binding originates from
     * @return a new Binding flagged as a plain export (not a module namespace,
     *     no closure namespace)
     */
    static Binding from(Export boundExport, Node sourceNode) {
        return new AutoValue_Binding(
            boundExport.moduleMetadata(),
            sourceNode,
            boundExport,
            /* isModuleNamespace= */ false,
            /* closureNamespace= */ null,
            CreatedBy.EXPORT);
    }
}
public class SystemEvent {

    /**
     * Returns the event data as a map.
     *
     * @return the {@code data} field cast to a map when it is one, otherwise an
     *     immutable empty map (never {@code null})
     */
    @JsonIgnore
    @SuppressWarnings("unchecked")
    public Map<String, Object> getDataMap() {
        if (data instanceof Map) {
            return (Map<String, Object>) data;
        }
        return Collections.emptyMap();
    }
}
public class SqlInfoBuilder {

    /**
     * Builds the SqlInfo fragment needed for "IS NULL" / "IS NOT NULL" clauses.
     *
     * @param fieldText the database column text
     * @return the populated SqlInfo
     */
    public SqlInfo buildIsNullSql(String fieldText) {
        // Default to the "IS NULL" suffix when no suffix was configured.
        this.suffix = StringHelper.isBlank(this.suffix) ? ZealotConst.IS_NULL_SUFFIX : this.suffix;
        join.append(prefix).append(fieldText).append(this.suffix);
        return sqlInfo.setJoin(join);
    }
}
public class RScriptExecutor {

    /**
     * Retrieves the R script STDOUT response from OpenCPU for the given session.
     *
     * @param openCpuSessionKey OpenCPU session key identifying the script run
     * @return the R script STDOUT as a string
     * @throws IOException if the HTTP request fails
     * @throws ClientProtocolException if the server replies with a non-2xx status
     */
    private String executeScriptGetValueRequest(String openCpuSessionKey) throws IOException {
        URI scriptGetValueResponseUri = getScriptGetValueResponseUri(openCpuSessionKey);
        HttpGet httpGet = new HttpGet(scriptGetValueResponseUri);
        String responseValue;
        try (CloseableHttpResponse response = httpClient.execute(httpGet)) {
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode >= 200 && statusCode < 300) {
                HttpEntity entity = response.getEntity();
                responseValue = EntityUtils.toString(entity);
                // Fully consume the entity so the connection can be reused.
                EntityUtils.consume(entity);
            } else {
                throw new ClientProtocolException(format(FORMAT_UNEXPECTED_RESPONSE_STATUS, statusCode));
            }
        }
        return responseValue;
    }
}
public class ProductBiddingCategoryData {

    /**
     * Gets the parentDimensionValue value for this ProductBiddingCategoryData.
     *
     * @return parentDimensionValue — the dimension value that corresponds to the parent category
     */
    public com.google.api.ads.adwords.axis.v201809.cm.ProductBiddingCategory getParentDimensionValue() {
        return parentDimensionValue;
    }
}
public class BeanMappingObjectBuilder {

    /**
     * Sets the source and target keys used for property lookup in scripts.
     *
     * @param srcKey key used to read the property on the source object
     * @param targetKey key used to set the property on the target object
     * @return this builder, for chaining
     */
    public BeanMappingObjectBuilder keys(String srcKey, String targetKey) {
        object.setSrcKey(srcKey);
        object.setTargetKey(targetKey);
        return this;
    }
}
public class MediaQueryTools {

    /**
     * Gets the CSS wrapped in the specified media query. Note: all existing rule objects
     * are reused, so modifying them also modifies the original CSS!
     *
     * @param aCSS the CSS to be wrapped; may not be <code>null</code>
     * @param aMediaQuery the media query to use
     * @param bAllowNestedMediaQueries if <code>true</code> nested media queries are allowed,
     *        <code>false</code> if they are prohibited
     * @return <code>null</code> if the CSS cannot be wrapped, the newly created
     *         {@link CascadingStyleSheet} object otherwise
     */
    @Nullable
    public static CascadingStyleSheet getWrappedInMediaQuery(@Nonnull final CascadingStyleSheet aCSS, @Nonnull final CSSMediaQuery aMediaQuery, final boolean bAllowNestedMediaQueries) {
        // Delegate to the list-based overload with a single-element list.
        return getWrappedInMediaQuery(aCSS, new CommonsArrayList<>(aMediaQuery), bAllowNestedMediaQueries);
    }
}
public class CmsResultsBackwardsScrollHandler {

    /**
     * Updates the handler with a new search bean.
     *
     * @param searchBean the search bean; null clears the stored results
     */
    public void updateSearchBean(CmsGallerySearchBean searchBean) {
        m_searchBean = searchBean;
        if (searchBean != null) {
            m_pageSize = searchBean.getMatchesPerPage();
            int lastPage = searchBean.getLastPage();
            // We don't just store the search bean because it gets reused for multiple
            // searches, and so the result list may change under us.
            m_resultBeans = searchBean.getResults();
            if (lastPage != -1) {
                // Load the last page and, when there is one, the page before it so the
                // user can immediately scroll backwards.
                loadPage(lastPage);
                m_firstShownPage = lastPage;
                if (lastPage > 1) {
                    loadPage(lastPage - 1);
                    m_firstShownPage = lastPage - 1;
                }
            }
        } else {
            m_resultBeans = null;
        }
    }
}
public class CommonsArchiver { /** * Creates a new { @ link ArchiveEntry } in the given { @ link ArchiveOutputStream } , and copies the given { @ link File } * into the new entry . * @ param file the file to add to the archive * @ param entryName the name of the archive entry * @ param archive the archive to write to * @ throws IOException when an I / O error occurs during FileInputStream creation or during copying */ protected void createArchiveEntry ( File file , String entryName , ArchiveOutputStream archive ) throws IOException { } }
ArchiveEntry entry = archive . createArchiveEntry ( file , entryName ) ; // TODO # 23 : read permission from file , write it to the ArchiveEntry archive . putArchiveEntry ( entry ) ; if ( ! entry . isDirectory ( ) ) { FileInputStream input = null ; try { input = new FileInputStream ( file ) ; IOUtils . copy ( input , archive ) ; } finally { IOUtils . closeQuietly ( input ) ; } } archive . closeArchiveEntry ( ) ;
class RecursiveGCD {

    /**
     * Computes the greatest common divisor (gcd) of two integers recursively using
     * the Euclidean algorithm.
     *
     * Examples:
     *   recursiveGCD(12, 14) == 2
     *   recursiveGCD(13, 17) == 1
     *   recursiveGCD(9, 3)   == 3
     *
     * @param x first value (negative values are handled via their absolute value;
     *     the original version misbehaved for negative inputs)
     * @param y second value
     * @return the non-negative gcd of x and y; recursiveGCD(0, 0) == 0
     */
    public static int recursiveGCD(int x, int y) {
        // Normalize signs so negative inputs do not break the min/max-based recursion.
        x = Math.abs(x);
        y = Math.abs(y);
        int smaller = Math.min(x, y);
        int larger = Math.max(x, y);
        if (smaller == 0) {
            return larger;
        }
        // gcd(a, b) == gcd(b, a mod b)
        return recursiveGCD(larger % smaller, smaller);
    }
}
public class ns_save_config {

    /**
     * Performs generic data validation for the operation to be performed.
     *
     * @param operationType the operation being validated
     * @throws Exception if any element of ns_ip_address_arr fails validation
     */
    protected void validate(String operationType) throws Exception {
        super.validate(operationType);
        MPSIPAddress ns_ip_address_arr_validator = new MPSIPAddress();
        // The IP address is a required field for the "add" operation.
        ns_ip_address_arr_validator.setConstraintIsReq(MPSConstants.ADD_CONSTRAINT, true);
        if (ns_ip_address_arr != null) {
            // Validate each entry individually; the index is included in the field label.
            for (int i = 0; i < ns_ip_address_arr.length; i++) {
                ns_ip_address_arr_validator.validate(operationType, ns_ip_address_arr[i], "ns_ip_address_arr[" + i + "]");
            }
        }
    }
}
public class HelpDoclet {

    /**
     * Ensures that {@link #settingsDir} exists and is a directory.
     *
     * @throws RuntimeException if {@link #settingsDir} does not exist or is not a directory
     */
    private void validateSettingsDir() {
        if (!settingsDir.exists()) {
            throw new RuntimeException(SETTINGS_DIR_OPTION + " : " + settingsDir.getPath() + " does not exist!");
        } else if (!settingsDir.isDirectory()) {
            throw new RuntimeException(SETTINGS_DIR_OPTION + " : " + settingsDir.getPath() + " is not a directory!");
        }
    }
}
public class AsynchronousRequest {

    /**
     * Searches recipes asynchronously via the GW2 recipes/search API
     * (<a href="https://wiki.guildwars2.com/wiki/API:2/recipes/search">docs</a>), giving the
     * caller access to {@link Callback#onResponse(Call, Response)} and
     * {@link Callback#onFailure(Call, Throwable)} for custom interactions.
     *
     * @param isInput true to search recipes by input item id, false to search by output item id
     * @param id the item id to search with
     * @param callback callback used for {@link Call#enqueue(Callback)}
     * @throws NullPointerException if the given {@link Callback} is null
     * @see Recipe recipe info
     */
    public void searchRecipes(boolean isInput, int id, Callback<List<Integer>> callback) throws NullPointerException {
        if (isInput)
            gw2API.searchInputRecipes(Integer.toString(id)).enqueue(callback);
        else
            gw2API.searchOutputRecipes(Integer.toString(id)).enqueue(callback);
    }
}
public class Distance {

    /**
     * Gets the Bray–Curtis distance between two 2-D points.
     *
     * @param x1 X axis coordinate of the first point
     * @param y1 Y axis coordinate of the first point
     * @param x2 X axis coordinate of the second point
     * @param y2 Y axis coordinate of the second point
     * @return the Bray–Curtis distance between the points; 0.0 when both points are at
     *     the origin (the previous version returned NaN there because of 0/0)
     */
    public static double BrayCurtis(double x1, double y1, double x2, double y2) {
        double sumN = Math.abs(x1 - x2) + Math.abs(y1 - y2);
        double sumP = Math.abs(x1 + x2) + Math.abs(y1 + y2);
        // Guard against division by zero when every coordinate is 0.
        if (sumP == 0.0) {
            return 0.0;
        }
        return sumN / sumP;
    }
}
public class DBPropertiesUpdate {

    /**
     * Updates the "Default" attribute of the given instance.
     *
     * @param _inst instance (OID) of the value to update
     * @param _value new value for the "Default" attribute
     */
    private void updateDefault(final Instance _inst, final String _value) {
        try {
            final Update update = new Update(_inst);
            update.add("Default", _value);
            update.execute();
        } catch (final EFapsException e) {
            // Best effort: the failure is logged instead of propagated.
            DBPropertiesUpdate.LOG.error("updateDefault(String, String)", e);
        }
    }
}
public class CloneCommand {

    /**
     * Replaces the content of the Git repository branch pointed to by the {@link GitService}
     * with the given source directory, then pushes the result.
     *
     * @param source the directory to commit the content from
     * @param service the {@link GitService} pointing to the repository and branch to commit to
     * @param comment the commit message
     * @return the new revision
     * @throws Exception if the commit or push fails
     */
    public static String cloneContent(String source, GitService service, String comment) throws Exception {
        String rev = GitService.moveContentToBranch(source, service, service.getBranchName(), comment);
        // push(false) — presumably a non-forced push; confirm against GitService.push.
        service.push(false);
        return rev;
    }
}
public class CmsSearch {

    /**
     * Sets the parsed query, which is parameter-decoded first.
     *
     * <p>The parsed query is automatically set by the OpenCms search index when a query is
     * created with either {@link #setQuery(String)} or {@link #addFieldQuery(CmsSearchFieldQuery)}.
     * The Lucene query built from the parameters is stored here and can later be used for
     * paging through the results. Applies only to the query part, not the filter part of
     * the search.
     *
     * @param parsedQuery the parsed query to set
     */
    public void setParsedQuery(String parsedQuery) {
        try {
            m_parsedQuerySet = true;
            m_parameters.setParsedQuery(CmsEncoder.decodeParameter(parsedQuery));
        } catch (CmsIllegalArgumentException iae) {
            // Remember the decode failure instead of propagating it.
            m_lastException = iae;
        }
    }
}
public class Caffeine {

    /**
     * Enables the accumulation of {@link CacheStats} during the operation of the cache.
     * Without this {@link Cache#stats} will return zero for all statistics. Note that
     * recording statistics requires bookkeeping to be performed with each operation, and
     * thus imposes a performance penalty on cache operation.
     *
     * <p>Calling this twice fails via {@code requireState}.
     *
     * @return this {@code Caffeine} instance (for chaining)
     */
    @NonNull
    public Caffeine<K, V> recordStats() {
        requireState(this.statsCounterSupplier == null, "Statistics recording was already set");
        statsCounterSupplier = ENABLED_STATS_COUNTER_SUPPLIER;
        return this;
    }
}
public class vlan_nsip6_binding {

    /**
     * Use this API to fetch vlan_nsip6_binding resources of the given id.
     *
     * @param service the nitro service used to perform the request
     * @param id the id whose bindings are fetched
     * @return the matching vlan_nsip6_binding resources
     * @throws Exception if the underlying resource fetch fails
     */
    public static vlan_nsip6_binding[] get(nitro_service service, Long id) throws Exception {
        vlan_nsip6_binding obj = new vlan_nsip6_binding();
        obj.set_id(id);
        vlan_nsip6_binding response[] = (vlan_nsip6_binding[]) obj.get_resources(service);
        return response;
    }
}
public class AbstractDataEditorWidget {

    /**
     * Saves the changes made in the detail form according to the following steps:
     * commit the form, send the form object to the back-end, and replace the old object
     * with the changed object returned by the back-end.
     *
     * <p>On a RuntimeException during save, the form object is re-fetched from the
     * back-end and, if that succeeds, the user's changes are re-applied to the form
     * model before the exception is rethrown.
     */
    protected void doUpdate() {
        getDetailForm().commit();
        Object savedObject = null;
        try {
            savedObject = saveEntity(getDetailForm().getFormObject());
            setDetailFormObject(savedObject, tableSelectionObserver, false);
        } catch (RuntimeException e) {
            Object changedObject = getDetailForm().getFormObject();
            // The following actually requests the object from the back-end.
            boolean success = setDetailFormObject(changedObject, tableSelectionObserver, true);
            // Set the changes back on the model if the object could be set on the form model.
            if (success)
                ObjectUtils.mapObjectOnFormModel(getDetailForm().getFormModel(), changedObject);
            throw e;
        }
    }
}
public class TableListener {

    /**
     * Pastes the given text into the JTable starting at the currently selected cell.
     * The text is split into rows/columns by {@link #parseString}.
     *
     * @param text the text to insert
     */
    private void addContents(String text) {
        int firstColSelected = table.getSelectedColumn();
        int firstRowSelected = table.getSelectedRow();
        int temp = firstColSelected;
        // Nothing selected — nowhere to paste.
        if (firstColSelected == -1 || firstRowSelected == -1) {
            return;
        }
        ArrayList<ArrayList<String>> clipboard = parseString(text);
        for (int i = 0; i < clipboard.size(); i++) {
            for (int j = 0; j < clipboard.get(i).size(); j++) {
                try {
                    table.getModel().setValueAt(clipboard.get(i).get(j), firstRowSelected, temp++);
                } catch (Exception e) {
                    // NOTE(review): intentionally swallowed — presumably cells pasted past
                    // the table bounds are ignored. Consider bounds-checking instead.
                }
            }
            // Rewind to the first selected column for the next pasted row.
            temp = firstColSelected;
            firstRowSelected++;
        }
    }
}
public class EsAbstractBehavior {

    /**
     * Creates a plain update option, to suppress xacceptUpdateColumnModifiedPropertiesIfNeeds()'s
     * specify process.
     *
     * @return a new UpdateOption configured for batch-update-compatible every-column updates
     */
    @Override
    protected UpdateOption<CB> createPlainUpdateOption() {
        UpdateOption<CB> updateOption = new UpdateOption<CB>();
        updateOption.xtoBeCompatibleBatchUpdateDefaultEveryColumn();
        return updateOption;
    }
}
public class LocalFileSink {

    /**
     * Writes all messages in msgList to the file writer, syncs the file, and updates
     * the monitoring counters.
     *
     * @param msgList the messages to write
     * @throws java.io.IOException if writing or syncing fails
     */
    @Override
    protected void write(List<Message> msgList) throws IOException {
        for (Message msg : msgList) {
            writer.writeTo(msg);
            String routingKey = normalizeRoutingKey(msg);
            // Per-routing-key message and byte counters for monitoring.
            DynamicCounter.increment(MonitorConfig.builder("writtenMessages").withTag(TagKey.DATA_SOURCE, routingKey).build());
            ++writtenMessages;
            DynamicCounter.increment(MonitorConfig.builder("writtenBytes").withTag(TagKey.DATA_SOURCE, routingKey).build(), msg.getPayload().length);
            writtenBytes += msg.getPayload().length;
            messageWrittenInRotation = true;
        }
        // Flush to disk before reporting batch throughput.
        writer.sync();
        throughput.increment(msgList.size());
    }
}
import java.util.ArrayList;
import java.util.List;
import java.util.PriorityQueue;

public class SortedMerge {

    /**
     * Merges multiple integer lists into a single ascending sorted list using a min-heap
     * ({@link PriorityQueue}).
     *
     * <p>Fix: the previous version first sorted every input list in place, mutating the
     * caller's lists (and failing on immutable lists). The pre-sort was redundant — the
     * heap orders all elements regardless of input order — so it has been removed and
     * the inputs are left untouched.
     *
     * <p>Example: sortedMerge([[3, 1, 2], [5, 4]]) == [1, 2, 3, 4, 5]
     *
     * @param args the lists to merge (need not be pre-sorted; not modified)
     * @return a new list containing every element of every input list, in ascending order
     */
    public static List<Integer> sortedMerge(List<List<Integer>> args) {
        PriorityQueue<Integer> merged = new PriorityQueue<>();
        for (List<Integer> lst : args) {
            merged.addAll(lst);
        }
        List<Integer> result = new ArrayList<>(merged.size());
        // Draining the heap yields the elements in ascending order.
        while (!merged.isEmpty()) {
            result.add(merged.poll());
        }
        return result;
    }
}
public class DeleteVpcEndpointServiceConfigurationsRequest {

    /**
     * The IDs of one or more services.
     *
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setServiceIds(java.util.Collection)} or {@link #withServiceIds(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param serviceIds the IDs of one or more services
     * @return this request, so that method calls can be chained together
     */
    public DeleteVpcEndpointServiceConfigurationsRequest withServiceIds(String... serviceIds) {
        // Lazily create the backing list on first use.
        if (this.serviceIds == null) {
            setServiceIds(new com.amazonaws.internal.SdkInternalList<String>(serviceIds.length));
        }
        for (String ele : serviceIds) {
            this.serviceIds.add(ele);
        }
        return this;
    }
}
public class GraphVar {

    /**
     * Removes node x from the domain, i.e. from the upper-bound graph.
     *
     * @param x the node's index
     * @param cause the algorithm responsible for the removal
     * @return true iff the removal has an effect
     * @throws ContradictionException if x is a mandatory node (present in the lower bound)
     */
    public boolean removeNode(int x, ICause cause) throws ContradictionException {
        assert cause != null;
        assert (x >= 0 && x < n);
        if (LB.getNodes().contains(x)) {
            // x is mandatory: removing it makes the problem infeasible.
            this.contradiction(cause, "remove mandatory node");
            return true;
        } else if (!UB.getNodes().contains(x)) {
            // Already absent from the upper bound — nothing to do.
            return false;
        }
        // Remove all arcs incident to x (successors/neighbors, then predecessors)
        // before removing the node itself.
        ISet nei = UB.getSuccOrNeighOf(x);
        for (int i : nei) {
            removeArc(x, i, cause);
        }
        nei = UB.getPredOrNeighOf(x);
        for (int i : nei) {
            removeArc(i, x, cause);
        }
        if (UB.removeNode(x)) {
            if (reactOnModification) {
                // Record the node removal in the delta for incremental propagation.
                delta.add(x, GraphDelta.NR, cause);
            }
            GraphEventType e = GraphEventType.REMOVE_NODE;
            notifyPropagators(e, cause);
            return true;
        }
        return false;
    }
}
public class MultimapSubject {

    /**
     * Returns a context-aware {@link Subject} for making assertions about the values for
     * the given key within the {@link Multimap}.
     *
     * <p>This method performs no checks on its own and cannot cause test failures.
     * Subsequent assertions must be chained onto this method call to test properties of
     * the {@link Multimap}.
     */
    @SuppressWarnings("unchecked") // safe because we only read, not write
    public IterableSubject valuesForKey(@NullableDecl Object key) {
        return check("valuesForKey(%s)", key).that(((Multimap<Object, Object>) actual()).get(key));
    }
}
public class TypeParameterBuilderImpl {

    /**
     * Initializes the type parameter.
     *
     * <p>Caution: this initialization function does not add the type parameter to its
     * container; the container is responsible for adding the type parameter to its
     * internal object.
     *
     * @param context the context object this parameter belongs to
     * @param name the name of the type parameter
     * @param typeContext the provider of types, or null
     */
    public void eInit(EObject context, String name, IJvmTypeProvider typeContext) {
        setTypeResolutionContext(typeContext);
        this.context = context;
        this.parameter = this.jvmTypesFactory.createJvmTypeParameter();
        this.parameter.setName(name);
    }
}
public class HttpMessageSecurity {

    /**
     * Generates a 32-byte (256-bit) AES key.
     *
     * @return a random key, or a fixed well-known value when running in test mode
     */
    private byte[] generateAesKey() {
        byte[] bytes = new byte[32];
        if (!testMode) {
            SecureRandom random = new SecureRandom();
            random.nextBytes(bytes);
        } else {
            // Deterministic key so tests are reproducible; never used outside test mode.
            bytes = "TEST1234TEST1234TEST1234TEST1234".getBytes(MESSAGE_ENCODING);
        }
        return bytes;
    }
}
public class JspResourcesContainerImpl {

    /**
     * {@inheritDoc}
     *
     * <p>Checks whether the generated source/class files are out of date with respect to
     * the input source, which may come either from a container entry or directly from
     * a last-modified timestamp.
     */
    @Override
    public boolean isOutdated() {
        File nullSourceFile = null;
        Entry containerEntry = inputSource.getInputSourceEntry();
        Entry webinfClassEntry = container.getEntry(webinfClassRelativeUrl);
        // If the inputSource is not a container entry, compare by timestamp instead.
        if (containerEntry == null) {
            long lastModified = inputSource.getLastModified();
            return ResourceUtil.isOutdated(lastModified, null, nullSourceFile, generatedSourceFile, classFile, webinfClassEntry, null);
        }
        return ResourceUtil.isOutdated(containerEntry, nullSourceFile, generatedSourceFile, classFile, webinfClassEntry, null);
    }
}
public class DfPattern {

    /**
     * Creates a pattern which can be used to find molecules which contain the
     * {@code query} structure. If a 'real' molecule is provided it is converted with
     * {@link QueryAtomContainer#create(IAtomContainer, Expr.Type...)}, matching elements,
     * aromaticity status, and bond orders.
     *
     * @param query the substructure to find
     * @return a pattern for finding the {@code query}
     * @see QueryAtomContainer#create(IAtomContainer, Expr.Type...)
     */
    public static DfPattern findSubstructure(IAtomContainer query) {
        // Query containers are used as-is; real molecules are converted first.
        if (query instanceof IQueryAtomContainer)
            return new DfPattern((IQueryAtomContainer) query);
        else
            return new DfPattern(QueryAtomContainer.create(query, ALIPHATIC_ELEMENT, AROMATIC_ELEMENT, SINGLE_OR_AROMATIC, ALIPHATIC_ORDER, STEREOCHEMISTRY));
    }
}
public class Iterators {

    /**
     * Returns the number of elements in the specified iterator that equal the specified
     * object. The iterator will be left exhausted: its {@code hasNext()} method will
     * return {@code false}.
     *
     * @see Collections#frequency
     */
    public static int frequency(Iterator<?> iterator, @Nullable Object element) {
        // Count by filtering to equal elements and measuring the filtered size.
        return size(filter(iterator, equalTo(element)));
    }
}
public class ArrayUtils {

    /**
     * Returns the component type of the given array, or {@code Object.class} when the
     * array is {@code null} (null-safe).
     */
    @NullSafe
    @SuppressWarnings("unchecked")
    static <T> Class<?> componentType(T[] array) {
        return array != null ? array.getClass().getComponentType() : Object.class;
    }
}
public class OutputRegistry {

    /**
     * Retrieves the {@link OutputType} for a {@link CommandOutput} type by resolving
     * the third generic type argument of its CommandOutput super type.
     *
     * @param commandOutputClass the concrete CommandOutput class
     * @return the resolved OutputType, or null when the CommandOutput super type
     *     information cannot be resolved
     */
    static OutputType getOutputComponentType(Class<? extends CommandOutput> commandOutputClass) {
        ClassTypeInformation<? extends CommandOutput> classTypeInformation = ClassTypeInformation.from(commandOutputClass);
        TypeInformation<?> superTypeInformation = classTypeInformation.getSuperTypeInformation(CommandOutput.class);
        if (superTypeInformation == null) {
            return null;
        }
        List<TypeInformation<?>> typeArguments = superTypeInformation.getTypeArguments();
        // Index 2 is the generic argument extracted here and in withCodec below.
        return new OutputType(commandOutputClass, typeArguments.get(2), false) {
            @Override
            public ResolvableType withCodec(RedisCodec<?, ?> codec) {
                // Re-resolve the generic against the concrete codec's type variables.
                TypeInformation<?> typeInformation = ClassTypeInformation.from(codec.getClass());
                ResolvableType resolvableType = ResolvableType.forType(commandOutputClass, new CodecVariableTypeResolver(typeInformation));
                // Walk up the hierarchy until we reach CommandOutput itself.
                while (!resolvableType.getRawClass().equals(CommandOutput.class)) {
                    resolvableType = resolvableType.getSuperType();
                }
                return resolvableType.getGeneric(2);
            }
        };
    }
}
public class RowBuilder {

    /**
     * Creates a 16-bit integer valued column and appends it to the row layout.
     *
     * @param name the column name
     * @return this builder, for chaining
     */
    public RowBuilder int16(String name) {
        ColumnInt16 column = new ColumnInt16(_columns.size(), name, _offset);
        // Advance the running offset past the new column.
        _offset += column.length();
        _columns.add(column);
        return this;
    }
}
public class AbstractPatternFilter {

    /**
     * Assigns the StringConverter used to convert T-type objects to Strings. This
     * StringConverter is used to acquire input comparison values for all Patterns to
     * T-object candidates.
     *
     * @param converter the non-null StringConverter used to convert T-type objects to
     *        Strings which should be matched by all supplied Patterns
     */
    public void setConverter(final StringConverter<T> converter) {
        // Check sanity
        Validate.notNull(converter, "converter");
        // DI setters may only be called before initialization.
        validateDiSetterCalledBeforeInitialization("converter");

        // Assign internal state
        this.converter = converter;
    }
}
public class BigtableDataClient {

    /**
     * Convenience method for synchronously reading a single row. If the row does not
     * exist, the returned value is null.
     *
     * @param tableId the table to read from
     * @param rowKey the key of the row to read
     * @return the row, or null when it does not exist
     * @throws com.google.api.gax.rpc.ApiException when a serverside error occurs
     */
    public Row readRow(String tableId, ByteString rowKey) {
        // Blocks on the async variant, translating execution errors into ApiExceptions.
        return ApiExceptions.callAndTranslateApiException(readRowAsync(tableId, rowKey, null));
    }
}
public class SourceStream {

    /**
     * @see com.ibm.ws.sib.processor.impl.interfaces.BatchListener#batchCommitted()
     *
     * Sends any messages accumulated for the committed batch, then clears the batch
     * lists ready for the next batch. Send failures are FFDC-logged and swallowed.
     */
    public void batchCommitted() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "batchCommitted");
        if (batchSendList.size() != 0) {
            try {
                sendMsgs(batchSendList, false);
            } catch (SIResourceException e) {
                // FFDC
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.gd.SourceStream.batchCommitted", "1:2590:1.138", this);
                SibTr.exception(tc, e);
            }
        }
        // Empty lists ready for use by the next batch.
        batchList.clear();
        batchSendList.clear();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "batchCommitted");
    }
}
public class Wepay {

    /**
     * Initializes the component sub-APIs (pays, orders, refunds, notifies, bills) and,
     * when certificate material is configured, the SSL socket factory.
     *
     * @return this instance, for chaining
     */
    public Wepay init() {
        pays = new Pays(this);
        orders = new Orders(this);
        refunds = new Refunds(this);
        notifies = new Notifies(this);
        bills = new Bills(this);
        // SSL is only set up when both the certs and their password are present.
        if (certs != null && !Strings.isNullOrEmpty(certPasswd)) {
            sslSocketFactory = initSSLSocketFactory();
        }
        return this;
    }
}
public class HtmlDocletWriter {

    /**
     * Retrieves the class link with the package portion of the label in plain text.
     * If the qualifier is excluded, it will not be included in the link label.
     *
     * @param context the kind of link being generated
     * @param cd the class to link to
     * @param isStrong true if the link should be strong
     * @return the link with the package portion of the label in plain text
     */
    public Content getPreQualifiedClassLink(LinkInfoImpl.Kind context, ClassDoc cd, boolean isStrong) {
        ContentBuilder classlink = new ContentBuilder();
        PackageDoc pd = cd.containingPackage();
        // Prefix with the package name unless the configuration excludes this qualifier.
        if (pd != null && !configuration.shouldExcludeQualifier(pd.name())) {
            classlink.addContent(getPkgName(cd));
        }
        classlink.addContent(getLink(new LinkInfoImpl(configuration, context, cd).label(cd.name()).strong(isStrong)));
        return classlink;
    }
}
public class StackTracePrinter { /** * Method prints the given stack trace in a human readable way . * @ param message the reason for printing the stack trace . * @ param stackTraces the stack trace to print . * @ param logger the logger used for printing . * @ param logLevel the log level used for logging the stack trace . */ public static void printStackTrace ( final String message , final StackTraceElement [ ] stackTraces , final Logger logger , final LogLevel logLevel ) { } }
String stackTraceString = "" ; for ( final StackTraceElement stackTrace : stackTraces ) { stackTraceString += stackTrace . toString ( ) + "\n" ; } Printer . print ( ( message == null ? "" : message ) + "\n=== Stacktrace ===\n" + stackTraceString + "==================" , logLevel , logger ) ;
public class AWSIotClient {

    /**
     * Creates a bulk thing provisioning task.
     *
     * @param request the StartThingRegistrationTask request
     * @return result of the StartThingRegistrationTask operation returned by the service
     * @throws InvalidRequestException the request is not valid
     * @throws ThrottlingException the rate exceeds the limit
     * @throws UnauthorizedException you are not authorized to perform this operation
     * @throws InternalFailureException an unexpected error has occurred
     * @sample AWSIot.StartThingRegistrationTask
     */
    @Override
    public StartThingRegistrationTaskResult startThingRegistrationTask(StartThingRegistrationTaskRequest request) {
        // Apply pre-execution hooks (e.g. request mutation) before dispatching.
        request = beforeClientExecution(request);
        return executeStartThingRegistrationTask(request);
    }
}
public class DBInitializerHelper {

    /**
     * Returns the SQL script for creating objects such as an index or a primary/foreign key.
     *
     * @param objectName name of the database object whose creation script is wanted
     * @param multiDb multi-database flag — NOTE(review): not referenced in this body;
     *     confirm whether callers still need it
     * @param dialect the SQL dialect used to prepare the scripts
     * @param wsEntry the workspace configuration
     * @return the unique creation script containing objectName
     * @throws RepositoryConfigurationException when no script, or more than one script, matches
     * @throws IOException if preparing the scripts fails
     */
    public static String getObjectScript(String objectName, boolean multiDb, String dialect, WorkspaceEntry wsEntry) throws RepositoryConfigurationException, IOException {
        String scripts = prepareScripts(wsEntry, dialect);
        String sql = null;
        for (String query : JDBCUtils.splitWithSQLDelimiter(scripts)) {
            String q = JDBCUtils.cleanWhitespaces(query);
            if (q.contains(objectName)) {
                // More than one matching script is ambiguous — fail loudly.
                if (sql != null) {
                    throw new RepositoryConfigurationException("Can't find unique script for object creation. Object name: " + objectName);
                }
                sql = q;
            }
        }
        if (sql != null) {
            return sql;
        }
        throw new RepositoryConfigurationException("Script for object creation is not found. Object name: " + objectName);
    }
}
public class StratifiedSampling {

    /**
     * Calculates the standard deviation for xbar (the square root of {@code xbarVariance}).
     *
     * @param sampleDataCollection the sampled data
     * @param nh the sample sizes per stratum
     * @param populationNh the population sizes per stratum
     * @return the standard deviation of xbar
     */
    public static double xbarStd(TransposeDataCollection sampleDataCollection, AssociativeArray nh, AssociativeArray populationNh) {
        return Math.sqrt(xbarVariance(sampleDataCollection, nh, populationNh));
    }
}
public class JAXBNamedResourceFactory {

    /**
     * Resolves this property reference to a deserialized JAXB value.
     *
     * @return the deserialized value
     * @throws RuntimeException when the property is missing
     */
    public T get() {
        T value = get(null);
        if (value == null)
            throw new RuntimeException("Missing property for JAXB resource: " + name);
        else
            return value;
    }
}
public class TagVFilter { /** * Runs through the loaded plugin map and dumps the names , description and * examples into a map to serialize via the API . * @ return A map of filter meta data . */ public static Map < String , Map < String , String > > loadedFilters ( ) { } }
final Map < String , Map < String , String > > filters = new HashMap < String , Map < String , String > > ( tagv_filter_map . size ( ) ) ; for ( final Pair < Class < ? > , Constructor < ? extends TagVFilter > > pair : tagv_filter_map . values ( ) ) { final Map < String , String > filter_meta = new HashMap < String , String > ( 1 ) ; try { Method method = pair . getKey ( ) . getDeclaredMethod ( "description" ) ; filter_meta . put ( "description" , ( String ) method . invoke ( null ) ) ; method = pair . getKey ( ) . getDeclaredMethod ( "examples" ) ; filter_meta . put ( "examples" , ( String ) method . invoke ( null ) ) ; final Field filter_name = pair . getKey ( ) . getDeclaredField ( "FILTER_NAME" ) ; filters . put ( ( String ) filter_name . get ( null ) , filter_meta ) ; } catch ( SecurityException e ) { throw new RuntimeException ( "Unexpected security exception" , e ) ; } catch ( NoSuchMethodException e ) { LOG . error ( "Filter plugin " + pair . getClass ( ) . getCanonicalName ( ) + " did not implement one of the \"description\" or \"examples\" methods" ) ; } catch ( NoSuchFieldException e ) { LOG . error ( "Filter plugin " + pair . getClass ( ) . getCanonicalName ( ) + " did not have the \"FILTER_NAME\" field" ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( "Unexpected exception" , e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Unexpected security exception" , e ) ; } catch ( InvocationTargetException e ) { throw new RuntimeException ( "Unexpected security exception" , e ) ; } } return filters ;
public class TypeMappings {

    /**
     * Gets the input unique group ID mapping for the UserRegistry.
     *
     * @param inputVirtualRealm virtual realm to find the mappings for; must not be
     *     null or empty
     * @return the input unique group ID property; never null or empty
     */
    public String getInputUniqueGroupId(String inputVirtualRealm) {
        // Fall back to the default when no explicit mapping is configured.
        String returnValue = getInputMapping(inputVirtualRealm, Service.CONFIG_DO_UNIQUE_GROUP_ID_MAPPING, INPUT_UNIQUE_GROUP_ID_DEFAULT);
        return returnValue;
    }
}
public class AWSOpsWorksClient { /** * Requests a description of a specified set of apps . * < note > * This call accepts only one resource - identifying parameter . * < / note > * < b > Required Permissions < / b > : To use this action , an IAM user must have a Show , Deploy , or Manage permissions * level for the stack , or an attached policy that explicitly grants permissions . For more information about user * permissions , see < a * href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / opsworks - security - users . html " > Managing User * Permissions < / a > . * @ param describeAppsRequest * @ return Result of the DescribeApps operation returned by the service . * @ throws ValidationException * Indicates that a request was not valid . * @ throws ResourceNotFoundException * Indicates that a resource was not found . * @ sample AWSOpsWorks . DescribeApps * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworks - 2013-02-18 / DescribeApps " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeAppsResult describeApps ( DescribeAppsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeApps ( request ) ;
public class Jpa2AttackStore { /** * { @ inheritDoc } */ @ Override public Collection < Attack > findAttacks ( SearchCriteria criteria ) { } }
Collection < Attack > attacksAllTimestamps = attackRepository . find ( criteria ) ; // timestamp stored as string not queryable in DB , all timestamps come back , still need to filter this subset return findAttacks ( criteria , attacksAllTimestamps ) ;
public class CrawlConfig { /** * Validates the configs specified by this instance . * @ throws Exception on Validation fail */ public void validate ( ) throws Exception { } }
if ( crawlStorageFolder == null ) { throw new Exception ( "Crawl storage folder is not set in the CrawlConfig." ) ; } if ( politenessDelay < 0 ) { throw new Exception ( "Invalid value for politeness delay: " + politenessDelay ) ; } if ( maxDepthOfCrawling < - 1 ) { throw new Exception ( "Maximum crawl depth should be either a positive number or -1 for unlimited depth" + "." ) ; } if ( maxDepthOfCrawling > Short . MAX_VALUE ) { throw new Exception ( "Maximum value for crawl depth is " + Short . MAX_VALUE ) ; }
public class CSSOMParser { /** * Parses a input string into a CSSOM style declaration . * @ param styleDecl the input string * @ param sd the CSSOM style declaration * @ throws IOException if the underlying SAC parser throws an IOException */ public void parseStyleDeclaration ( final CSSStyleDeclarationImpl sd , final String styleDecl ) throws IOException { } }
try ( InputSource source = new InputSource ( new StringReader ( styleDecl ) ) ) { final Stack < Object > nodeStack = new Stack < > ( ) ; nodeStack . push ( sd ) ; final CSSOMHandler handler = new CSSOMHandler ( nodeStack ) ; parser_ . setDocumentHandler ( handler ) ; parser_ . parseStyleDeclaration ( source ) ; }
public class AssociateMemberToGroupRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AssociateMemberToGroupRequest associateMemberToGroupRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( associateMemberToGroupRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( associateMemberToGroupRequest . getOrganizationId ( ) , ORGANIZATIONID_BINDING ) ; protocolMarshaller . marshall ( associateMemberToGroupRequest . getGroupId ( ) , GROUPID_BINDING ) ; protocolMarshaller . marshall ( associateMemberToGroupRequest . getMemberId ( ) , MEMBERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ClassEnvy { /**
     * overrides the visitor to look for method calls, and populate a class access count map based on the owning class of methods called.
     *
     * @param seen the opcode currently being parsed
     */ @Override public void sawOpcode(final int seen) { } }
    try {
        // Keep the opcode-stack model in sync before inspecting this opcode.
        stack.precomputation(this);
        if (OpcodeUtils.isStandardInvoke(seen)) {
            // Regular method call: charge the access to the callee's owning class.
            String calledClass = getDottedClassConstantOperand();
            if (seen == Const.INVOKEINTERFACE) {
                int parmCount = SignatureUtils.getNumParameters(this.getSigConstantOperand());
                // NOTE(review): the int overload appears to resolve the receiver from
                // the opcode stack at the given depth, falling back to the constant-pool
                // class name when that fails -- confirm against countClassAccess(int).
                if (!countClassAccess(parmCount)) {
                    countClassAccess(calledClass);
                }
            } else {
                countClassAccess(calledClass);
            }
        } else if (seen == Const.PUTFIELD) {
            // NOTE(review): 1 is presumably the stack depth of the object whose
            // field is written (value on top, objectref below) -- TODO confirm.
            countClassAccess(1);
        } else if (seen == Const.GETFIELD) {
            // NOTE(review): 0 is presumably the stack depth of the object whose
            // field is read -- TODO confirm.
            countClassAccess(0);
        } else if ((seen == Const.PUTSTATIC) || (seen == Const.GETSTATIC)) {
            // Static field access: charge the declaring class directly.
            countClassAccess(getDottedClassConstantOperand());
        } else if ((seen == Const.ALOAD_0) && (!methodIsStatic)) {
            // Loading 'this' counts as an access to our own class.
            countClassAccess(clsName);
        }
    } finally {
        // Always advance the opcode stack, even if counting threw.
        stack.sawOpcode(this, seen);
    }
public class PharmacophoreUtils { /**
     * Write out one or more pharmacophore queries in the CDK XML format.
     *
     * @param queries the pharmacophore queries
     * @param out the OutputStream to write to
     * @throws IOException if there is a problem writing the XML document
     */ public static void writePharmacophoreDefinition(PharmacophoreQuery[] queries, OutputStream out) throws IOException { } }
    // Document root wraps every query in a single container element.
    Element root = new Element("pharmacophoreContainer");
    root.addAttribute(new Attribute("version", "1.0"));
    for (PharmacophoreQuery query : queries) {
        Element pcore = new Element("pharmacophore");
        // Optional metadata: description property and title, when present.
        Object description = query.getProperty("description");
        if (description != null) pcore.addAttribute(new Attribute("description", (String) description));
        Object name = query.getTitle();
        if (name != null) pcore.addAttribute(new Attribute("name", (String) name));
        // we add the pcore groups for this query as local to the group
        for (IAtom atom : query.atoms()) {
            Element group = new Element("group");
            group.addAttribute(new Attribute("id", atom.getSymbol()));
            group.appendChild(((PharmacophoreQueryAtom) atom).getSmarts());
            pcore.appendChild(group);
        }
        // now add the constraints
        for (IBond bond : query.bonds()) {
            Element elem = null;
            if (bond instanceof PharmacophoreQueryBond) {
                // Distance constraint: lower/upper bounds in Angstroms.
                PharmacophoreQueryBond dbond = (PharmacophoreQueryBond) bond;
                elem = new Element("distanceConstraint");
                elem.addAttribute(new Attribute("lower", String.valueOf(dbond.getLower())));
                elem.addAttribute(new Attribute("upper", String.valueOf(dbond.getUpper())));
                elem.addAttribute(new Attribute("units", "A"));
            } else if (bond instanceof PharmacophoreQueryAngleBond) {
                // Angle constraint: lower/upper bounds in degrees.
                PharmacophoreQueryAngleBond dbond = (PharmacophoreQueryAngleBond) bond;
                elem = new Element("angleConstraint");
                elem.addAttribute(new Attribute("lower", String.valueOf(dbond.getLower())));
                elem.addAttribute(new Attribute("upper", String.valueOf(dbond.getUpper())));
                elem.addAttribute(new Attribute("units", "degrees"));
            }
            // now add the group associated with this constraint
            for (IAtom iAtom : bond.atoms()) {
                PharmacophoreQueryAtom atom = (PharmacophoreQueryAtom) iAtom;
                Element gelem = new Element("groupRef");
                gelem.addAttribute(new Attribute("id", atom.getSymbol()));
                if (elem != null) {
                    elem.appendChild(gelem);
                }
            }
            // NOTE(review): if a bond is neither a PharmacophoreQueryBond nor a
            // PharmacophoreQueryAngleBond, elem stays null and this appendChild
            // will throw -- confirm whether other bond types can appear here.
            pcore.appendChild(elem);
        }
        root.appendChild(pcore);
    }
    // Serialize with fixed encoding and wrapping so output is reproducible.
    Document doc = new Document(root);
    Serializer serializer = new Serializer(out, "ISO-8859-1");
    serializer.setIndent(4);
    serializer.setMaxLength(128);
    serializer.write(doc);
public class CmsLockReportDialog { /** * Unlocks the resource and all descending resources . < p > */ protected void unlock ( ) { } }
CmsRpcAction < Void > action = new CmsRpcAction < Void > ( ) { @ Override public void execute ( ) { CmsCoreProvider . getVfsService ( ) . forceUnlock ( getStructureId ( ) , this ) ; } @ Override public void onFailure ( Throwable t ) { hide ( ) ; super . onFailure ( t ) ; } @ Override protected void onResponse ( Void result ) { onUnlock ( ) ; } } ; m_closeButton . disable ( Messages . get ( ) . key ( Messages . GUI_LOADING_0 ) ) ; m_unlockButton . disable ( Messages . get ( ) . key ( Messages . GUI_LOADING_0 ) ) ; action . execute ( ) ;
public class Util { /** * Given a query result from a SPARQL query , obtain the given variable value * as a URL * @ param resultRow the result from a SPARQL query * @ param variableName the name of the variable to obtain * @ return the value or null if it could not be obtained */ private static URL getUrlValueFromRow ( QuerySolution resultRow , String variableName ) { } }
// Check the input and exit immediately if null if ( resultRow == null ) { return null ; } URL result = null ; Resource res = resultRow . getResource ( variableName ) ; // Ignore and track services that are blank nodes if ( res . isAnon ( ) ) { log . warn ( "Blank node found and ignored " + res . toString ( ) ) ; } else if ( res . isURIResource ( ) ) { try { result = new URL ( res . getURI ( ) ) ; } catch ( MalformedURLException e ) { log . error ( "Malformed URL for node" , e ) ; } catch ( ClassCastException e ) { log . error ( "The node is not a URI" , e ) ; } } return result ;
public class BundleAdjustmentMetricSchurJacobian { /**
     * Internal matrix type agnostic process function.
     *
     * @param input Input parameters describing the current state of the optimization
     * @param leftPoint Storage for left Jacobian
     * @param rightView Storage for right Jacobian
     */ public void internalProcess(double[] input, DMatrix leftPoint, DMatrix rightView) { } }
    int numRows = getNumOfOutputsM();
    // number of parameters on left. All points
    int numPointParam = structure.points.length * lengthPoint + numRigidUnknown * lengthSE3;
    // Number of paramters on right. views + camera
    int numViewParam = numParameters - numPointParam; // view + camera
    // Size and clear both Jacobian halves before accumulation.
    ((ReshapeMatrix) leftPoint).reshape(numRows, numPointParam);
    ((ReshapeMatrix) rightView).reshape(numRows, numViewParam);
    leftPoint.zero();
    rightView.zero();
    // parse parameters for rigid bodies. the translation + rotation is the same for all views
    for (int rigidIndex = 0; rigidIndex < structure.rigids.length; rigidIndex++) {
        if (!structure.rigids[rigidIndex].known) {
            jacRigidS03[rigidIndex].setParameters(input, indexFirstRigid + rigidParameterIndexes[rigidIndex]);
        }
    }
    int observationIndex = 0;
    // first decode the transformation
    for (int viewIndex = 0; viewIndex < structure.views.length; viewIndex++) {
        SceneStructureMetric.View view = structure.views[viewIndex];
        SceneStructureMetric.Camera camera = structure.cameras[view.camera];
        if (!view.known) {
            // Unknown view: decode rotation (via jacSO3) then translation
            // from the packed parameter vector. Layout is rotation params
            // followed by t.x, t.y, t.z -- order matters here.
            int paramIndex = viewParameterIndexes[viewIndex] + indexFirstView;
            jacSO3.setParameters(input, paramIndex);
            paramIndex += jacSO3.getParameterLength();
            worldToView.T.x = input[paramIndex];
            worldToView.T.y = input[paramIndex + 1];
            worldToView.T.z = input[paramIndex + 2];
            worldToView.getR().set(jacSO3.getRotationMatrix());
        } else {
            // Known view: use the fixed transform as-is.
            worldToView.set(view.worldToView);
        }
        int cameraParamStartIndex = cameraParameterIndexes[view.camera];
        if (!camera.known) {
            camera.model.setIntrinsic(input, indexLastView + cameraParamStartIndex);
        }
        // Accumulate Jacobian rows for this view's general points, then
        // (when rigid observations exist) its rigid points.
        observationIndex = computeGeneralPoints(leftPoint, rightView, input, observationIndex, viewIndex, view, camera, cameraParamStartIndex);
        if (observations.viewsRigid != null)
            observationIndex = computeRigidPoints(leftPoint, rightView, observationIndex, viewIndex, view, camera, cameraParamStartIndex);
    }
public class base_resource { /** * Use this method to perform a stat operation on netscaler resources . * @ param service nitro _ service object . * @ param option options class object . * @ return Array of nitro stat resources of specified type . * @ throws Exception Nitro exception is thrown . */ protected base_resource [ ] stat_resources ( nitro_service service , options option ) throws Exception { } }
if ( ! service . isLogin ( ) ) service . login ( ) ; base_resource [ ] response = stat_request ( service , option ) ; return response ;
public class StreamletImpl { /** * Return a new Streamlet accumulating tuples of this streamlet over a Window defined by * windowCfg and applying reduceFn on those tuples . For each window , the value identity is used * as a initial value . All the matching tuples are reduced using reduceFn starting from this * initial value . * @ param keyExtractor The function applied to a tuple of this streamlet to get the key * @ param windowCfg This is a specification of what kind of windowing strategy you like to have . * Typical windowing strategies are sliding windows and tumbling windows * @ param identity The identity element is both the initial value inside the reduction window * and the default result if there are no elements in the window * @ param reduceFn The reduce function takes two parameters : a partial result of the reduction * and the next element of the stream . It returns a new partial result . */ @ Override public < K , T > KVStreamlet < KeyedWindow < K > , T > reduceByKeyAndWindow ( SerializableFunction < R , K > keyExtractor , WindowConfig windowCfg , T identity , SerializableBiFunction < T , R , ? extends T > reduceFn ) { } }
checkNotNull ( keyExtractor , "keyExtractor cannot be null" ) ; checkNotNull ( windowCfg , "windowCfg cannot be null" ) ; checkNotNull ( identity , "identity cannot be null" ) ; checkNotNull ( reduceFn , "reduceFn cannot be null" ) ; GeneralReduceByKeyAndWindowStreamlet < R , K , T > retval = new GeneralReduceByKeyAndWindowStreamlet < > ( this , keyExtractor , windowCfg , identity , reduceFn ) ; addChild ( retval ) ; return new KVStreamletShadow < KeyedWindow < K > , T > ( retval ) ;
public class DevicesManagementApi { /** * Updates a task for all devices - For now just allows changing the state to cancelled . * Updates a task for all devices - For now just allows changing the state to cancelled . * @ param tid Task ID . ( required ) * @ param taskUpdateRequest Task update request ( required ) * @ return ApiResponse & lt ; TaskUpdateResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < TaskUpdateResponse > updateTaskWithHttpInfo ( String tid , TaskUpdateRequest taskUpdateRequest ) throws ApiException { } }
com . squareup . okhttp . Call call = updateTaskValidateBeforeCall ( tid , taskUpdateRequest , null , null ) ; Type localVarReturnType = new TypeToken < TaskUpdateResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class Nucleotide { /** * get the base monomer , the return value could be null if this nucleotide * does not have a base * @ param monomerStore - store in which base monomer is located * @ return base monomer , could be null */ public Monomer getBaseMonomer ( MonomerStore monomerStore ) { } }
String baseSymbol = getBaseSymbol ( ) ; if ( baseSymbol != null && ! baseSymbol . equalsIgnoreCase ( "" ) ) { try { Map < String , Monomer > monomers = monomerStore . getMonomers ( Monomer . NUCLIEC_ACID_POLYMER_TYPE ) ; Monomer m = monomers . get ( baseSymbol ) ; return m ; } catch ( Exception ex ) { LOG . info ( "Unable to get base monomer for " + baseSymbol ) ; return null ; } } else { return null ; }
public class Type3Font { /** * Defines a glyph . If the character was already defined it will return the same content * @ param c the character to match this glyph . * @ param wx the advance this character will have * @ param llx the X lower left corner of the glyph bounding box . If the < CODE > colorize < / CODE > option is * < CODE > true < / CODE > the value is ignored * @ param lly the Y lower left corner of the glyph bounding box . If the < CODE > colorize < / CODE > option is * < CODE > true < / CODE > the value is ignored * @ param urx the X upper right corner of the glyph bounding box . If the < CODE > colorize < / CODE > option is * < CODE > true < / CODE > the value is ignored * @ param ury the Y upper right corner of the glyph bounding box . If the < CODE > colorize < / CODE > option is * < CODE > true < / CODE > the value is ignored * @ return a content where the glyph can be defined */ public PdfContentByte defineGlyph ( char c , float wx , float llx , float lly , float urx , float ury ) { } }
if ( c == 0 || c > 255 ) throw new IllegalArgumentException ( "The char " + ( int ) c + " doesn't belong in this Type3 font" ) ; usedSlot [ c ] = true ; Integer ck = Integer . valueOf ( c ) ; Type3Glyph glyph = ( Type3Glyph ) char2glyph . get ( ck ) ; if ( glyph != null ) return glyph ; widths3 . put ( c , ( int ) wx ) ; if ( ! colorized ) { if ( Float . isNaN ( this . llx ) ) { this . llx = llx ; this . lly = lly ; this . urx = urx ; this . ury = ury ; } else { this . llx = Math . min ( this . llx , llx ) ; this . lly = Math . min ( this . lly , lly ) ; this . urx = Math . max ( this . urx , urx ) ; this . ury = Math . max ( this . ury , ury ) ; } } glyph = new Type3Glyph ( writer , pageResources , wx , llx , lly , urx , ury , colorized ) ; char2glyph . put ( ck , glyph ) ; return glyph ;
public class HeatChart { /**
     * Generates and returns a new chart <code>Image</code> configured according to this object's
     * currently held settings. The given parameter determines whether transparency should be enabled
     * for the generated image. No chart will be generated until this or the related
     * <code>saveToFile(File)</code> method are called. All successive calls will result in the
     * generation of a new chart image, no caching is used.
     *
     * @param alpha whether to enable transparency.
     * @return A newly generated chart <code>Image</code>. The returned image is a
     *         <code>BufferedImage</code>.
     */ public Image getChartImage(boolean alpha) { } }
    // Calculate all unknown dimensions.
    measureComponents();
    updateCoordinates();
    // Determine image type based upon whether require alpha or not.
    // Using BufferedImage.TYPE_INT_ARGB seems to break on jpg.
    int imageType = (alpha ? BufferedImage.TYPE_4BYTE_ABGR : BufferedImage.TYPE_3BYTE_BGR);
    // Create our chart image which we will eventually draw everything on.
    BufferedImage chartImage = new BufferedImage(chartSize.width, chartSize.height, imageType);
    Graphics2D chartGraphics = chartImage.createGraphics();
    // Use anti-aliasing where ever possible.
    chartGraphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    // Set the background.
    chartGraphics.setColor(backgroundColour);
    chartGraphics.fillRect(0, 0, chartSize.width, chartSize.height);
    // The drawing order below is significant: later layers paint on top
    // of earlier ones.
    // Draw the title.
    drawTitle(chartGraphics);
    // Draw the heatmap image.
    drawHeatMap(chartGraphics, zValues);
    // Draw the axis labels.
    drawXLabel(chartGraphics);
    drawYLabel(chartGraphics);
    // Draw the axis bars.
    drawAxisBars(chartGraphics);
    // Draw axis values.
    drawXValues(chartGraphics);
    drawYValues(chartGraphics);
    return chartImage;
public class WDTD { /** * Accessors */ @ Override public List < EntityDeclaration > getEntities ( ) { } }
if ( mEntities == null && ( mSubset != null ) ) { /* Better make a copy , so that caller can not modify list * DTD has , which may be shared ( since DTD subset instances * are cached and reused ) */ mEntities = new ArrayList < EntityDeclaration > ( mSubset . getGeneralEntityList ( ) ) ; } return mEntities ;
public class PairingHeap { /**
     * Decrease the key of a node.
     *
     * @param n the node
     * @param newKey the new key; must not compare greater than the current key
     */ @SuppressWarnings("unchecked") private void decreaseKey(Node<K, V> n, K newKey) { } }
    // Compare new key against current, using natural order when no comparator
    // was supplied.
    int c;
    if (comparator == null) {
        c = ((Comparable<? super K>) newKey).compareTo(n.key);
    } else {
        c = comparator.compare(newKey, n.key);
    }
    if (c > 0) {
        throw new IllegalArgumentException("Keys can only be decreased!");
    }
    n.key = newKey;
    // An unchanged key, or decreasing the root itself, cannot violate the
    // heap order: no relinking needed.
    if (c == 0 || root == n) {
        return;
    }
    // NOTE(review): o_s/y_s/o_c appear to be older-sibling(or parent)/
    // younger-sibling/oldest-child links -- confirm against Node's declaration.
    if (n.o_s == null) {
        throw new IllegalArgumentException("Invalid handle!");
    }
    // unlink from parent
    if (n.y_s != null) {
        n.y_s.o_s = n.o_s;
    }
    if (n.o_s.o_c == n) { // I am the oldest :(
        n.o_s.o_c = n.y_s;
    } else { // I have an older sibling!
        n.o_s.y_s = n.y_s;
    }
    n.y_s = null;
    n.o_s = null;
    // merge with root
    if (comparator == null) {
        root = link(root, n);
    } else {
        root = linkWithComparator(root, n);
    }
public class ST_GeomFromText { /** * Convert well known text parameter into a Geometry * @ param wkt Well known text * @ return Geometry instance or null if parameter is null * @ throws ParseException If wkt is invalid */ public static Geometry toGeometry ( String wkt ) throws SQLException { } }
if ( wkt == null ) { return null ; } WKTReader wktReader = new WKTReader ( ) ; try { return wktReader . read ( wkt ) ; } catch ( ParseException ex ) { throw new SQLException ( "Cannot parse the WKT." , ex ) ; }
public class YearMonth { /** * Obtains an instance of { @ code YearMonth } from a year and month . * @ param year the year to represent , from MIN _ YEAR to MAX _ YEAR * @ param month the month - of - year to represent , not null * @ return the year - month , not null * @ throws DateTimeException if the year value is invalid */ public static YearMonth of ( int year , Month month ) { } }
Objects . requireNonNull ( month , "month" ) ; return of ( year , month . getValue ( ) ) ;
public class VariableInitializer { /** * Sets the node type . * @ throws IllegalArgumentException if { @ code nodeType } is not one of * { @ link Token # VAR } , { @ link Token # CONST } , or { @ link Token # LET } */ public void setNodeType ( int nodeType ) { } }
if ( nodeType != Token . VAR && nodeType != Token . CONST && nodeType != Token . LET ) throw new IllegalArgumentException ( "invalid node type" ) ; setType ( nodeType ) ;
public class JMMap { /** * New changed value with entry map map . * @ param < K > the type parameter * @ param < V > the type parameter * @ param < NV > the type parameter * @ param map the map * @ param changingValueFunction the changing value function * @ return the map */ public static < K , V , NV > Map < K , NV > newChangedValueWithEntryMap ( Map < K , V > map , Function < Entry < K , V > , NV > changingValueFunction ) { } }
return buildEntryStream ( map ) . collect ( toMap ( Entry :: getKey , changingValueFunction ) ) ;
public class MtasSpanSequenceSpans { /** * Reset queue . */ void resetQueue ( ) { } }
currentPosition = - 1 ; queueMatches . clear ( ) ; for ( QueueItem item : queueSpans ) { item . reset ( ) ; } currentMatch = null ;
public class AbstractAggregatingDefaultQueryPersonAttributeDao { /** * Set the { @ link List } of delegates which we will poll for attributes . * @ param daos The personAttributeDaos to set . * @ throws IllegalArgumentException If daos is < code > null < / code > . */ @ Required public final void setPersonAttributeDaos ( final List < IPersonAttributeDao > daos ) { } }
Validate . notNull ( daos , "The IPersonAttributeDao List cannot be null" ) ; this . personAttributeDaos = Collections . unmodifiableList ( daos ) ;
public class CommonOps_DDF2 { /** * Performs an in - place transpose . This algorithm is only efficient for square * matrices . * @ param m The matrix that is to be transposed . Modified . */ public static void transpose ( DMatrix2x2 m ) { } }
double tmp ; tmp = m . a12 ; m . a12 = m . a21 ; m . a21 = tmp ;
public class ProtoParser { /**
     * Delegated rules.
     * ANTLR-generated syntactic predicate: speculatively matches the synpred1
     * fragment without consuming input.
     *
     * @return true if the fragment would match at the current input position
     */ public final boolean synpred1_ProtoParser() { } }
    // Enter backtracking mode and remember where we are so input can be rewound.
    state.backtracking++;
    int start = input.mark();
    try {
        synpred1_ProtoParser_fragment(); // can never throw exception
    } catch (RecognitionException re) {
        System.err.println("impossible: " + re);
    }
    // While backtracking the fragment records failure in state.failed
    // instead of throwing.
    boolean success = !state.failed;
    input.rewind(start);
    state.backtracking--;
    // Clear the failure flag so normal parsing can resume.
    state.failed = false;
    return success;
public class RecoverableUnitImpl { /**
     * Informs the recoverable unit that it is being deleted. This method writes a
     * special record to the underlying recovery log that indicates this event occured,
     * and allows 'old' information to be ignored during recovery.
     * Caller MUST hold the shared lock before invoking this method.
     *
     * @exception InternalLogException An unexpected exception has occured
     */ void remove() throws InternalLogException { } }
    if (tc.isEntryEnabled()) Tr.entry(tc, "remove", this);
    // If the parent recovery log instance has experienced a serious internal error then prevent
    // this operation from executing.
    if (_recLog.failed()) {
        if (tc.isEntryEnabled()) Tr.exit(tc, "remove", this);
        throw new InternalLogException(null);
    }
    if (_storedOnDisk) {
        // There is information relating to this recoverable unit stored in the underlying
        // recovery log. We must write a deletion record to indicate that this is no longer
        // valid.
        try {
            if (tc.isDebugEnabled()) Tr.debug(tc, "Creating deletion record for recoverable unit '" + _identity + "'");
            final WriteableLogRecord logRecord = _logHandle.getWriteableLogRecord(_removalHeaderSize);
            // A null log record returned by the log handle indicates that the request to get
            // a new writeable log record resulted in a keypoint occuring. As this recoverable
            // unit has already been removed from the map it will not have been written as part
            // of the keypoint process and no longer exists in the log. Therefore there is no
            // need to write the deletion record for this recoverable unit.
            if (logRecord != null) {
                writeRecordHeader(logRecord, RECORDTYPEDELETED);
                // Tell the WritableLogRecord that we have finished building the removal record. This
                // will cause it to add the appropriate record tail to the underlying recovery log.
                logRecord.close();
                _logHandle.writeLogRecord(logRecord);
            }
        } catch (Throwable exc) {
            // Any failure here invalidates the whole recovery log: record FFDC,
            // mark the log failed and surface an InternalLogException.
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.remove", "801", this);
            if (tc.isEventEnabled()) Tr.event(tc, "An unexpected error occurred whilst formatting the recovery log buffer");
            _recLog.markFailed(exc); /* @MD19484C */
            if (tc.isEntryEnabled()) Tr.exit(tc, "remove", "InternalLogException");
            throw new InternalLogException(exc);
        }
    } else {
        // There is no trace of this recoverable unit in the underlying recovery log and so there is
        // no further action to take.
        if (tc.isDebugEnabled()) Tr.debug(tc, "Not writing deletion record because RecoverableUnit '" + _identity + "' is not on disk");
    }
    // Inform the recovery log of the reduction in the data payload due to this unit being deleted.
    if (_unwrittenDataSize > 0) {
        _recLog.payloadDeleted(_totalDataSize + _totalHeaderSize, _unwrittenDataSize + _totalHeaderSize);
    } else {
        _recLog.payloadDeleted(_totalDataSize + _totalHeaderSize, _unwrittenDataSize);
    }
    // Next, "forget" all stored recoverable unit sections. This will ensure that no further
    // reference to this recoverable unit can be written to disk even if the client service
    // invokes a write or force method on it in the future. We also need to clear out the
    // total and unwritten data size fields to ensure that we don't attempt to begin
    // writing even when there are no sections to write.
    if (tc.isEventEnabled()) Tr.event(tc, "Remove completed for recoverable unit " + _identity + ". Clear internal state");
    _recoverableUnitSections.clear();
    _totalDataSize = 0;
    _unwrittenDataSize = 0;
    if (tc.isEntryEnabled()) Tr.exit(tc, "remove");
public class ProjectCalendarWeek { /** * Used to add working hours to the calendar . Note that the MPX file * definition allows a maximum of 7 calendar hours records to be added to * a single calendar . * @ param day day number * @ return new ProjectCalendarHours instance */ public ProjectCalendarHours addCalendarHours ( Day day ) { } }
ProjectCalendarHours bch = new ProjectCalendarHours ( this ) ; bch . setDay ( day ) ; m_hours [ day . getValue ( ) - 1 ] = bch ; return ( bch ) ;
public class RedisClusterClient { /**
     * Reload partitions and re-initialize the distribution table.
     */ public void reloadPartitions() { } }
    if (partitions == null) {
        // First call: build the partition table from scratch and prime its cache.
        initializePartitions();
        partitions.updateCache();
    } else {
        Partitions loadedPartitions = loadPartitions();
        if (TopologyComparators.isChanged(getPartitions(), loadedPartitions)) {
            logger.debug("Using a new cluster topology");
            // Snapshot before/after node lists and publish a topology-changed event.
            List<RedisClusterNode> before = new ArrayList<RedisClusterNode>(getPartitions());
            List<RedisClusterNode> after = new ArrayList<RedisClusterNode>(loadedPartitions);
            getResources().eventBus().publish(new ClusterTopologyChangedEvent(before, after));
        }
        // Swap the freshly loaded node list into the existing Partitions instance
        // (reload happens whether or not the topology changed).
        this.partitions.reload(loadedPartitions.getPartitions());
    }
    // Propagate the (possibly new) topology to all open connections.
    updatePartitionsInConnections();
public class Signatures { /**
     * Selects the best equally-matching methods for the given argument types.
     *
     * @param methods the candidate methods
     * @param argTypes the actual argument types to match against
     * @return the methods that match equally well
     */ public static Method[] candidateMethods(Method[] methods, Class<?>[] argTypes) { } }
    // Precompute each method's parameter signature and varargs flag once,
    // then let candidates() perform the actual ranking.
    return candidates(methods, collectSignatures(methods), collectVarArgs(methods), argTypes);
public class IndexTable { /**
     * Adjust this index table to contain the sorted index order for the given
     * list.
     *
     * @param <T> the data type
     * @param list the list of objects
     */ public <T extends Comparable<T>> void sort(List<T> list) { } }
    // Delegate to the comparator-based overload using the default comparator
    // (presumably natural ordering of T -- confirm against defaultComp's definition).
    sort(list, defaultComp);
public class CollocationFinder { /** * This method does the work of traversing the tree and writing collocations * to the CollocationCollector ( an internal data structure ) . * @ param t Tree to get collocations from . */ private void getCollocationsList ( Tree t , boolean threadSafe ) { } }
int leftMostLeaf = Trees . leftEdge ( t , qTree ) ; if ( t . isPreTerminal ( ) ) return ; List < Tree > children = t . getChildrenAsList ( ) ; if ( children . isEmpty ( ) ) return ; // TODO : fix determineHead // - in phrases like " World Trade Organization ' s " the head of the parent NP is " POS " . // - this is problematic for the collocationFinder which assigns this head // as the POS for the collocation " World _ Trade _ Organization " ! Label headLabel = hf . determineHead ( t ) . label ( ) ; StringBuffer testString = null ; Integer leftSistersBuffer = 0 ; // measures the length of sisters in words when reading for ( int i = 0 ; i < children . size ( ) ; i ++ ) { ArrayList < Integer > childConstituents = new ArrayList < Integer > ( ) ; childConstituents . add ( i ) ; Tree subtree = children . get ( i ) ; Integer currWindowLength = 0 ; // measures the length in words of the current collocation . getCollocationsList ( subtree , threadSafe ) ; // recursive call to get colls in subtrees . testString = new StringBuffer ( 160 ) ; testString . append ( treeAsStemmedCollocation ( subtree , threadSafe ) ) ; testString . append ( "_" ) ; Integer thisSubtreeLength = subtree . yield ( ) . size ( ) ; currWindowLength += thisSubtreeLength ; StringBuffer testStringNonStemmed = new StringBuffer ( 160 ) ; testStringNonStemmed . append ( treeAsNonStemmedCollocation ( subtree ) ) ; testStringNonStemmed . append ( "_" ) ; // for each subtree i , we iteratively append word yields of succeeding sister // subtrees j and check their wordnet entries . if they exist we write them to // the global collocationCollector pair by the indices of the leftmost and // rightmost words in the collocation . for ( int j = i + 1 ; j < children . size ( ) ; j ++ ) { Tree sisterNode = children . get ( j ) ; childConstituents . add ( j ) ; testString . append ( treeAsStemmedCollocation ( sisterNode , threadSafe ) ) ; testStringNonStemmed . 
append ( treeAsNonStemmedCollocation ( sisterNode ) ) ; currWindowLength += sisterNode . yield ( ) . size ( ) ; if ( DEBUG ) { // err . println ( " Testing string w / reported indices : " + testString . toString ( ) // + " ( " + ( leftMostLeaf + leftSistersBuffer ) + " , " + ( leftMostLeaf + leftSistersBuffer + currWindowLength - 1 ) + " ) " ) ; } // ignore collocations beginning with " the " or " a " if ( StringUtils . lookingAt ( testString . toString ( ) , "(?:[Tt]he|THE|[Aa][Nn]?)[ _]" ) ) { if ( false ) { err . println ( "CollocationFinder: Not collapsing the/a word: " + testString ) ; } } else if ( wordNetContains ( testString . toString ( ) ) ) { Pair < Integer , Integer > c = new Pair < Integer , Integer > ( leftMostLeaf + leftSistersBuffer , leftMostLeaf + leftSistersBuffer + currWindowLength - 1 ) ; Collocation col = new Collocation ( c , t , ( ArrayList < Integer > ) childConstituents . clone ( ) , testString . toString ( ) , headLabel ) ; collocationCollector . add ( col ) ; if ( DEBUG ) { err . println ( "Found collocation in wordnet: " + testString . toString ( ) ) ; err . println ( " Span of collocation is: " + c + "; childConstituents is: " + c ) ; } } testString . append ( "_" ) ; if ( StringUtils . lookingAt ( testStringNonStemmed . toString ( ) , "(?:[Tt]he|THE|[Aa][Nn]?)[ _]" ) ) { if ( false ) { err . println ( "CollocationFinder: Not collapsing the/a word: " + testStringNonStemmed ) ; } } else if ( wordNetContains ( testStringNonStemmed . toString ( ) ) ) { Pair < Integer , Integer > c = new Pair < Integer , Integer > ( leftMostLeaf + leftSistersBuffer , leftMostLeaf + leftSistersBuffer + currWindowLength - 1 ) ; Collocation col = new Collocation ( c , t , ( ArrayList < Integer > ) childConstituents . clone ( ) , testStringNonStemmed . toString ( ) , headLabel ) ; collocationCollector . add ( col ) ; if ( DEBUG ) { err . println ( "Found collocation in wordnet: " + testStringNonStemmed . toString ( ) ) ; err . 
println ( " Span of collocation is: " + c + "; childConstituents is: " + c ) ; } } testStringNonStemmed . append ( "_" ) ; } leftSistersBuffer += thisSubtreeLength ; }
public class MapTileViewerModel { /** * Render horizontal tiles . * @ param g The graphic output . * @ param ty The current vertical tile location . * @ param viewY The vertical view offset . */ private void renderHorizontal ( Graphic g , int ty , double viewY ) { } }
final int inTileWidth = ( int ) Math . ceil ( viewer . getWidth ( ) / ( double ) map . getTileWidth ( ) ) ; final int sx = ( int ) Math . floor ( ( viewer . getX ( ) + viewer . getViewX ( ) ) / map . getTileWidth ( ) ) ; final double viewX = viewer . getX ( ) ; for ( int h = 0 ; h <= inTileWidth ; h ++ ) { final int tx = h + sx ; if ( ! ( tx < 0 || tx >= map . getInTileWidth ( ) ) ) { renderTile ( g , tx , ty , viewX , viewY ) ; } }
public class MsgCompiler { /** * Compiles the given { @ link MsgNode } to a statement with the given escaping directives applied . * < p > The returned statement must be written to a location with a stack depth of zero , since * placeholder formatting may require detach logic . * @ param partsAndId The computed msg id * @ param msg The msg node * @ param escapingDirectives The set of escaping directives to apply . */ Statement compileMessage ( MsgPartsAndIds partsAndId , MsgNode msg , ImmutableList < SoyPrintDirective > escapingDirectives ) { } }
Expression soyMsgDefaultParts = compileDefaultMessagePartsConstant ( partsAndId ) ; Expression soyMsgParts = parameterLookup . getRenderContext ( ) . getSoyMsgParts ( partsAndId . id , soyMsgDefaultParts ) ; Statement printMsg ; if ( msg . isRawTextMsg ( ) ) { // Simplest case , just a static string translation printMsg = handleBasicTranslation ( escapingDirectives , soyMsgParts ) ; } else { // String translation + placeholders printMsg = handleTranslationWithPlaceholders ( msg , escapingDirectives , soyMsgParts , parameterLookup . getPluginContext ( ) . getULocale ( ) , partsAndId ) ; } return Statement . concat ( printMsg . withSourceLocation ( msg . getSourceLocation ( ) ) , detachState . detachLimited ( appendableExpression ) ) ;
public class appflowpolicy_csvserver_binding { /** * Use this API to fetch appflowpolicy _ csvserver _ binding resources of given name . */ public static appflowpolicy_csvserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
appflowpolicy_csvserver_binding obj = new appflowpolicy_csvserver_binding ( ) ; obj . set_name ( name ) ; appflowpolicy_csvserver_binding response [ ] = ( appflowpolicy_csvserver_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class ClinvarParser { /** * Checks if XML info path exists and loads it * @ throws javax . xml . bind . JAXBException * @ throws java . io . IOException */ public static Object loadXMLInfo ( String filename , String clinvarVersion ) throws JAXBException , IOException { } }
Object obj = null ; JAXBContext jaxbContext = JAXBContext . newInstance ( clinvarVersion ) ; Unmarshaller unmarshaller = jaxbContext . createUnmarshaller ( ) ; // Reading GZip input stream InputStream inputStream ; if ( filename . endsWith ( ".gz" ) ) { inputStream = new GZIPInputStream ( new FileInputStream ( new File ( filename ) ) ) ; } else { inputStream = Files . newInputStream ( Paths . get ( filename ) ) ; } obj = unmarshaller . unmarshal ( inputStream ) ; return obj ;
public class MalmoEnvServer { /** * Handler for < Status > messages . */ private void status ( String command , Socket socket ) throws IOException { } }
lock . lock ( ) ; try { String status = "{}" ; // TODO Possibly have something more interesting to report . DataOutputStream dout = new DataOutputStream ( socket . getOutputStream ( ) ) ; byte [ ] statusBytes = status . getBytes ( utf8 ) ; dout . writeInt ( statusBytes . length ) ; dout . write ( statusBytes ) ; dout . flush ( ) ; } finally { lock . unlock ( ) ; }
public class CmsJspTagContentLoad { /** * Sets the locale . < p > * @ param locale the locale to set */ public void setLocale ( String locale ) { } }
if ( CmsStringUtil . isEmpty ( locale ) ) { m_locale = null ; m_contentLocale = null ; } else { m_locale = CmsLocaleManager . getLocale ( locale ) ; m_contentLocale = m_locale ; }
public class ResolveWithDeps {
    /**
     * Collect dependencies in the enclosing class.
     *
     * @param from The enclosing class sym
     * @param to The enclosing classes references this sym.
     */
    @Override
    public void reportDependence(Symbol from, Symbol to) {
        // Capture dependencies between the packages.
        // Only the package-level edge is recorded, not the symbol-level one.
        deps.collect(from.packge().fullname, to.packge().fullname);
    }
}
public class DefaultJerseyOptions { /** * Optional list of singleton instances to be registered ( hk2 binders etc . ) * @ return */ @ Override public Set < Object > getInstances ( ) { } }
Set < Object > set = new HashSet < > ( ) ; ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; Consumer < JsonArray > reader = array -> { if ( array != null && array . size ( ) > 0 ) { for ( int i = 0 ; i < array . size ( ) ; i ++ ) { try { Class < ? > clazz = cl . loadClass ( array . getString ( i ) ) ; set . add ( clazz . newInstance ( ) ) ; } catch ( ClassNotFoundException | InstantiationException | IllegalAccessException e ) { throw new RuntimeException ( e ) ; } } } } ; JsonArray binders = config . getJsonArray ( CONFIG_BINDERS , null ) ; JsonArray instances = config . getJsonArray ( CONFIG_INSTANCES , null ) ; reader . accept ( binders ) ; reader . accept ( instances ) ; return set ;
public class JDBCCallableStatement {
    /**
     * Retrieves the value of the named parameter as a {@code Timestamp},
     * using the given Calendar. (#ifdef JAVA4)
     *
     * @param parameterName the name of the parameter
     * @param cal the Calendar to use when constructing the timestamp
     * @return the parameter value as a Timestamp
     * @throws SQLException propagated from parameter-index lookup or value retrieval
     */
    public synchronized Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException {
        // Resolve the name to an ordinal index and delegate to the
        // index-based overload.
        return getTimestamp(findParameterIndex(parameterName), cal);
    }
}
public class ClassPath { /** * Returns all top level classes whose package name is { @ code packageName } or starts with * { @ code packageName } followed by a ' . ' . */ public ImmutableSet < ClassInfo > getTopLevelClassesRecursive ( String packageName ) { } }
checkNotNull ( packageName ) ; String packagePrefix = packageName + '.' ; ImmutableSet . Builder < ClassInfo > builder = ImmutableSet . builder ( ) ; ImmutableSet < ClassInfo > topLevelClasses = getTopLevelClasses ( ) ; List < String > debugData = Lists . newLinkedList ( ) ; for ( ClassInfo classInfo : topLevelClasses ) { if ( classInfo . getName ( ) . startsWith ( packagePrefix ) ) { builder . add ( classInfo ) ; debugData . add ( classInfo . getResourceName ( ) + "\n" ) ; } } return builder . build ( ) ;
public class SessionManager {
    /**
     * Retrieves a session from the backing store, optionally treating the call
     * as a real session access (which refreshes timestamps, re-validates the
     * session, bumps its reference count, and fires access events).
     *
     * <p>forceSessionRetrieval can only be true when using applicationSessions.
     *
     * @param id the session id
     * @param version the session version, or -1 when no version is supplied
     * @param isSessionAccess true when this call counts as a client access
     * @param forceSessionRetrieval force retrieval from the store
     * @param xdCorrelator cross-domain correlator passed through to the store
     * @return the session, or null if not found or no longer valid
     */
    protected Object getSession(String id, int version, boolean isSessionAccess, boolean forceSessionRetrieval, Object xdCorrelator) {
        if (isSessionAccess) {
            // Real access: refresh the session's timestamp in the store
            // before retrieval.
            if (version == -1) {
                _store.refreshSession(id, xdCorrelator);
            } else {
                _store.refreshSession(id, version, xdCorrelator);
            }
        }
        ISession iSession = getSessionFromStore(id, version, isSessionAccess, forceSessionRetrieval, xdCorrelator);
        if (iSession != null) {
            if (isSessionAccess) {
                // Re-validate against the last-accessed time; sessions that
                // have expired in the meantime are treated as missing.
                boolean stillValid = _store.checkSessionStillValid(iSession, iSession.getLastAccessedTime());
                if (stillValid) {
                    iSession.incrementRefCount();
                    _sessionEventDispatcher.sessionAccessed(iSession);
                } else {
                    iSession = null;
                }
            }
        } else {
            if (isSessionAccess) {
                // Notify listeners that an unknown session id was requested.
                _sessionEventDispatcher.sessionAccessUnknownKey(id);
            }
        }
        return iSession;
    }
}
public class BugTreeModel {
    /**
     * Swaps in a new BugTreeModel and a new JTree.
     *
     * <p>The rebuild runs on a daemon background thread; the resulting tree is
     * installed on the Swing EDT via invokeLater. The display-wait cursor is
     * acquired here and released once the new tree has been installed.
     */
    public void rebuild() {
        if (TRACE) {
            System.out.println("rebuilding bug tree model");
        }
        NewFilterFromBug.closeAll();
        // If this thread is not interrupting a previous thread, set the paths
        // to be opened when the new tree is complete
        // If the thread is interrupting another thread, don't do this, because
        // you don't have the tree with the correct paths selected
        // As of now, it should be impossible to interrupt a rebuilding thread,
        // in another version this may change, so this if statement check is
        // left in, even though it should always be true.
        if (rebuildingThread == null) {
            setOldSelectedBugs();
        }
        Debug.println("Please Wait called right before starting rebuild thread");
        mainFrame.acquireDisplayWait();
        rebuildingThread = edu.umd.cs.findbugs.util.Util.runInDameonThread(new Runnable() {
            BugTreeModel newModel;

            @Override
            public void run() {
                try {
                    // Build and sort the replacement model off the EDT.
                    newModel = new BugTreeModel(BugTreeModel.this);
                    newModel.listeners = listeners;
                    newModel.resetData();
                    newModel.bugSet.sortList();
                } finally {
                    // Always clear the thread marker, then hand the result to
                    // the EDT (newModel may be null if construction failed).
                    rebuildingThread = null;
                    SwingUtilities.invokeLater(() -> {
                        if (newModel != null) {
                            JTree newTree = new JTree(newModel);
                            newModel.tree = newTree;
                            mainFrame.mainFrameTree.newTree(newTree, newModel);
                            mainFrame.releaseDisplayWait();
                        }
                        getOffListenerList();
                    });
                }
            }
        }, "Rebuilding thread");
    }
}
public class ProjectsSender {
    /* --- Public methods --- */

    /**
     * Sends the collected projects to the WhiteSource server (or writes an
     * offline update request), with connection-retry handling.
     *
     * @param projectsDetails the projects to send
     * @return a pair of human-readable result text and the resulting status code
     */
    public Pair<String, StatusCode> sendRequest(ProjectsDetails projectsDetails) {
        // send request
        logger.info("Initializing WhiteSource Client");
        Collection<AgentProjectInfo> projects = projectsDetails.getProjects();
        if (checkDependenciesUpbound(projects)) {
            return new Pair<>("Number of dependencies exceeded the maximum supported", StatusCode.SERVER_FAILURE);
        }
        WhitesourceService service = createService();
        String resultInfo = Constants.EMPTY_STRING;
        if (offlineConfig.isOffline()) {
            // Offline mode: write the update request locally instead of sending it.
            resultInfo = offlineUpdate(service, projects);
            return new Pair<>(resultInfo, this.prepStepStatusCode);
        } else {
            // update type
            UpdateType updateType = UpdateType.OVERRIDE;
            String updateTypeValue = senderConfig.getUpdateTypeValue();
            try {
                updateType = UpdateType.valueOf(updateTypeValue);
            } catch (Exception e) {
                logger.info("Invalid value {} for updateType, defaulting to {}", updateTypeValue, UpdateType.OVERRIDE);
            }
            logger.info("UpdateType set to {} ", updateTypeValue);
            StatusCode statusCode = StatusCode.SUCCESS;
            if (senderConfig.isEnableImpactAnalysis()) {
                runViaAnalysis(projectsDetails, service);
            } else if (!senderConfig.isEnableImpactAnalysis()) {
                // todo return logs when needed would be enabled for all WSE-342
            }
            // Retry loop: one initial attempt plus getConnectionRetries() retries.
            int retries = senderConfig.getConnectionRetries();
            while (retries-- > -1) {
                try {
                    statusCode = checkPolicies(service, projects);
                    if (senderConfig.isUpdateInventory()) {
                        if (statusCode == StatusCode.SUCCESS || (senderConfig.isForceUpdate() && senderConfig.isForceUpdateFailBuildOnPolicyViolation())) {
                            resultInfo = update(service, projects);
                        }
                    }
                    break;
                } catch (WssServiceException e) {
                    // Networking causes are retried; anything else aborts the
                    // loop by forcing retries to -1.
                    if (e.getCause() != null && e.getCause().getClass().getCanonicalName().substring(0, e.getCause().getClass().getCanonicalName().lastIndexOf(Constants.DOT)).equals(Constants.JAVA_NETWORKING)) {
                        statusCode = StatusCode.CONNECTION_FAILURE;
                        logger.error("Trying " + (retries + 1) + " more time" + (retries != 0 ? "s" : Constants.EMPTY_STRING));
                    } else {
                        statusCode = StatusCode.SERVER_FAILURE;
                        retries = -1;
                    }
                    resultInfo = "Failed to send request to WhiteSource server: " + e.getMessage();
                    logger.error(resultInfo, e.getMessage());
                    logger.debug(resultInfo, e);
                    if (retries > -1) {
                        try {
                            // Back off before the next retry.
                            Thread.sleep(senderConfig.getConnectionRetriesIntervals());
                        } catch (InterruptedException e1) {
                            logger.error("Failed to sleep while retrying to connect to server " + e1.getMessage(), e1);
                        }
                    }
                    String requestToken = e.getRequestToken();
                    if (StringUtils.isNotBlank(requestToken)) {
                        resultInfo += Constants.NEW_LINE + "Support token: " + requestToken;
                        logger.info("Support token: {}", requestToken);
                    }
                }
            }
            if (service != null) {
                service.shutdown();
            }
            if (statusCode == StatusCode.SUCCESS) {
                return new Pair<>(resultInfo, this.prepStepStatusCode);
            }
            return new Pair<>(resultInfo, statusCode);
        }
    }
}
public class TableEntry {
    /**
     * Creates a new instance of the TableEntry class that indicates the Key must not previously exist.
     *
     * @param key The Key.
     * @param value The Value.
     * @return newly created TableEntry if one for the key does not already exist.
     */
    public static TableEntry notExists(@NonNull ArrayView key, @NonNull ArrayView value) {
        // Wrap the key in a "must-not-exist" TableKey and pair it with the value.
        return new TableEntry(TableKey.notExists(key), value);
    }
}
public class ResourceGroovyMethods {
    /**
     * Creates a new OutputStream for this file and passes it into the closure.
     * This method ensures the stream is closed after the closure returns.
     *
     * @param file a File
     * @param closure a closure
     * @return the value returned by the closure
     * @throws IOException if an IOException occurs.
     * @see IOGroovyMethods#withStream(java.io.OutputStream, groovy.lang.Closure)
     * @since 1.5.2
     */
    public static Object withOutputStream(File file,
            @ClosureParams(value = SimpleType.class, options = "java.io.OutputStream") Closure closure) throws IOException {
        // withStream guarantees the stream is closed when the closure returns.
        return IOGroovyMethods.withStream(newOutputStream(file), closure);
    }
}
public class Matrix4x3d {
    /* (non-Javadoc)
     * @see org.joml.Matrix4x3dc#getTransposed(int, java.nio.FloatBuffer)
     */
    public FloatBuffer getTransposed(int index, FloatBuffer buffer) {
        // Store this matrix in transposed order into the buffer at the given
        // absolute index, then return the buffer to allow call chaining.
        MemUtil.INSTANCE.putfTransposed(this, index, buffer);
        return buffer;
    }
}
public class JDBCRecordReader {
    /**
     * Initialize all required jdbc elements and make the reader ready for iteration.
     *
     * @param split not handled yet, will be discarded
     * @throws IllegalStateException if no DataSource has been configured
     */
    @Override
    public void initialize(InputSplit split) throws IOException, InterruptedException {
        // A DataSource is mandatory; fail fast with a clear message if absent.
        if (dataSource == null) {
            throw new IllegalStateException("Cannot initialize : no datasource");
        }
        initializeJdbc();
    }
}
public class Utility { /** * MD5加密 * @ param bs 待加密数据 * @ return md5值 */ public static byte [ ] md5Bytes ( byte [ ] bs ) { } }
if ( bs == null ) return null ; MessageDigest md5 ; try { md5 = MessageDigest . getInstance ( "MD5" ) ; } catch ( NoSuchAlgorithmException ex ) { throw new RuntimeException ( ex ) ; } return md5 . digest ( bs ) ;
public class ImmutableCollections { /** * If the existing map has entries that collide with the new map , those old entries will be * removed , but the new entries will appear at their new position in the iteration order of the * resulting map . Example : * < pre > { @ code * concat ( { a = > 1 , b = > 2 , c = > 3 } , { c = > 4 , a = > 5 } ) = = { b = > 2 , c = > 4 , a = > 5} * } < / pre > */ @ NonNull public static < K , V > ImmutableMap < K , V > concat ( @ NonNull ImmutableMap < K , V > map1 , @ NonNull Map < K , V > map2 ) { } }
ImmutableMap . Builder < K , V > builder = ImmutableMap . builder ( ) ; for ( Map . Entry < K , V > entry : map1 . entrySet ( ) ) { if ( ! map2 . containsKey ( entry . getKey ( ) ) ) { builder . put ( entry ) ; } } builder . putAll ( map2 ) ; return builder . build ( ) ;
public class LeftTupleSource {
    /**
     * Adds the <code>TupleSink</code> so that it may receive
     * <code>Tuples</code> propagated from this <code>TupleSource</code>.
     *
     * @param tupleSink
     *            The <code>TupleSink</code> to receive propagated
     *            <code>Tuples</code>.
     * @param context
     *            build-time context used while wiring the network
     */
    public void addTupleSink(final LeftTupleSink tupleSink, final BuildContext context) {
        // The helper returns the (possibly replaced) sink propagator, which
        // becomes the current one.
        this.sink = addTupleSink(this.sink, tupleSink, context);
    }
}
public class VisibleAssertions { /** * Log a contextual message , in the style of a ' dividing line ' in the test output . * The output will be in grey , surrounded by a horizontal line the full width of the current terminal ( or 80 chars ) . * @ param context contextual message to output * @ param indent number of space characters to indent this line by */ public static void context ( CharSequence context , int indent ) { } }
if ( Boolean . getBoolean ( "visibleassertions.silence" ) ) { return ; } StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < indent ; i ++ ) sb . append ( " " ) ; for ( int i = 0 ; i < 4 ; i ++ ) sb . append ( CONTEXT_MARK ) ; sb . append ( " " ) ; sb . append ( context ) ; int terminalWidth = terminalWidth ( ) ; sb . append ( " " ) ; for ( int i = sb . length ( ) ; i < terminalWidth ; i ++ ) { sb . append ( CONTEXT_MARK ) ; } System . out . println ( dim ( sb . toString ( ) ) ) ;