signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JSTypeRegistry { /** * Creates a function type . The last parameter type of the function is * considered a variable length argument . * @ param returnType the function ' s return type * @ param parameterTypes the parameters ' types */ private FunctionType createNativeFunctionTypeWithVarArgs ( JSType returnType , JSType ... parameterTypes ) { } }
return createNativeFunctionType ( returnType , createParametersWithVarArgs ( parameterTypes ) ) ;
public class CredentialsManager { /** * Stores the given credentials in the storage . Must have an access _ token or id _ token and a expires _ in value . * @ param credentials the credentials to save in the storage . */ public void saveCredentials ( @ NonNull Credentials credentials ) { } }
if ( ( isEmpty ( credentials . getAccessToken ( ) ) && isEmpty ( credentials . getIdToken ( ) ) ) || credentials . getExpiresAt ( ) == null ) { throw new CredentialsManagerException ( "Credentials must have a valid date of expiration and a valid access_token or id_token value." ) ; } storage . store ( KEY_ACCESS_TOKEN , credentials . getAccessToken ( ) ) ; storage . store ( KEY_REFRESH_TOKEN , credentials . getRefreshToken ( ) ) ; storage . store ( KEY_ID_TOKEN , credentials . getIdToken ( ) ) ; storage . store ( KEY_TOKEN_TYPE , credentials . getType ( ) ) ; storage . store ( KEY_EXPIRES_AT , credentials . getExpiresAt ( ) . getTime ( ) ) ; storage . store ( KEY_SCOPE , credentials . getScope ( ) ) ;
public class AmazonElastiCacheClient { /** * Returns information about reserved cache nodes for this account , or about a specified reserved cache node . * @ param describeReservedCacheNodesRequest * Represents the input of a < code > DescribeReservedCacheNodes < / code > operation . * @ return Result of the DescribeReservedCacheNodes operation returned by the service . * @ throws ReservedCacheNodeNotFoundException * The requested reserved cache node was not found . * @ throws InvalidParameterValueException * The value for a parameter is invalid . * @ throws InvalidParameterCombinationException * Two or more incompatible parameters were specified . * @ sample AmazonElastiCache . DescribeReservedCacheNodes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticache - 2015-02-02 / DescribeReservedCacheNodes " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeReservedCacheNodesResult describeReservedCacheNodes ( DescribeReservedCacheNodesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeReservedCacheNodes ( request ) ;
public class CachedDateTimeZone { /** * Returns a new CachedDateTimeZone unless given zone is already cached . */ public static CachedDateTimeZone forZone ( DateTimeZone zone ) { } }
if ( zone instanceof CachedDateTimeZone ) { return ( CachedDateTimeZone ) zone ; } return new CachedDateTimeZone ( zone ) ;
public class BrokerUtil { /** * Converts a list of objects to a string of M subscripts . * @ param subscripts List to convert . * @ return List of subscripts in M format . */ public static String buildSubscript ( Iterable < Object > subscripts ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( Object subscript : subscripts ) { String value = toString ( subscript ) ; if ( value . isEmpty ( ) ) { throw new RuntimeException ( "Null subscript not allowed." ) ; } if ( sb . length ( ) > 0 ) { sb . append ( "," ) ; } if ( StringUtils . isNumeric ( value ) ) { sb . append ( value ) ; } else { sb . append ( QT ) ; sb . append ( value . replaceAll ( QT , QT2 ) ) ; sb . append ( QT ) ; } } return sb . toString ( ) ;
public class ClientManager { /** * This method attempts to retrieve and process the instrumentation information contained * within the collector configuration . * @ return Whether the configuration has been retrieved and processed */ protected static boolean processConfig ( ) { } }
// Read configuration CollectorConfiguration config = configService . getCollector ( null , null , null , null ) ; if ( config != null ) { try { updateInstrumentation ( config ) ; } catch ( Exception e ) { log . severe ( "Failed to update instrumentation rules: " + e ) ; } } return config != null ;
public class ResourceHelper {
    /**
     * Optimize the resource chain by trying to combine two resources to a new one.
     *
     * @param securityContext the current security context
     * @param request the current HTTP request (its path is used for error logging)
     * @param resourceMap maps URL patterns to resource classes
     * @param propertyView the requested property view
     * @return finalResource the single resource the chain collapses to
     * @throws FrameworkException
     */
    public static Resource optimizeNestedResourceChain(final SecurityContext securityContext, final HttpServletRequest request, final Map<Pattern, Class<? extends Resource>> resourceMap, final Value<String> propertyView) throws FrameworkException {
        // Parse the request path into an initial chain of resources.
        final List<Resource> resourceChain = ResourceHelper.parsePath(securityContext, request, resourceMap, propertyView);
        ViewFilterResource view = null;
        // NOTE(review): 'num' is fixed to the initial size and never updated after removals;
        // the combining loop below relies on the catch block to absorb the
        // IndexOutOfBoundsException from get(i + 1) — termination is exception-driven.
        int num = resourceChain.size();
        boolean found = false;
        do {
            // Extract (and remove) any view filter so it can be applied at the end.
            for (Iterator<Resource> it = resourceChain.iterator(); it.hasNext();) {
                Resource constr = it.next();
                if (constr instanceof ViewFilterResource) {
                    view = (ViewFilterResource) constr;
                    it.remove();
                }
            }
            found = false;
            try {
                // Try to combine each pair of adjacent resources into one.
                for (int i = 0; i < num; i++) {
                    Resource firstElement = resourceChain.get(i);
                    Resource secondElement = resourceChain.get(i + 1);
                    Resource combinedConstraint = firstElement.tryCombineWith(secondElement);
                    if (combinedConstraint != null) {
                        // remove source constraints
                        resourceChain.remove(firstElement);
                        resourceChain.remove(secondElement);
                        // add combined constraint
                        resourceChain.add(i, combinedConstraint);
                        // signal success so the outer do/while makes another pass
                        found = true;
                    }
                }
            } catch (Throwable t) {
                // ignore exceptions thrown here but make it possible to set a breakpoint
                final boolean test = false;
            }
        } while (found);
        if (resourceChain.size() == 1) {
            Resource finalResource = resourceChain.get(0);
            if (view != null) {
                // Fold the previously extracted view filter into the final resource.
                finalResource = finalResource.tryCombineWith(view);
            }
            if (finalResource == null) {
                // fall back to original resource when combining with the view failed
                finalResource = resourceChain.get(0);
            }
            return finalResource;
        } else {
            logger.warn("Resource chain evaluation for path {} resulted in {} entries, returning status code 400.", new Object[] { request.getPathInfo(), resourceChain.size() });
        }
        throw new IllegalPathException("Cannot resolve URL path");
    }
}
public class Pipeline { /** * This is a migration method for global to processor options . Currently used * by the merge - processor which gets the ' source : once ' option from the global * scope . */ private Map < String , Object > injectGlobalOptionsFallback ( final Version version , final Manifest manifest , final String name , final Map < String , Object > options ) { } }
final Map < String , Object > copy = new HashMap < String , Object > ( options ) ; copy . put ( "version" , version . toString ( ) ) ; if ( manifest != null ) { if ( "merge" . equals ( name ) ) { copy . put ( "source" , GlobalOptions . isSourceOnce ( manifest ) ? "once" : "" ) ; } } return copy ;
public class PersonConfidentialVisitor { /** * Visit an Attribute . Certain Attributes contribute interest data . * @ see GedObjectVisitor # visit ( Attribute ) */ @ Override public void visit ( final Attribute attribute ) { } }
if ( "Restriction" . equals ( attribute . getString ( ) ) && "confidential" . equals ( attribute . getTail ( ) ) ) { isConfidential = true ; }
public class DatabaseHashMap { /** * Get the session table definition if exists . It also checks * If existing table matches what session manager is looking for */ private boolean getTableDefinition ( Connection tbcon ) throws SQLException { } }
boolean defExists = false ; boolean smallExists = false ; boolean mediumExists = false ; boolean largeExists = false ; // informix size calculation doesn ' t // work . i . e rs1 . getInt ( " COLUMN _ SIZE " ) doesn ' t work // if ( usingInformix ) // return false ; DatabaseMetaData dmd = tbcon . getMetaData ( ) ; String tbName = tableName ; String qualifierName = null ; if ( usingAS400DB2 || usingDB2Connect ) { tbName = TABLE_NAME . toUpperCase ( ) ; if ( collectionName != null ) qualifierName = collectionName ; } else if ( usingDB2 || usingDerby || usingOracle ) { // cmd 162172 tbName = tbName . toUpperCase ( ) ; if ( dbid != null ) { qualifierName = dbid . toUpperCase ( ) ; // cmd PQ81615 } if ( _smc . isUsingCustomSchemaName ( ) ) { // PM27191 if ( usingDB2 ) { java . sql . Statement s = null ; ResultSet rs1 = null ; s = tbcon . createStatement ( ) ; s . execute ( "VALUES (CURRENT SCHEMA)" ) ; rs1 = s . getResultSet ( ) ; while ( rs1 . next ( ) ) { qualifierNameWhenCustomSchemaIsSet = rs1 . getString ( "1" ) ; } if ( qualifierNameWhenCustomSchemaIsSet != null ) { qualifierName = qualifierNameWhenCustomSchemaIsSet ; if ( com . ibm . websphere . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_WAS . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . FINE , methodClassName , methodNames [ GET_TABLE_DEFINITION ] , "Database being used is DB2 and UsingCustomSchemaName is set to true. The following qualifier name obtained from " + "running the query VALUES (CURRENT SCHEMA) will be used for subsequent queries in this method: " + qualifierNameWhenCustomSchemaIsSet ) ; } } } // Oracle case to be handled later } // PM27191 END } ResultSet rs1 = dmd . getColumns ( null , qualifierName , tbName , "%" ) ; try { while ( rs1 . next ( ) ) { String columnname = rs1 . getString ( "COLUMN_NAME" ) ; int columnsize = rs1 . getInt ( "COLUMN_SIZE" ) ; if ( com . ibm . websphere . ras . TraceComponent . 
isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_WAS . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . FINE , methodClassName , methodNames [ GET_TABLE_DEFINITION ] , "COLUMN_NAME = " + columnname + " COLUMN_SIZE = " + Integer . toString ( columnsize ) ) ; } if ( columnname . equalsIgnoreCase ( "SMALL" ) ) { smallColSize = columnsize ; smallExists = true ; } if ( columnname . equalsIgnoreCase ( "MEDIUM" ) ) { if ( ! usingOracle ) { // cmd 162172 long raw or Blob // does not give proper size from // COLUMN _ SIZE data for Oracle mediumColSize = columnsize ; } mediumExists = true ; } if ( columnname . equalsIgnoreCase ( "LARGE" ) ) { largeColSize = columnsize ; largeExists = true ; } defExists = true ; } if ( defExists ) { if ( smallExists && mediumExists && largeExists ) { if ( com . ibm . websphere . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_WAS . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . FINE , methodClassName , methodNames [ GET_TABLE_DEFINITION ] , "Table exists with all the required columns" ) ; } } else { // Flag the error LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . SEVERE , methodClassName , methodNames [ GET_TABLE_DEFINITION ] , "DatabaseHashMap.wrongTableDef" ) ; } } } finally { closeResultSet ( rs1 ) ; } return defExists ;
public class Utils { /** * Converts MQTT message type to a textual description . */ public static String msgType2String ( int type ) { } }
switch ( type ) { case AbstractMessage . CONNECT : return "CONNECT" ; case AbstractMessage . CONNACK : return "CONNACK" ; case AbstractMessage . PUBLISH : return "PUBLISH" ; case AbstractMessage . PUBACK : return "PUBACK" ; case AbstractMessage . PUBREC : return "PUBREC" ; case AbstractMessage . PUBREL : return "PUBREL" ; case AbstractMessage . PUBCOMP : return "PUBCOMP" ; case AbstractMessage . SUBSCRIBE : return "SUBSCRIBE" ; case AbstractMessage . SUBACK : return "SUBACK" ; case AbstractMessage . UNSUBSCRIBE : return "UNSUBSCRIBE" ; case AbstractMessage . UNSUBACK : return "UNSUBACK" ; case AbstractMessage . PINGREQ : return "PINGREQ" ; case AbstractMessage . PINGRESP : return "PINGRESP" ; case AbstractMessage . DISCONNECT : return "DISCONNECT" ; default : throw new RuntimeException ( "Can't decode message type " + type ) ; }
public class ExecutionGraph { /** * Identifies an execution by the specified channel ID and returns it . * @ param id * the channel ID to identify the vertex with * @ return the execution vertex which has a channel with ID < code > id < / code > or < code > null < / code > if no such vertex * exists in the execution graph */ public ExecutionVertex getVertexByChannelID ( final ChannelID id ) { } }
final ExecutionEdge edge = this . edgeMap . get ( id ) ; if ( edge == null ) { return null ; } if ( id . equals ( edge . getOutputChannelID ( ) ) ) { return edge . getOutputGate ( ) . getVertex ( ) ; } return edge . getInputGate ( ) . getVertex ( ) ;
public class BeanDefinitionParser { /** * Parse an array element . * @ param arrayEle a { @ link org . w3c . dom . Element } object . * @ param bd a { @ link org . springframework . beans . factory . config . BeanDefinition } object . * @ return a { @ link java . lang . Object } object . */ public Object parseArrayElement ( Element arrayEle , BeanDefinition bd ) { } }
String elementType = arrayEle . getAttribute ( VALUE_TYPE_ATTRIBUTE ) ; NodeList nl = arrayEle . getChildNodes ( ) ; ManagedArray target = new ManagedArray ( elementType , nl . getLength ( ) ) ; target . setSource ( extractSource ( arrayEle ) ) ; target . setElementTypeName ( elementType ) ; target . setMergeEnabled ( parseMergeAttribute ( arrayEle ) ) ; parseCollectionElements ( nl , target , bd , elementType ) ; return target ;
public class BTools { /** * < b > getSDbl < / b > < br > * public static String getSDbl ( double Value , int DecPrec , boolean ShowPlusSign ) < br > * Returns double converted to string . < br > * If Value is Double . NaN returns " NaN " . < br > * If DecPrec is < 0 is DecPrec set 0 . < br > * If ShowPlusSign is true : < br > * - If Value is > 0 sign is ' + ' . < br > * - If Value is 0 sign is ' ' . < br > * @ param Value - value * @ param DecPrec - decimal precision * @ param ShowPlusSign - show plus sign * @ return double as string */ public static String getSDbl ( double Value , int DecPrec , boolean ShowPlusSign ) { } }
String PlusSign = "" ; if ( ShowPlusSign && Value > 0 ) PlusSign = "+" ; if ( ShowPlusSign && Value == 0 ) PlusSign = " " ; return PlusSign + getSDbl ( Value , DecPrec ) ;
public class SegmentMeanShiftSearchGray {
    /**
     * Performs mean-shift clustering on the input image: for each pixel it
     * follows the mean-shift trajectory to a mode, recording mode locations,
     * mode colors, per-mode membership counts and a pixel-to-mode map.
     *
     * @param image Input image
     */
    @Override
    public void process(T image) {
        // initialize data structures
        this.image = image;
        this.stopRequested = false;
        modeLocation.reset();
        modeColor.reset();
        modeMemberCount.reset();
        interpolate.setImage(image);
        pixelToMode.reshape(image.width, image.height);
        quickMode.reshape(image.width, image.height);
        // mark as -1 so it knows which pixels have been assigned a mode already and can skip them
        ImageMiscOps.fill(pixelToMode, -1);
        // mark all pixels as not being a mode
        ImageMiscOps.fill(quickMode, -1);
        // use mean shift to find the peak of each pixel in the image
        int indexImg = 0;
        for (int y = 0; y < image.height && !stopRequested; y++) {
            for (int x = 0; x < image.width; x++, indexImg++) {
                if (pixelToMode.data[indexImg] != -1) {
                    // Pixel was already traversed by an earlier search; just count it toward its mode.
                    int peakIndex = pixelToMode.data[indexImg];
                    modeMemberCount.data[peakIndex]++;
                    continue;
                }
                float meanColor = interpolate.get(x, y);
                // findPeak leaves its result in the modeX/modeY fields (and presumably meanGray — confirm).
                findPeak(x, y, meanColor);
                // convert mean-shift location into pixel index (rounding to nearest pixel)
                int modeX = (int) (this.modeX + 0.5f);
                int modeY = (int) (this.modeY + 0.5f);
                int modePixelIndex = modeY * image.width + modeX;
                // get index in the list of peaks
                int modeIndex = quickMode.data[modePixelIndex];
                // If the mode is new add it to the list
                if (modeIndex < 0) {
                    modeIndex = this.modeLocation.size();
                    this.modeLocation.grow().set(modeX, modeY);
                    // Save the peak's color
                    modeColor.grow()[0] = meanGray;
                    // Mark the mode in the segment image
                    quickMode.data[modePixelIndex] = modeIndex;
                    // Set the initial count to zero. This will be incremented when it is traversed later on
                    modeMemberCount.add(0);
                }
                // add this pixel to the membership list
                modeMemberCount.data[modeIndex]++;
                // Add all pixels it traversed through to the membership of this mode.
                // This is an approximation of mean-shift.
                for (int i = 0; i < history.size; i++) {
                    Point2D_F32 p = history.get(i);
                    int px = (int) (p.x + 0.5f);
                    int py = (int) (p.y + 0.5f);
                    int index = pixelToMode.getIndex(px, py);
                    if (pixelToMode.data[index] == -1) {
                        pixelToMode.data[index] = modeIndex;
                    }
                }
            }
        }
    }
}
public class dnssrvrec { /** * Use this API to add dnssrvrec resources . */ public static base_responses add ( nitro_service client , dnssrvrec resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { dnssrvrec addresources [ ] = new dnssrvrec [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new dnssrvrec ( ) ; addresources [ i ] . domain = resources [ i ] . domain ; addresources [ i ] . target = resources [ i ] . target ; addresources [ i ] . priority = resources [ i ] . priority ; addresources [ i ] . weight = resources [ i ] . weight ; addresources [ i ] . port = resources [ i ] . port ; addresources [ i ] . ttl = resources [ i ] . ttl ; } result = add_bulk_request ( client , addresources ) ; } return result ;
public class DocumentTransformer {
    /**
     * Split a string into pieces based on delimiters, similar to the perl
     * function of the same name. The delimiters are not included in the
     * returned strings, and empty pieces are skipped.
     *
     * @param str Full string
     * @param splitter Characters to split on
     * @return List of String pieces from full string
     */
    private static List<String> split(String str, String splitter) {
        final StringTokenizer tokenizer = new StringTokenizer(str, splitter);
        final List<String> pieces = new ArrayList<>(tokenizer.countTokens());
        while (tokenizer.hasMoreTokens()) {
            pieces.add(tokenizer.nextToken());
        }
        return pieces;
    }
}
public class authenticationauthnprofile { /** * Use this API to fetch filtered set of authenticationauthnprofile resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static authenticationauthnprofile [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
authenticationauthnprofile obj = new authenticationauthnprofile ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; authenticationauthnprofile [ ] response = ( authenticationauthnprofile [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class SESNotificationManager { /** * / * ( non - Javadoc ) * @ see org . duracloud . mill . notification . NotificationManager # sendEmail ( java . lang . String , java . lang . String ) */ @ Override public void sendEmail ( String subject , String body ) { } }
if ( ArrayUtils . isEmpty ( this . recipientEmailAddresses ) ) { log . warn ( "No recipients configured - no one to notify: ignoring..." ) ; return ; } SendEmailRequest email = new SendEmailRequest ( ) ; try { Destination destination = new Destination ( ) ; destination . setToAddresses ( Arrays . asList ( this . recipientEmailAddresses ) ) ; email . setDestination ( destination ) ; email . setSource ( System . getProperty ( "notification.sender" , "no-sender-specified" ) ) ; Message message = new Message ( new Content ( subject ) , new Body ( new Content ( body ) ) ) ; email . setMessage ( message ) ; client . sendEmail ( email ) ; log . info ( "email sent: {}" , email ) ; } catch ( Exception e ) { log . error ( "failed to send " + email + ": " + e . getMessage ( ) , e ) ; }
public class DescribeReservedNodesResult { /** * The list of < code > ReservedNode < / code > objects . * @ param reservedNodes * The list of < code > ReservedNode < / code > objects . */ public void setReservedNodes ( java . util . Collection < ReservedNode > reservedNodes ) { } }
if ( reservedNodes == null ) { this . reservedNodes = null ; return ; } this . reservedNodes = new com . amazonaws . internal . SdkInternalList < ReservedNode > ( reservedNodes ) ;
public class Preconditions { /** * Checks that the given string is not blank and throws a customized * { @ link NullPointerException } if it is { @ code null } , and a customized * { @ link IllegalArgumentException } if it is empty or whitespace . Intended for doing parameter * validation in methods and constructors , e . g . : * < blockquote > < pre > * public void foo ( String text , String id ) { * checkNotBlank ( * text , * " The text for % s must not be null , empty or whitespace . " , * id ) ; * < / pre > < / blockquote > * @ param str the string to check for being blank * @ param messageFormat a { @ link Formatter format } string for the detail message to be used in * the event that an exception is thrown . * @ param messageArgs the arguments referenced by the format specifiers in the * { @ code messageFormat } * @ return { @ code str } if not { @ code null } * @ throws NullPointerException if { @ code str } is { @ code null } * @ throws IllegalArgumentException if { @ code str } is empty or whitespace */ public static String checkNotBlank ( String str , String messageFormat , Object ... messageArgs ) { } }
checkNotNull ( str , messageFormat , messageArgs ) ; checkArgument ( Strings . isNotBlank ( str ) , messageFormat , messageArgs ) ; return str ;
public class CmsCloneModuleThread { /** * Copies the explorer type icons . < p > * @ param iconPaths the path to the location where the icons are located * @ throws CmsException if something goes wrong */ private void cloneExplorerTypeIcons ( Map < String , String > iconPaths ) throws CmsException { } }
for ( Map . Entry < String , String > entry : iconPaths . entrySet ( ) ) { String source = ICON_PATH + entry . getKey ( ) ; String target = ICON_PATH + entry . getValue ( ) ; if ( getCms ( ) . existsResource ( source ) && ! getCms ( ) . existsResource ( target ) ) { getCms ( ) . copyResource ( source , target ) ; } }
public class VanityPharma { /** * Gets the vanityPharmaText value for this VanityPharma . * @ return vanityPharmaText * The text that will be displayed in display URL of the text * ad when website description * is the selected display mode for vanity pharma URLs . */ public com . google . api . ads . adwords . axis . v201809 . cm . VanityPharmaText getVanityPharmaText ( ) { } }
return vanityPharmaText ;
public class JTimePopup { /** * Create this calendar in a popup menu and synchronize the text field on change . * @ param strDateParam The name of the date property ( defaults to " date " ) . * @ param dateTarget The initial date for this button . */ public static JButton createCalendarButton ( String strDateParam , Date dateTarget ) { } }
JTimeButton button = new JTimeButton ( strDateParam , dateTarget ) ; // button . setMargin ( NO _ INSETS ) ; button . setOpaque ( false ) ; return button ;
public class TopLevelDomainsInner { /** * Gets all legal agreements that user needs to accept before purchasing a domain . * Gets all legal agreements that user needs to accept before purchasing a domain . * @ param name Name of the top - level domain . * @ param agreementOption Domain agreement options . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; TldLegalAgreementInner & gt ; object */ public Observable < Page < TldLegalAgreementInner > > listAgreementsAsync ( final String name , final TopLevelDomainAgreementOption agreementOption ) { } }
return listAgreementsWithServiceResponseAsync ( name , agreementOption ) . map ( new Func1 < ServiceResponse < Page < TldLegalAgreementInner > > , Page < TldLegalAgreementInner > > ( ) { @ Override public Page < TldLegalAgreementInner > call ( ServiceResponse < Page < TldLegalAgreementInner > > response ) { return response . body ( ) ; } } ) ;
public class RaftAgent { /** * Setup serialization and deserialization for Jackson - annotated { @ code Command } objects . * This method should < strong > only < / strong > be called once . * @ param commandSubclassKlass the base class of the Jackson - annotated { @ code Command } classes * @ param < CommandSubclass > the base - class type of the Jackson - annotated { @ code Command } classes * @ throws IllegalStateException if this method is called multiple times * @ see RaftRPC # setupJacksonAnnotatedCommandSerializationAndDeserialization ( ObjectMapper , Class ) */ public synchronized < CommandSubclass extends Command > void setupJacksonAnnotatedCommandSerializationAndDeserialization ( Class < CommandSubclass > commandSubclassKlass ) { } }
checkState ( ! running ) ; checkState ( ! initialized ) ; checkState ( ! setupConversion ) ; RaftRPC . setupJacksonAnnotatedCommandSerializationAndDeserialization ( mapper , commandSubclassKlass ) ; setupConversion = true ;
public class Indexer {
    /**
     * Index the given mutation, adding mutations to the index and metrics table.
     * Like typical use of a BatchWriter, this method does not flush mutations to
     * the underlying index table. For higher throughput the modifications to the
     * metrics table are tracked in memory and added to the metrics table when the
     * indexer is flushed or closed.
     *
     * @param mutation Mutation to index
     */
    public void index(Mutation mutation) {
        // Increment the cardinality for the number of rows in the table
        metrics.get(METRICS_TABLE_ROW_COUNT).incrementAndGet();
        // Track the first and last row values of the table based on seen row IDs
        if (firstRow == null || byteArrayComparator.compare(mutation.getRow(), firstRow) < 0) {
            firstRow = mutation.getRow();
        }
        if (lastRow == null || byteArrayComparator.compare(mutation.getRow(), lastRow) > 0) {
            lastRow = mutation.getRow();
        }
        // For each column update in this mutation
        for (ColumnUpdate columnUpdate : mutation.getUpdates()) {
            // Get the column qualifiers we want to index for this column family (if any)
            ByteBuffer family = wrap(columnUpdate.getColumnFamily());
            Collection<ByteBuffer> indexQualifiers = indexColumns.get(family);
            // If we have column qualifiers we want to index for this column family
            if (indexQualifiers != null) {
                // Check if we want to index this particular qualifier
                ByteBuffer qualifier = wrap(columnUpdate.getColumnQualifier());
                if (indexQualifiers.contains(qualifier)) {
                    // If so, create a mutation using the following mapping:
                    //   Row ID           = column value
                    //   Column Family    = columnqualifier_columnfamily
                    //   Column Qualifier = row ID
                    //   Value            = empty
                    ByteBuffer indexFamily = getIndexColumnFamily(columnUpdate.getColumnFamily(), columnUpdate.getColumnQualifier());
                    Type type = indexColumnTypes.get(family).get(qualifier);
                    ColumnVisibility visibility = new ColumnVisibility(columnUpdate.getColumnVisibility());
                    // If this is an array type, then index each individual element in the array
                    if (Types.isArrayType(type)) {
                        Type elementType = Types.getElementType(type);
                        List<?> elements = serializer.decode(type, columnUpdate.getValue());
                        for (Object element : elements) {
                            addIndexMutation(wrap(serializer.encode(elementType, element)), indexFamily, visibility, mutation.getRow());
                        }
                    } else {
                        // Scalar value: index the raw column value directly
                        addIndexMutation(wrap(columnUpdate.getValue()), indexFamily, visibility, mutation.getRow());
                    }
                }
            }
        }
    }
}
public class ErrorReporter { /** * Writes the error to the specified < code > PrintWriter < / code > . */ public void write ( PrintWriter writer ) { } }
this . output = writer ; dispatch ( base , false ) ; writer . flush ( ) ;
public class ApiKeyResource1 { /** * RESTful endpoint for updating an API key . Note that all attributes of the key will be updated to match the * provided object . */ @ PUT @ Consumes ( MediaType . APPLICATION_JSON ) @ Path ( "{id}" ) public SuccessResponse updateApiKey ( @ PathParam ( "id" ) String id , EmoApiKey emoApiKey , @ Authenticated Subject subject ) { } }
checkArgument ( emoApiKey . getId ( ) . equals ( id ) , "Body contains conflicting API key identifier" ) ; return updateApiKey ( id , new UpdateEmoApiKeyRequest ( ) . setOwner ( emoApiKey . getOwner ( ) ) . setDescription ( emoApiKey . getDescription ( ) ) . setAssignedRoles ( emoApiKey . getRoles ( ) ) . setUnassignOtherRoles ( true ) , subject ) ;
public class SqlAnalyzer {
    /**
     * Extract from the value string every placeholder :{}, replace it with ? and
     * then convert every field typeName into its column typeName. The elaborated
     * statement is stored in {@code this.sqlStatement}; the parameter names,
     * getters and types associated with each ? are accumulated in the parallel
     * list fields.
     *
     * @param elementUtils the element utils
     * @param method the method
     * @param sqlStatement the sql statement
     */
    public void execute(Elements elementUtils, SQLiteModelMethod method, String sqlStatement) {
        SQLiteEntity entity = method.getEntity();
        // Reset the per-invocation accumulators.
        usedMethodParameters = new HashSet<String>();
        paramNames = new ArrayList<String>();
        paramGetters = new ArrayList<String>();
        usedBeanPropertyNames = new ArrayList<String>();
        paramTypeNames = new ArrayList<TypeName>();
        // replace placeholder :{} with ?
        {
            Matcher matcher = PARAMETER.matcher(sqlStatement);
            StringBuffer buffer = new StringBuffer();
            while (matcher.find()) {
                matcher.appendReplacement(buffer, "?");
                // Remember the placeholder's name in statement order.
                paramNames.add(extractParamName(matcher));
            }
            matcher.appendTail(buffer);
            sqlStatement = buffer.toString();
        }
        // replace property typeName with column typeName
        {
            Matcher matcher = WORD.matcher(sqlStatement);
            StringBuffer buffer = new StringBuffer();
            while (matcher.find()) {
                SQLProperty property = entity.findPropertyByName(matcher.group(1));
                if (property != null) {
                    // Words that are not entity properties are left unchanged (handled by appendTail).
                    matcher.appendReplacement(buffer, property.columnName);
                }
            }
            matcher.appendTail(buffer);
            sqlStatement = buffer.toString();
        }
        TypeName rawNameType;
        // analyze parametersName
        String effectiveName;
        for (String rawName : paramNames) {
            JQLParameterName pName = JQLParameterName.parse(rawName);
            if (!pName.isNested()) {
                // Simple parameter: resolve the alias to the real method parameter.
                effectiveName = method.findParameterNameByAlias(pName.getValue());
                rawNameType = method.findParameterTypeByAliasOrName(effectiveName);
                if (rawNameType == null) {
                    throw new MethodParameterNotFoundException(method, effectiveName);
                }
                paramGetters.add(effectiveName);
                paramTypeNames.add(rawNameType);
                usedMethodParameters.add(effectiveName);
                usedBeanPropertyNames.add(null);
            } else {
                // Nested parameter of the form bean.property: the bean must exist
                // and match the entity type, and the property must belong to it.
                if (method.findParameterTypeByAliasOrName(pName.getBeanName()) == null) {
                    throw new MethodParameterNotFoundException(method, pName.getBeanName());
                }
                if (TypeUtility.isEquals(method.findParameterTypeByAliasOrName(pName.getBeanName()), entity) && entity.contains(pName.getValue())) {
                    // there are nested property invocation
                    paramGetters.add(method.findParameterNameByAlias(pName.getBeanName()) + "." + getter(entity.findPropertyByName(pName.getValue())));
                    usedBeanPropertyNames.add(pName.getValue());
                    paramTypeNames.add(TypeUtility.typeName(entity.findPropertyByName(pName.getValue()).getElement().asType()));
                    usedMethodParameters.add(method.findParameterNameByAlias(pName.getBeanName()));
                } else {
                    throw (new PropertyInAnnotationNotFoundException(method, pName.getValue()));
                }
            }
            // } else {
            // throw ( new PropertyInAnnotationNotFoundException ( method , rawName ) ) ;
        }
        this.sqlStatement = sqlStatement;
    }
}
public class AbstractCasWebflowConfigurer { /** * Create flow variable flow variable . * @ param flow the flow * @ param id the id * @ param type the type * @ return the flow variable */ public FlowVariable createFlowVariable ( final Flow flow , final String id , final Class type ) { } }
val opt = Arrays . stream ( flow . getVariables ( ) ) . filter ( v -> v . getName ( ) . equalsIgnoreCase ( id ) ) . findFirst ( ) ; if ( opt . isPresent ( ) ) { return opt . get ( ) ; } val flowVar = new FlowVariable ( id , new BeanFactoryVariableValueFactory ( type , applicationContext . getAutowireCapableBeanFactory ( ) ) ) ; flow . addVariable ( flowVar ) ; return flowVar ;
public class DatabaseSpec { /** * Gets a connection spec configured with the user , password and * connection string specified . Connections created from this connection * spec will have auto commit turned off . * @ return a { @ link ConnectionSpec } . */ public ConnectionSpec toConnectionSpec ( ) { } }
try { return getDatabaseAPI ( ) . newConnectionSpecInstance ( getConnect ( ) , getUser ( ) , getPasswd ( ) , false ) ; } catch ( InvalidConnectionSpecArguments ex ) { throw new AssertionError ( ex ) ; }
public class NamespaceParser { /** * unparseTokens . * @ param prefix a { @ link java . lang . String } object . * @ param args a { @ link java . util . List } object . * @ param out a { @ link java . lang . StringBuilder } object . */ static public void unparseTokens ( final String prefix , final List < ICmdLineArg < ? > > args , final StringBuilder out ) { } }
final Iterator < ICmdLineArg < ? > > aIter = args . iterator ( ) ; while ( aIter . hasNext ( ) ) { final ICmdLineArg < ? > arg = aIter . next ( ) ; if ( arg . isParsed ( ) ) arg . exportNamespace ( prefix , out ) ; }
public class SubsetIterator {
    /**
     * Fill the given collection with the items from the next subset. The collection is <b>not</b>
     * cleared so already contained items will be retained. A reference to this same collection
     * is returned after it has been modified.
     * To store the next subset in a newly allocated {@link LinkedHashSet} the alternative method
     * {@link #next()} may also be used.
     *
     * @param subset collection to fill with items from next generated subset
     * @return reference to given collection, after it has been filled with the items from the next subset
     * @throws NoSuchElementException if there is no next subset to be generated
     */
    public Collection<T> next(Collection<T> subset) {
        // check if there is a next subset to generate
        if (!hasNext()) {
            throw new NoSuchElementException("No more subsets to be generated.");
        }
        // fill collection with currently selected items (returned at the end of the method)
        // NOTE: t holds the selected indices into items; its last slot is a sentinel ("dummy").
        for (int i = 0; i < t.length - 1; i++) { // skip last element (= dummy)
            subset.add(items[t[i]]);
        }
        // set indices of items to be selected in next subset, if any, according to kSubsetRevDoorSuccessor
        // algorithm by Kreher and Stinson (p. 52), modified so that
        //  - it is detected when all subsets of the current size have been generated
        //  - in the latter case, the generation continues with the next size, if still valid
        //  - indices and values in t are counted from 0 to k-1 instead of 1 to k
        //  - special cases (size 1 and full size) also work
        // k indicates current subset size (account for dummy element!)
        int k = t.length - 1;
        // search for first index j where t[j] is different from j
        int j = 0;
        while (j < k && t[j] == j) {
            j++;
        }
        // if j = k-1 and t[j] = |items|-1, or k = |items| or k = 0, all subsets of the current size have been generated
        if (j == k - 1 && t[j] == items.length - 1 || k == items.length || k == 0) {
            // go to next size, if still within bounds
            int nextSize = k + 1;
            if (nextSize <= maxSubsetSize && nextSize <= items.length) {
                // set first subset of next size (t = {0,1,...,nextSize-1})
                t = new int[nextSize + 1];
                for (int i = 0; i < nextSize; i++) {
                    t[i] = i;
                }
                // set dummy
                t[nextSize] = items.length;
            } else {
                // next size is no longer within bounds: t = null signals exhaustion (hasNext() presumably checks this — confirm)
                t = null;
            }
        } else {
            // generate next subset of current size
            // (according to revolving door successor algorithm)
            if ((k - (j + 1)) % 2 != 0) {
                if (j == 0) {
                    t[0] = t[0] - 1;
                } else {
                    t[j - 1] = j;
                    if (j - 2 >= 0) {
                        t[j - 2] = j - 1;
                    }
                }
            } else {
                if (t[j + 1] != t[j] + 1) {
                    if (j - 1 >= 0) {
                        t[j - 1] = t[j];
                    }
                    t[j] = t[j] + 1;
                } else {
                    t[j + 1] = t[j];
                    t[j] = j;
                }
            }
        }
        // return current subset
        return subset;
    }
}
public class DockerUtils { /** * Deletes a Docker image if it exists . * @ param imageId the image ID ( not null ) * @ param dockerClient a Docker client */ public static void deleteImageIfItExists ( String imageId , DockerClient dockerClient ) { } }
if ( imageId != null ) { List < Image > images = dockerClient . listImagesCmd ( ) . exec ( ) ; if ( findImageById ( imageId , images ) != null ) dockerClient . removeImageCmd ( imageId ) . withForce ( true ) . exec ( ) ; }
public class MtasFetchData { /** * Gets the file . * @ param prefix the prefix * @ param postfix the postfix * @ return the file * @ throws MtasParserException the mtas parser exception */ public Reader getFile ( String prefix , String postfix ) throws MtasParserException { } }
String file = getString ( ) ; if ( ( file != null ) && ! file . equals ( "" ) ) { if ( prefix != null ) { file = prefix + file ; } if ( postfix != null ) { file = file + postfix ; } Path path = ( new File ( file ) ) . toPath ( ) ; if ( Files . isReadable ( path ) ) { try { return new InputStreamReader ( new GZIPInputStream ( new FileInputStream ( file ) ) , StandardCharsets . UTF_8 ) ; } catch ( IOException e1 ) { log . debug ( e1 ) ; try { String text = new String ( Files . readAllBytes ( Paths . get ( file ) ) , StandardCharsets . UTF_8 ) ; return new StringReader ( text ) ; } catch ( IOException e2 ) { log . debug ( e2 ) ; throw new MtasParserException ( e2 . getMessage ( ) ) ; } } } else { throw new MtasParserException ( "file '" + file + "' does not exists or not readable" ) ; } } else { throw new MtasParserException ( "no valid file: " + file ) ; }
public class StopThreadsCleanUp { /** * Get { @ link Runnable } of given thread , if any */ private Runnable getRunnable ( ClassLoaderLeakPreventor preventor , Thread thread ) { } }
if ( oracleTarget == null && ibmRunnable == null ) { // Not yet initialized oracleTarget = preventor . findField ( Thread . class , "target" ) ; // Sun / Oracle JRE ibmRunnable = preventor . findField ( Thread . class , "runnable" ) ; // IBM JRE } return ( oracleTarget != null ) ? ( Runnable ) preventor . getFieldValue ( oracleTarget , thread ) : // Sun / Oracle JRE ( Runnable ) preventor . getFieldValue ( ibmRunnable , thread ) ; // IBM JRE
public class PublicCardUrl { /** * Get Resource Url for Delete * @ param cardId Unique identifier of the card associated with the customer account billing contact . * @ return String Resource Url */ public static MozuUrl deleteUrl ( String cardId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/payments/commerce/payments/cards/{cardId}" ) ; formatter . formatUrl ( "cardId" , cardId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . PCI_POD ) ;
public class Configuration { /** * Return the XML DOM corresponding to this Configuration . */ private synchronized Document asXmlDocument ( ) throws IOException { } }
Document doc ; try { doc = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) . newDocument ( ) ; } catch ( ParserConfigurationException pe ) { throw new IOException ( pe ) ; } Element conf = doc . createElement ( "configuration" ) ; doc . appendChild ( conf ) ; conf . appendChild ( doc . createTextNode ( "\n" ) ) ; handleDeprecation ( ) ; // ensure properties is set and deprecation is handled for ( Enumeration < Object > e = properties . keys ( ) ; e . hasMoreElements ( ) ; ) { String name = ( String ) e . nextElement ( ) ; Object object = properties . get ( name ) ; String value = null ; if ( object instanceof String ) { value = ( String ) object ; } else { continue ; } Element propNode = doc . createElement ( "property" ) ; conf . appendChild ( propNode ) ; Element nameNode = doc . createElement ( "name" ) ; nameNode . appendChild ( doc . createTextNode ( name ) ) ; propNode . appendChild ( nameNode ) ; Element valueNode = doc . createElement ( "value" ) ; valueNode . appendChild ( doc . createTextNode ( value ) ) ; propNode . appendChild ( valueNode ) ; if ( updatingResource != null ) { String [ ] sources = updatingResource . get ( name ) ; if ( sources != null ) { for ( String s : sources ) { Element sourceNode = doc . createElement ( "source" ) ; sourceNode . appendChild ( doc . createTextNode ( s ) ) ; propNode . appendChild ( sourceNode ) ; } } } conf . appendChild ( doc . createTextNode ( "\n" ) ) ; } return doc ;
public class Repository { /** * < p > removeSpecification . < / p > * @ param specification a { @ link com . greenpepper . server . domain . Specification } object . * @ throws com . greenpepper . server . GreenPepperServerException if any . */ public void removeSpecification ( Specification specification ) throws GreenPepperServerException { } }
if ( ! specifications . contains ( specification ) ) throw new GreenPepperServerException ( GreenPepperServerErrorKey . SPECIFICATION_NOT_FOUND , "Specification not found" ) ; specifications . remove ( specification ) ; specification . setRepository ( null ) ;
public class Predicates { /** * Creates a predicate which returns true if an attribute selected from an object passed to accept method * is not contained in the iterable . */ public static < T > Predicates < T > attributeNotIn ( Function < ? super T , ? > function , Iterable < ? > iterable ) { } }
return new AttributePredicate < T , Object > ( function , Predicates . notIn ( iterable ) ) ;
public class HtmlSerializerMiddlewares { /** * Sync middleware for POJO . * @ param templateName The template name , respective to the " resources " folder * @ param < T > The Type of the parameters * @ return the middlware */ public static < T > Middleware < SyncHandler < T > , AsyncHandler < Response < ByteString > > > htmlSerializeSync ( final String templateName ) { } }
Middleware < SyncHandler < T > , AsyncHandler < T > > syncToAsync = Middleware :: syncToAsync ; return syncToAsync . and ( htmlSerialize ( templateName ) ) ;
public class CmsPropertyEditorHelper { /** * Determines if the title property should be changed in case of a ' NavText ' change . < p > * @ param properties the current resource properties * @ return < code > true < / code > if the title property should be changed in case of a ' NavText ' change */ private boolean shouldChangeTitle ( Map < String , CmsProperty > properties ) { } }
return ( properties == null ) || ( properties . get ( CmsPropertyDefinition . PROPERTY_TITLE ) == null ) || ( properties . get ( CmsPropertyDefinition . PROPERTY_TITLE ) . getValue ( ) == null ) || ( ( properties . get ( CmsPropertyDefinition . PROPERTY_NAVTEXT ) != null ) && properties . get ( CmsPropertyDefinition . PROPERTY_TITLE ) . getValue ( ) . equals ( properties . get ( CmsPropertyDefinition . PROPERTY_NAVTEXT ) . getValue ( ) ) ) ;
public class AmazonDynamoDBAsyncClient { /** * Updates the provisioned throughput for the given table . * Setting the throughput for a table helps you manage performance and is * part of the Provisioned Throughput feature of Amazon DynamoDB . * @ param updateTableRequest Container for the necessary parameters to * execute the UpdateTable operation on AmazonDynamoDB . * @ return A Java Future object containing the response from the * UpdateTable service method , as returned by AmazonDynamoDB . * @ throws AmazonClientException * If any internal errors are encountered inside the client while * attempting to make the request or handle the response . For example * if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonDynamoDB indicating * either a problem with the data in the request , or a server side issue . */ public Future < UpdateTableResult > updateTableAsync ( final UpdateTableRequest updateTableRequest ) throws AmazonServiceException , AmazonClientException { } }
return executorService . submit ( new Callable < UpdateTableResult > ( ) { public UpdateTableResult call ( ) throws Exception { return updateTable ( updateTableRequest ) ; } } ) ;
public class DatatypeConverter { /** * Print an extended attribute currency value . * @ param value currency value * @ return string representation */ public static final String printExtendedAttributeCurrency ( Number value ) { } }
return ( value == null ? null : NUMBER_FORMAT . get ( ) . format ( value . doubleValue ( ) * 100 ) ) ;
public class RequestHelper { /** * Returns the { @ link Slot } for the given slot name from the request . * This method attempts to retrieve the requested { @ link Slot } from the incoming request . If the slot does not * exist in the request , an { @ link Optional } empty is returned . * This method returns an { @ link IllegalArgumentException } if the incoming request is not an { @ link IntentRequest } . * @ param slotName name of the slot to retrieve * @ return an { @ link Optional } containing the target slot if it exists in the request , else an empty { @ link Optional } */ public Optional < Slot > getSlot ( String slotName ) { } }
Map < String , Slot > slots = castRequestType ( handlerInput , IntentRequest . class ) . getIntent ( ) . getSlots ( ) ; if ( slots != null ) { return Optional . ofNullable ( slots . get ( slotName ) ) ; } return Optional . empty ( ) ;
public class filterhtmlinjectionvariable { /** * Use this API to update filterhtmlinjectionvariable resources . */ public static base_responses update ( nitro_service client , filterhtmlinjectionvariable resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { filterhtmlinjectionvariable updateresources [ ] = new filterhtmlinjectionvariable [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new filterhtmlinjectionvariable ( ) ; updateresources [ i ] . variable = resources [ i ] . variable ; updateresources [ i ] . value = resources [ i ] . value ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class HiveRegistrationPolicyBase { /** * Enrich the table - level properties with properties carried over from ingestion runtime . * Extend this class to add more runtime properties if required . */ protected State getRuntimePropsEnrichedTblProps ( ) { } }
State tableProps = new State ( this . props . getTablePartitionProps ( ) ) ; if ( this . props . getRuntimeTableProps ( ) . isPresent ( ) ) { tableProps . setProp ( HiveMetaStoreUtils . RUNTIME_PROPS , this . props . getRuntimeTableProps ( ) . get ( ) ) ; } return tableProps ;
public class RenderingErrorMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RenderingError renderingError , ProtocolMarshaller protocolMarshaller ) { } }
if ( renderingError == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( renderingError . getCode ( ) , CODE_BINDING ) ; protocolMarshaller . marshall ( renderingError . getMessage ( ) , MESSAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SimplePBKDF2 {
    /**
     * Verification function.
     *
     * @param formatted
     *            &quot;salt:iteration-count:derived-key&quot; (depends on
     *            effective formatter). This value should come from server-side
     *            storage.
     * @param candidatePassword
     *            The password that is checked against the formatted reference
     *            data. This value will usually be supplied by the
     *            &quot;user&quot; or &quot;client&quot;.
     * @return <code>true</code> verification OK. <code>false</code>
     *         verification failed or formatter unable to decode input value as
     *         PBKDF2 parameters.
     */
    public boolean verifyKeyFormatted(String formatted, String candidatePassword) {
        // Parameter as member of Engine was not the smartest design decision back then...
        // Snapshot the engine's current parameters so they can be restored after
        // verification temporarily swaps in the decoded ones.
        PBKDF2Parameters p = getParameters();
        PBKDF2Parameters q = new PBKDF2Parameters();
        // Carry over algorithm/charset; salt and iteration count are decoded from 'formatted'.
        q.hashAlgorithm = p.hashAlgorithm;
        q.hashCharset = p.hashCharset;
        boolean verifyOK = false;
        // NOTE(review): fromString(q, formatted) appears to return TRUE on decode
        // FAILURE (error flag), hence the negation — verify against the formatter's
        // contract before changing this condition.
        if (!getFormatter().fromString(q, formatted)) {
            try {
                setParameters(q);
                verifyOK = verifyKey(candidatePassword);
            } finally {
                // Always restore the original engine parameters, even if verifyKey throws.
                setParameters(p);
            }
        }
        return verifyOK;
    }
}
public class CommandMessage { /** * { @ inheritDoc } */ @ Override protected void addParameters ( StringBuilder result ) { } }
super . addParameters ( result ) ; result . append ( ",messageRefType=" ) ; result . append ( messageRefType ) ; result . append ( ",operation=" ) ; result . append ( operation ) ;
public class TypeQualifierApplications {
    /**
     * Look for a default type qualifier annotation, trying in order: the
     * standard JSR-305 default mechanism, the FindBugs-specific defaults
     * (general, then per-element-type), and finally Eclipse JDT's
     * NonNullByDefault.
     *
     * @param o
     *            an AnnotatedObject
     * @param typeQualifierValue
     *            a TypeQualifierValue
     * @param elementType
     *            type of element for which we're looking for a default
     *            annotation
     * @return default TypeQualifierAnnotation, or null if none
     */
    private static @CheckForNull TypeQualifierAnnotation getDefaultAnnotation(AnnotatedObject o, TypeQualifierValue<?> typeQualifierValue, ElementType elementType) {
        // Try to find a default annotation using the standard JSR-305
        // default annotation mechanism.
        TypeQualifierAnnotation result;
        Collection<AnnotationValue> values = TypeQualifierResolver.resolveTypeQualifierDefaults(o.getAnnotations(), elementType);
        TypeQualifierAnnotation tqa = extractAnnotation(values, typeQualifierValue);
        if (tqa != null) {
            // System.out.println("Found default annotation of " + tqa +
            // " for element " + elementType + " in " + o);
            return tqa;
        }
        // Try one of the FindBugs-specific default annotation mechanisms.
        // NOTE: 'result' is assigned inside the condition; it is null when this
        // branch is not taken.
        if ((result = checkFindBugsDefaultAnnotation(FindBugsDefaultAnnotations.DEFAULT_ANNOTATION, o, typeQualifierValue)) != null) {
            return result;
        }
        // Per-element-type FindBugs defaults.
        switch (elementType) {
        case FIELD:
            result = checkFindBugsDefaultAnnotation(FindBugsDefaultAnnotations.DEFAULT_ANNOTATION_FOR_FIELDS, o, typeQualifierValue);
            break;
        case METHOD:
            result = checkFindBugsDefaultAnnotation(FindBugsDefaultAnnotations.DEFAULT_ANNOTATION_FOR_METHODS, o, typeQualifierValue);
            break;
        case PARAMETER:
            result = checkFindBugsDefaultAnnotation(FindBugsDefaultAnnotations.DEFAULT_ANNOTATION_FOR_PARAMETERS, o, typeQualifierValue);
            break;
        default:
            // ignore
            break;
        }
        // Try out default JDT (Eclipse) annotations
        if (result == null) {
            AnnotationValue annotationValue = o.getAnnotation(TypeQualifierResolver.eclipseNonNullByDefault);
            if (annotationValue != null) {
                Collection<AnnotationValue> resolvedTypeQualifiers = TypeQualifierResolver.resolveTypeQualifiers(annotationValue);
                tqa = extractAnnotation(resolvedTypeQualifiers, typeQualifierValue);
                if (tqa != null) {
                    return tqa;
                }
            }
        }
        return result;
    }
}
public class MailSender { /** * Send . * @ param mailInfo the mail info */ public void send ( final MailInfo mailInfo ) { } }
try { final MultiPartEmail email = new MultiPartEmail ( ) ; email . setCharset ( "utf-8" ) ; mailInfo . fillEmail ( email ) ; email . send ( ) ; } catch ( Exception e ) { JK . throww ( e ) ; }
public class IfcDocumentInformationImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the editors list via EMF's reflective feature accessor
     * ({@code true} requests proxy resolution). Do not hand-edit: this
     * accessor is regenerated from the model.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcActorSelect> getEditors() {
        return (EList<IfcActorSelect>) eGet(Ifc2x3tc1Package.Literals.IFC_DOCUMENT_INFORMATION__EDITORS, true);
    }
}
public class FrameSplitter {
    /**
     * Computes the element-start-per-chunk (ESPC) layout for each split.
     * The whole vector's row count {@code len} is partitioned by {@code ratios};
     * for each partition, the relevant slice of the input {@code espc} is
     * rebased to start at 0, and the partition's own row count is appended as
     * the last entry.
     *
     * @param espc   cumulative chunk boundaries of the source vector; espc[0]==0, espc[last]==len
     * @param len    total number of rows
     * @param ratios split ratios (nparts = ratios.length + 1)
     * @return per-split ESPC arrays
     */
    static long[/* nsplits */][/* nchunks */] computeEspcPerSplit(long[] espc, long len, double[] ratios) {
        assert espc.length > 0 && espc[0] == 0;
        assert espc[espc.length - 1] == len;
        long[] partSizes = partitione(len, ratios); // Split of whole vector
        int nparts = ratios.length + 1;
        long[][] r = new long[nparts][espc.length]; // espc for each partition
        long nrows = 0;
        long start = 0;
        // NOTE: 'c' deliberately persists across partitions — each partition
        // continues scanning espc where the previous one stopped.
        for (int p = 0, c = 0; p < nparts; p++) {
            int nc = 0; // number of chunks for this partition
            // Copy all chunk boundaries that fall entirely within this partition,
            // rebased so the partition's espc starts at 0.
            for (; c < espc.length - 1 && (espc[c + 1] - start) <= partSizes[p]; c++)
                r[p][++nc] = espc[c + 1] - start;
            if (r[p][nc] < partSizes[p]) r[p][++nc] = partSizes[p];
            // last item in espc contains number of rows
            r[p] = Arrays.copyOf(r[p], nc + 1);
            // Transfer rest of lines to the next part
            nrows = nrows - partSizes[p];
            start += partSizes[p];
        }
        return r;
    }
}
public class Widget { /** * Remove the layout { @ link Layout } from the chain * @ param layout { @ link Layout } * @ return true if layout has been removed successfully , false - otherwise */ public boolean removeLayout ( final Layout layout ) { } }
boolean removed = mLayouts . remove ( layout ) ; if ( layout != null && removed ) { layout . onLayoutApplied ( null , new Vector3Axis ( ) ) ; } return removed ;
public class InterceptorMetaDataFactory { /** * d472972 - rewrote entire method to pass CTS . */ private EJBInterceptorBinding findInterceptorBindingForMethod ( final Method method ) { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "findInterceptorBindingForMethod: " + method . toString ( ) ) ; } // Use style 4 binding if there is one for the method . String methodSignature = MethodAttribUtils . methodSignature ( method ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "lookup style 4 for method signature: " + methodSignature ) ; } EJBInterceptorBinding binding = ivStyle4InterceptorBindingMap . get ( methodSignature ) ; // If no Style 4 binding , then see if there is a Style 3 binding . if ( binding == null ) { String methodName = method . getName ( ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "lookup style 3 for method: " + methodName ) ; } binding = ivStyle3InterceptorBindingMap . get ( methodName ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) { if ( binding != null ) { Tr . exit ( tc , "findInterceptorBindingForMethod found: " ) ; binding . dump ( ) ; } else { Tr . exit ( tc , "findInterceptorBindingForMethod, interceptor-binding not found" ) ; } } return binding ;
public class Validate { /** * Checks if the given String is a positive double value . < br > * This method tries to parse a double value and then checks if it is bigger than 0. * @ param value The String value to validate . * @ return The parsed double value * @ throws ParameterException if the given String value cannot be parsed as double or its value is smaller or equal to 0. */ public static Double positiveDouble ( String value ) { } }
Double doubleValue = Validate . isDouble ( value ) ; positive ( doubleValue ) ; return doubleValue ;
public class BaseAdditiveAttributeMerger { /** * Do a deep clone of an attribute Map to ensure it is completley mutable . * @ param attributes Attribute map * @ return Mutable attribute map */ protected Map < String , List < Object > > buildMutableAttributeMap ( final Map < String , List < Object > > attributes ) { } }
final Map < String , List < Object > > mutableValuesBuilder = this . createMutableAttributeMap ( attributes . size ( ) ) ; for ( final Map . Entry < String , List < Object > > attrEntry : attributes . entrySet ( ) ) { final String key = attrEntry . getKey ( ) ; List < Object > value = attrEntry . getValue ( ) ; if ( value != null ) { value = new ArrayList < > ( value ) ; } mutableValuesBuilder . put ( key , value ) ; } return mutableValuesBuilder ;
public class Util {
    /**
     * Serialize the given DOM node as XML text to the given writer using an
     * identity transform. Transformer failures are printed to standard error
     * and otherwise swallowed, preserving the original best-effort contract.
     *
     * @param doc the DOM node (document or element) to serialize
     * @param out the writer receiving the XML text
     */
    public static void convertDOMToXML(Node doc, Writer out) {
        try {
            // A Transformer with no stylesheet performs the identity transform.
            TransformerFactory tFactory = TransformerFactory.newInstance();
            Transformer transformer = tFactory.newTransformer();
            transformer.transform(new DOMSource(doc), new StreamResult(out));
        } catch (TransformerException te) {
            // TransformerConfigurationException is a subclass of TransformerException,
            // so this single catch replaces the two identical handlers of the original.
            te.printStackTrace();
        }
    }
}
public class MatchAllWeight { /** * { @ inheritDoc } */ @ Override public Explanation explain ( IndexReader reader , int doc ) throws IOException { } }
return new Explanation ( Similarity . getDefault ( ) . idf ( reader . maxDoc ( ) , reader . maxDoc ( ) ) , "matchAll" ) ;
public class AdWordsServicesWithRateLimiter { /** * Gets a rate - limit - aware client for the service represented by the interface with a reference to * the session . * < p > The objects returned by this method are not thread - safe . * @ param < T > the service type * @ param session your current session * @ param interfaceClass the service interface class . This is a class representing a SOAP service * @ return the rate - limit - aware client for the service */ @ Override public < T > T get ( AdWordsSession session , Class < T > interfaceClass ) { } }
T originalInterfaceObject = adWordsServices . get ( session , interfaceClass ) ; return getProxyObject ( originalInterfaceObject , session , interfaceClass , false ) ;
public class LonePairGenerator { /** * { @ inheritDoc } */ @ Override public IRenderingElement generate ( IAtomContainer container , RendererModel model ) { } }
ElementGroup group = new ElementGroup ( ) ; // TODO : put into RendererModel final double SCREEN_RADIUS = 1.0 ; // separation between centers final double SCREEN_SEPARATION = 2.5 ; final Color RADICAL_COLOR = Color . BLACK ; // XXX : is this the best option ? final double ATOM_RADIUS = ( ( AtomRadius ) model . getParameter ( AtomRadius . class ) ) . getValue ( ) ; double scale = model . getParameter ( Scale . class ) . getValue ( ) ; double modelAtomRadius = ATOM_RADIUS / scale ; double modelPointRadius = SCREEN_RADIUS / scale ; double modelSeparation = SCREEN_SEPARATION / scale ; for ( ILonePair lonePair : container . lonePairs ( ) ) { IAtom atom = lonePair . getAtom ( ) ; Point2d point = atom . getPoint2d ( ) ; int align = GeometryUtil . getBestAlignmentForLabelXY ( container , atom ) ; double xRadius = point . x ; double yRadius = point . y ; double diffx = 0 ; double diffy = 0 ; if ( align == 1 ) { xRadius += modelAtomRadius ; diffy += modelSeparation ; } else if ( align == - 1 ) { xRadius -= modelAtomRadius ; diffy += modelSeparation ; } else if ( align == 2 ) { yRadius -= modelAtomRadius ; diffx += modelSeparation ; } else if ( align == - 2 ) { yRadius += modelAtomRadius ; diffx += modelSeparation ; } group . add ( new OvalElement ( xRadius + diffx , yRadius + diffy , modelPointRadius , true , RADICAL_COLOR ) ) ; group . add ( new OvalElement ( xRadius - diffx , yRadius - diffy , modelPointRadius , true , RADICAL_COLOR ) ) ; } return group ;
public class PowerMock { /** * A utility method that may be used to nicely mock several methods in an * easy way ( by just passing in the method names of the method you wish to * mock ) . Note that you cannot uniquely specify a method to mock using this * method if there are several methods with the same name in * { @ code type } . This method will mock ALL methods that match the * supplied name regardless of parameter types and signature . If this is the * case you should fall - back on using the * { @ link # createMock ( Class , Method . . . ) } method instead . * With this method you can specify where the class hierarchy the methods * are located . This is useful in , for example , situations where class A * extends B and both have a method called " mockMe " ( A overrides B ' s mockMe * method ) and you like to specify the only the " mockMe " method in B should * be mocked . " mockMe " in A should be left intact . In this case you should * do : * < pre > * A tested = createPartialMockNice ( A . class , B . class , & quot ; mockMe & quot ; ) ; * < / pre > * @ param < T > The type of the mock . * @ param type The type that ' ll be used to create a mock instance . * @ param where Where in the class hierarchy the methods resides . * @ param methodNames The names of the methods that should be mocked . If * { @ code null } , then this method will have the same effect * as just calling { @ link # createMock ( Class , Method . . . ) } with the * second parameter as { @ code new Method [ 0 ] } ( i . e . all * methods in that class will be mocked ) . * @ return A mock object of type < T > . */ public static synchronized < T > T createNicePartialMock ( Class < T > type , Class < ? super T > where , String ... methodNames ) { } }
return createNiceMock ( type , Whitebox . getMethods ( where , methodNames ) ) ;
public class ApiOvhEmailexchange { /** * Alter this object properties * REST : PUT / email / exchange / { organizationName } / service / { exchangeService } / sharedAccount / { sharedEmailAddress } * @ param body [ required ] New object properties * @ param organizationName [ required ] The internal name of your exchange organization * @ param exchangeService [ required ] The internal name of your exchange service * @ param sharedEmailAddress [ required ] Default email for this shared mailbox */ public void organizationName_service_exchangeService_sharedAccount_sharedEmailAddress_PUT ( String organizationName , String exchangeService , String sharedEmailAddress , OvhSharedAccount body ) throws IOException { } }
String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/sharedAccount/{sharedEmailAddress}" ; StringBuilder sb = path ( qPath , organizationName , exchangeService , sharedEmailAddress ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class CmsResourceUtil { /** * Returns the big icon resource for the given resource . < p > * @ param explorerType the resource explorer type settings * @ param resourceName the resource name * @ return the icon resource */ public static Resource getBigIconResource ( CmsExplorerTypeSettings explorerType , String resourceName ) { } }
if ( explorerType == null ) { explorerType = OpenCms . getWorkplaceManager ( ) . getExplorerTypeSetting ( ( resourceName == null ) && ! CmsResource . isFolder ( resourceName ) ? CmsResourceTypeUnknownFile . RESOURCE_TYPE_NAME : CmsResourceTypeUnknownFolder . RESOURCE_TYPE_NAME ) ; } if ( ! explorerType . getIconRules ( ) . isEmpty ( ) && ( resourceName != null ) ) { String extension = CmsResource . getExtension ( resourceName ) ; if ( extension != null ) { CmsIconRule rule = explorerType . getIconRules ( ) . get ( extension ) ; if ( ( rule != null ) && ( rule . getBigIconStyle ( ) != null ) ) { return new CmsCssIcon ( rule . getBigIconStyle ( ) ) ; } } } if ( explorerType . getBigIconStyle ( ) != null ) { return new CmsCssIcon ( explorerType . getBigIconStyle ( ) ) ; } else if ( explorerType . getBigIcon ( ) != null ) { return new ExternalResource ( CmsWorkplace . getResourceUri ( CmsWorkplace . RES_PATH_FILETYPES + explorerType . getBigIcon ( ) ) ) ; } else { return new CmsCssIcon ( CmsExplorerTypeSettings . ICON_STYLE_DEFAULT_BIG ) ; }
public class AbstractAppender { /** * Builds an empty AppendEntries request . * Empty append requests are used as heartbeats to followers . */ @ SuppressWarnings ( "unchecked" ) protected AppendRequest buildAppendEmptyRequest ( MemberState member ) { } }
Entry prevEntry = getPrevEntry ( member ) ; ServerMember leader = context . getLeader ( ) ; return AppendRequest . builder ( ) . withTerm ( context . getTerm ( ) ) . withLeader ( leader != null ? leader . id ( ) : 0 ) . withLogIndex ( prevEntry != null ? prevEntry . getIndex ( ) : 0 ) . withLogTerm ( prevEntry != null ? prevEntry . getTerm ( ) : 0 ) . withEntries ( Collections . EMPTY_LIST ) . withCommitIndex ( context . getCommitIndex ( ) ) . withGlobalIndex ( context . getGlobalIndex ( ) ) . build ( ) ;
public class Menus {

    /**
     * Sets up the key areas (index definitions) for this record.
     * Each KeyAreaInfo registers itself against this record via its constructor.
     */
    public void setupKeys() {
        KeyAreaInfo keyArea = null;
        // Primary unique key on the record ID
        keyArea = new KeyAreaInfo(this, Constants.UNIQUE, ID_KEY);
        keyArea.addKeyField(ID, Constants.ASCENDING);
        // Non-unique key ordering menu entries within their parent folder
        keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, PARENT_FOLDER_ID_KEY);
        keyArea.addKeyField(PARENT_FOLDER_ID, Constants.ASCENDING);
        keyArea.addKeyField(SEQUENCE, Constants.ASCENDING);
        keyArea.addKeyField(TYPE, Constants.ASCENDING);
        keyArea.addKeyField(NAME, Constants.ASCENDING);
        // Secondary key for lookup by menu code
        keyArea = new KeyAreaInfo(this, Constants.SECONDARY_KEY, CODE_KEY);
        keyArea.addKeyField(CODE, Constants.ASCENDING);
        // Non-unique key for lookup by type and program
        keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, TYPE_KEY);
        keyArea.addKeyField(TYPE, Constants.ASCENDING);
        keyArea.addKeyField(PROGRAM, Constants.ASCENDING);
    }
}
public class TangoEventsAdapter {

    /**
     * Subscribes the given listener to change events for the named attribute.
     * A TangoChange event source is cached per "deviceName/attrName" key and
     * created lazily on first subscription.
     *
     * @param listener the change listener to register
     * @param attrName the attribute name to listen on
     * @param filters event filters handed to a newly created TangoChange source
     * @param stateless whether the subscription is stateless
     * @throws DevFailed if the underlying subscription fails
     */
    public void addTangoChangeListener(ITangoChangeListener listener, String attrName, String[] filters, boolean stateless) throws DevFailed {
        TangoChange tangoChange;
        String key = deviceName + "/" + attrName;
        // NOTE(review): this check-then-put on tango_change_source happens outside
        // the synchronized block below; concurrent callers could create duplicate
        // TangoChange sources for the same key — confirm callers are single-threaded.
        if ((tangoChange = tango_change_source.get(key)) == null) {
            tangoChange = new TangoChange(deviceProxy, attrName, filters);
            tango_change_source.put(key, tangoChange);
        }
        // Listener registration itself is serialized on the shared monitor.
        synchronized (moni) {
            tangoChange.addTangoChangeListener(listener, stateless);
        }
    }
}
public class Annotations {

    /**
     * Attach @ConnectionDefinition.
     *
     * Builds a metadata ConnectionDefinition for the given managed connection
     * factory class from the annotation values, attaching only the config
     * properties that belong to that class (or, for plain properties, to any
     * of its supertypes).
     *
     * @param mcf the managed connection factory class name
     * @param cd the connection definition annotation
     * @param classLoader the class loader used to resolve the MCF class hierarchy
     * @param configProperties the config properties
     * @param plainConfigProperties the plain config properties
     * @return the metadata connection definition
     * @exception Exception thrown if an error occurs
     */
    private ConnectionDefinition attachConnectionDefinition(String mcf, javax.resource.spi.ConnectionDefinition cd,
            ClassLoader classLoader, ArrayList<? extends ConfigProperty> configProperties,
            ArrayList<? extends ConfigProperty> plainConfigProperties) throws Exception {
        if (trace)
            log.trace("Processing: " + cd);
        // Properties declared directly against this MCF class
        ArrayList<ConfigProperty> validProperties = new ArrayList<ConfigProperty>();
        if (configProperties != null) {
            for (ConfigProperty configProperty : configProperties) {
                if (mcf.equals(((ConfigPropertyImpl) configProperty).getAttachedClassName())) {
                    if (trace)
                        log.tracef("Attaching: %s (%s)", configProperty, mcf);
                    validProperties.add(configProperty);
                }
            }
        }
        // Plain properties attach if declared on the MCF class or any of its supertypes
        if (plainConfigProperties != null) {
            Set<String> mcfClasses = getClasses(mcf, classLoader);
            for (ConfigProperty configProperty : plainConfigProperties) {
                if (mcfClasses.contains(((ConfigPropertyImpl) configProperty).getAttachedClassName())) {
                    if (trace)
                        log.tracef("Attaching: %s (%s)", configProperty, mcf);
                    validProperties.add(configProperty);
                }
            }
        }
        validProperties.trimToSize();
        // Assemble the metadata object from the annotation's class references
        XsdString connectionfactoryInterface = new XsdString(cd.connectionFactory().getName(), null);
        XsdString managedconnectionfactoryClass = new XsdString(mcf, null);
        XsdString connectionImplClass = new XsdString(cd.connectionImpl().getName(), null);
        XsdString connectionfactoryImplClass = new XsdString(cd.connectionFactoryImpl().getName(), null);
        XsdString connectionInterface = new XsdString(cd.connection().getName(), null);
        return new ConnectionDefinitionImpl(managedconnectionfactoryClass, validProperties,
                connectionfactoryInterface, connectionfactoryImplClass, connectionInterface,
                connectionImplClass, null);
    }
}
public class AsciiArtRenderer { /** * < b > Example : < / b > * { @ code > } */ @ Override public void renderNothing ( PositionedText target , double x , double y , boolean forward ) { } }
target . add ( x , y , ">" ) ;
public class TextReport {

    /**
     * Test events subscriptions: handles the aggregated start event by logging
     * the suite/JVM counts and preparing the per-JVM output writers.
     */
    @Subscribe
    public void onStart(AggregatedStartEvent e) throws IOException {
        // Remember the totals for later progress reporting
        this.totalSuites = e.getSuiteCount();
        logShort("Executing " + totalSuites + Pluralize.pluralize(totalSuites, " suite") + " with "
                + e.getSlaveCount() + Pluralize.pluralize(e.getSlaveCount(), " JVM") + ".\n", false);
        forkedJvmCount = e.getSlaveCount();
        // Column width for the JVM id: enough digits for the largest id
        jvmIdFormat = " J%-" + (1 + (int) Math.floor(Math.log10(forkedJvmCount))) + "d";
        outWriter = new PrefixedWriter(stdoutIndent, output, DEFAULT_MAX_LINE_WIDTH);
        errWriter = new PrefixedWriter(stderrIndent, output, DEFAULT_MAX_LINE_WIDTH);
    }
}
public class Types {

    /**
     * Resolve a sub type of the given super type.
     * Thin delegate to the shared {@code typeResolver}; argument validation is
     * the resolver's responsibility.
     *
     * @param superType the super type
     * @param subType the sub type to resolve
     * @return the resolved type
     * @see TypeResolver#resolveSubtype(ResolvedType, Class)
     */
    @NonNull
    public static ResolvedType resolveSubtype(@NonNull ResolvedType superType, @NonNull Class<?> subType) {
        return typeResolver.resolveSubtype(superType, subType);
    }
}
public class LuceneQueryHits { /** * { @ inheritDoc } */ public void close ( ) throws IOException { } }
if ( scorer != null ) { // make sure scorer frees resources scorer . advance ( Integer . MAX_VALUE ) ; } if ( releaseReaderOnClose && reader != null && reader instanceof ReleaseableIndexReader ) { ( ( ReleaseableIndexReader ) reader ) . release ( ) ; }
public class DomainModelControllerService {

    /**
     * Boots the host controller.
     * <p>
     * Installs the server-inventory and auth callback services, runs the host.xml
     * boot operations (the initial {@code /host=foo:add()} op first, then the rest),
     * then either connects to a remote domain controller or boots from the local
     * (possibly cached) domain.xml, validates the resulting domain model, installs
     * the management services, and finally starts the managed servers. On failure
     * the process is aborted (unless embedded).
     *
     * @param context the boot context
     * @throws ConfigurationPersistenceException if loading persisted configuration fails
     */
    @Override
    protected void boot(final BootContext context) throws ConfigurationPersistenceException {
        final ServiceTarget serviceTarget = context.getServiceTarget();
        boolean ok = false;
        boolean reachedServers = false;
        try {
            // Install server inventory callback
            ServerInventoryCallbackService.install(serviceTarget);
            // handler for domain server auth.
            DomainManagedServerCallbackHandler.install(serviceTarget);
            // Parse the host.xml and invoke all the ops. The ops should rollback on any Stage.RUNTIME failure
            List<ModelNode> hostBootOps = hostControllerConfigurationPersister.load();
            if (hostBootOps.isEmpty()) {
                // booting with empty config
                ok = bootEmptyConfig(context);
                return;
            }
            // We run the first op ("/host=foo:add()") separately to let it set up the host ManagementResourceRegistration
            ModelNode addHostOp = hostBootOps.remove(0);
            HostControllerLogger.ROOT_LOGGER.debug("Invoking the initial host=foo:add() op");
            // Disable model validation here since it will fail
            ok = boot(Collections.singletonList(addHostOp), true, true);
            // Add the controller initialization operation
            hostBootOps.add(registerModelControllerServiceInitializationBootStep(context));
            // Pass in a custom mutable root resource registration provider for the remaining host model ops boot.
            // This will be used to make sure that any extensions added in parallel get registered in the host model
            if (ok) {
                HostControllerLogger.ROOT_LOGGER.debug("Invoking remaining host.xml ops");
                ok = boot(hostBootOps, true, true, new MutableRootResourceRegistrationProvider() {
                    public ManagementResourceRegistration getRootResourceRegistrationForUpdate(OperationContext context) {
                        return hostModelRegistration;
                    }
                });
            }
            final RunningMode currentRunningMode = runningModeControl.getRunningMode();
            if (ok) {
                // Now we know our management interface configuration. Install the server inventory
                Future<ServerInventory> inventoryFuture = installServerInventory(serviceTarget);
                // Now we know our discovery configuration.
                List<DiscoveryOption> discoveryOptions = hostControllerInfo.getRemoteDomainControllerDiscoveryOptions();
                if (hostControllerInfo.isMasterDomainController() && (discoveryOptions != null)) {
                    // Install the discovery service
                    installDiscoveryService(serviceTarget, discoveryOptions);
                }
                boolean useLocalDomainXml = hostControllerInfo.isMasterDomainController();
                boolean isCachedDc = environment.isUseCachedDc();
                if (!useLocalDomainXml) {
                    // Block for the ServerInventory
                    establishServerInventory(inventoryFuture);
                    boolean discoveryConfigured = (discoveryOptions != null) && !discoveryOptions.isEmpty();
                    if (currentRunningMode != RunningMode.ADMIN_ONLY) {
                        if (discoveryConfigured) {
                            // Try and connect.
                            // If can't connect && !environment.isUseCachedDc(), abort.
                            // Otherwise if can't connect, use local domain.xml and start trying to reconnect later
                            DomainConnectResult connectResult = connectToDomainMaster(serviceTarget, currentRunningMode, isCachedDc, false);
                            if (connectResult == DomainConnectResult.ABORT) {
                                ok = false;
                            } else if (connectResult == DomainConnectResult.FAILED) {
                                useLocalDomainXml = true;
                            }
                        } else {
                            // Invalid configuration; no way to get the domain config
                            ROOT_LOGGER.noDomainControllerConfigurationProvided(currentRunningMode,
                                    CommandLineConstants.ADMIN_ONLY, RunningMode.ADMIN_ONLY);
                            SystemExiter.abort(ExitCodes.HOST_CONTROLLER_ABORT_EXIT_CODE);
                        }
                    } else {
                        // We're in admin-only mode. See how we handle access control config.
                        // If cached-dc is specified, we try and use the last configuration we have before failing.
                        if (isCachedDc) {
                            useLocalDomainXml = true;
                        }
                        switch (hostControllerInfo.getAdminOnlyDomainConfigPolicy()) {
                            case ALLOW_NO_CONFIG:
                                // our current setup is good; if we're using --cached-dc we'll try and load the config below,
                                // if not, we'll start empty.
                                break;
                            case FETCH_FROM_MASTER:
                                if (discoveryConfigured) {
                                    // Try and connect.
                                    // If can't connect && !environment.isUseCachedDc(), abort.
                                    // Otherwise if can't connect, use local domain.xml but DON'T start trying to reconnect later
                                    DomainConnectResult connectResult = connectToDomainMaster(serviceTarget, currentRunningMode, isCachedDc, true);
                                    ok = connectResult != DomainConnectResult.ABORT;
                                } else {
                                    // try and use a local cached version below before failing
                                    if (isCachedDc) {
                                        break;
                                    }
                                    // otherwise, this is an invalid configuration; no way to get the domain config
                                    ROOT_LOGGER.noDomainControllerConfigurationProvidedForAdminOnly(
                                            ModelDescriptionConstants.ADMIN_ONLY_POLICY,
                                            AdminOnlyDomainConfigPolicy.REQUIRE_LOCAL_CONFIG,
                                            CommandLineConstants.CACHED_DC, RunningMode.ADMIN_ONLY);
                                    SystemExiter.abort(ExitCodes.HOST_CONTROLLER_ABORT_EXIT_CODE);
                                    break;
                                }
                                break;
                            case REQUIRE_LOCAL_CONFIG:
                                // if we have a cached copy, and --cached-dc, we can try to use that below
                                if (isCachedDc) {
                                    break;
                                }
                                // otherwise, this is an invalid configuration; no way to get the domain config
                                ROOT_LOGGER.noAccessControlConfigurationAvailable(currentRunningMode,
                                        ModelDescriptionConstants.ADMIN_ONLY_POLICY,
                                        AdminOnlyDomainConfigPolicy.REQUIRE_LOCAL_CONFIG,
                                        CommandLineConstants.CACHED_DC, currentRunningMode);
                                SystemExiter.abort(ExitCodes.HOST_CONTROLLER_ABORT_EXIT_CODE);
                                break;
                            default:
                                throw new IllegalStateException(hostControllerInfo.getAdminOnlyDomainConfigPolicy().toString());
                        }
                    }
                }
                if (useLocalDomainXml) {
                    if (!hostControllerInfo.isMasterDomainController() && isCachedDc) {
                        ROOT_LOGGER.usingCachedDC(CommandLineConstants.CACHED_DC, ConfigurationPersisterFactory.CACHED_DOMAIN_XML);
                    }
                    // parse the domain.xml and load the steps
                    // TODO look at having LocalDomainControllerAdd do this, using Stage.IMMEDIATE for the steps
                    ConfigurationPersister domainPersister = hostControllerConfigurationPersister.getDomainPersister();
                    // if we're using --cached-dc, we have to have had a persisted copy of the domain config for this to work,
                    // otherwise we fail and can't continue.
                    List<ModelNode> domainBootOps = domainPersister.load();
                    HostControllerLogger.ROOT_LOGGER.debug("Invoking domain.xml ops");
                    // https://issues.jboss.org/browse/WFCORE-3897
                    domainConfigAvailable.set(true);
                    ok = boot(domainBootOps, false);
                    domainConfigAvailable.set(ok);
                    if (!ok && runningModeControl.getRunningMode().equals(RunningMode.ADMIN_ONLY)) {
                        ROOT_LOGGER.reportAdminOnlyDomainXmlFailure();
                        ok = true;
                    }
                    if (ok && processType != ProcessType.EMBEDDED_HOST_CONTROLLER) {
                        InternalExecutor executor = new InternalExecutor();
                        ManagementRemotingServices.installManagementChannelServices(serviceTarget,
                                ManagementRemotingServices.MANAGEMENT_ENDPOINT,
                                new MasterDomainControllerOperationHandlerService(this, executor, executor,
                                        environment.getDomainTempDir(), this, domainHostExcludeRegistry),
                                DomainModelControllerService.SERVICE_NAME, ManagementRemotingServices.DOMAIN_CHANNEL,
                                HC_EXECUTOR_SERVICE_NAME, HC_SCHEDULED_EXECUTOR_SERVICE_NAME);
                        // Block for the ServerInventory
                        establishServerInventory(inventoryFuture);
                    }
                    // register local host controller
                    final String hostName = hostControllerInfo.getLocalHostName();
                    slaveHostRegistrations.registerHost(hostName, null, "local");
                }
            }
            if (ok && hostControllerInfo.getAdminOnlyDomainConfigPolicy() != AdminOnlyDomainConfigPolicy.ALLOW_NO_CONFIG) {
                // Validate the assembled domain model before exposing management services
                final ModelNode validate = new ModelNode();
                validate.get(OP).set("validate");
                validate.get(OP_ADDR).setEmptyList();
                final ModelNode result = internalExecute(OperationBuilder.create(validate).build(),
                        OperationMessageHandler.DISCARD, OperationTransactionControl.COMMIT,
                        new OperationStepHandler() {
                            @Override
                            public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
                                DomainModelIncludesValidator.validateAtBoot(context, operation);
                            }
                        }).getResponseNode();
                if (!SUCCESS.equals(result.get(OUTCOME).asString())) {
                    throw HostControllerLogger.ROOT_LOGGER.bootConfigValidationFailed(result.get(FAILURE_DESCRIPTION));
                }
            }
            if (ok && processType != ProcessType.EMBEDDED_HOST_CONTROLLER) {
                // Install the server > host operation handler
                ServerToHostOperationHandlerFactoryService.install(serviceTarget, ServerInventoryService.SERVICE_NAME,
                        getExecutorServiceInjector().getValue(), new InternalExecutor(), this, expressionResolver,
                        environment.getDomainTempDir());
                // demand native mgmt services
                final ServiceBuilder nativeSB = serviceTarget.addService(ServiceName.JBOSS.append("native-mgmt-startup"), Service.NULL);
                nativeSB.requires(ManagementRemotingServices.channelServiceName(
                        ManagementRemotingServices.MANAGEMENT_ENDPOINT, ManagementRemotingServices.SERVER_CHANNEL));
                nativeSB.install();
                // demand http mgmt services
                if (capabilityRegistry.hasCapability(
                        UndertowHttpManagementService.EXTENSIBLE_HTTP_MANAGEMENT_CAPABILITY.getName(), CapabilityScope.GLOBAL)) {
                    final ServiceBuilder httpSB = serviceTarget.addService(ServiceName.JBOSS.append("http-mgmt-startup"), Service.NULL);
                    httpSB.requires(UndertowHttpManagementService.SERVICE_NAME);
                    httpSB.install();
                }
                reachedServers = true;
                if (currentRunningMode == RunningMode.NORMAL) {
                    startServers();
                }
            }
        } catch (Exception e) {
            ROOT_LOGGER.caughtExceptionDuringBoot(e);
            // Once servers have been started we do not fail the boot for a late exception
            if (!reachedServers) {
                ok = false;
            }
        } finally {
            if (ok) {
                try {
                    finishBoot();
                } finally {
                    // Trigger the started message
                    Notification notification = new Notification(ModelDescriptionConstants.BOOT_COMPLETE_NOTIFICATION,
                            PathAddress.pathAddress(PathElement.pathElement(CORE_SERVICE, MANAGEMENT),
                                    PathElement.pathElement(SERVICE, MANAGEMENT_OPERATIONS)),
                            ControllerLogger.MGMT_OP_LOGGER.bootComplete());
                    getNotificationSupport().emit(notification);
                    bootstrapListener.printBootStatistics();
                }
            } else {
                // Die!
                String failed = ROOT_LOGGER.unsuccessfulBoot();
                ROOT_LOGGER.fatal(failed);
                bootstrapListener.bootFailure(failed);
                // don't exit if we're embedded
                if (processType != ProcessType.EMBEDDED_HOST_CONTROLLER) {
                    SystemExiter.abort(ExitCodes.HOST_CONTROLLER_ABORT_EXIT_CODE);
                }
            }
        }
    }
}
public class CommerceTierPriceEntryPersistenceImpl { /** * Returns the commerce tier price entry with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found . * @ param primaryKey the primary key of the commerce tier price entry * @ return the commerce tier price entry * @ throws NoSuchTierPriceEntryException if a commerce tier price entry with the primary key could not be found */ @ Override public CommerceTierPriceEntry findByPrimaryKey ( Serializable primaryKey ) throws NoSuchTierPriceEntryException { } }
CommerceTierPriceEntry commerceTierPriceEntry = fetchByPrimaryKey ( primaryKey ) ; if ( commerceTierPriceEntry == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchTierPriceEntryException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commerceTierPriceEntry ;
public class LevelDB {

    /**
     * Returns metadata about indices for the given type.
     * Lazily creates and caches a LevelDBTypeInfo per class; concurrent callers
     * may both construct one, but putIfAbsent guarantees a single winner is kept
     * and returned. (computeIfAbsent is not used because the constructor and
     * getTypeAlias can throw checked exceptions.)
     */
    LevelDBTypeInfo getTypeInfo(Class<?> type) throws Exception {
        LevelDBTypeInfo ti = types.get(type);
        if (ti == null) {
            LevelDBTypeInfo tmp = new LevelDBTypeInfo(this, type, getTypeAlias(type));
            // Keep whichever instance won the race; discard our tmp if we lost.
            ti = types.putIfAbsent(type, tmp);
            if (ti == null) {
                ti = tmp;
            }
        }
        return ti;
    }
}
public class SimpleMMcifConsumer {

    /**
     * Here we link entities to chains.
     * Also if entities are not present in file, this initialises the entities with
     * some heuristics, see {@link org.biojava.nbio.structure.io.EntityFinder}.
     * Finally logs any annotated entity left without chains.
     */
    private void linkEntities() {
        for (int i = 0; i < allModels.size(); i++) {
            for (Chain chain : allModels.get(i)) {
                String entityId = asymId2entityId.get(chain.getId());
                if (entityId == null) {
                    // this can happen for instance if the cif file didn't have _struct_asym category at all
                    // and thus we have no asymId2entityId mapping at all
                    logger.info("No entity id could be found for chain {}", chain.getId());
                    continue;
                }
                int eId = Integer.parseInt(entityId);
                // Entities are not added for non-polymeric entities; if a chain is non-polymeric its entity won't be found.
                // TODO: add all entities and unique compounds and add methods to directly get polymer or non-polymer
                // asyms (chains). Either create a unique StructureImpl or modify existing for a better representation of the
                // mmCIF internal data structures but is compatible with Structure interface.
                // Some examples of PDB entries with this kind of problem:
                //  - 2uub: asym_id X, chainName Z, entity_id 24: fully non-polymeric but still with its own chainName
                //  - 3o6j: asym_id K, chainName Z, entity_id 6: a single water molecule
                //  - 1dz9: asym_id K, chainName K, entity_id 6: a potassium ion alone
                EntityInfo entityInfo = structure.getEntityById(eId);
                if (entityInfo == null) {
                    // Supports the case where the only chain members were from a non-polymeric entity that is missing.
                    // Solved by creating a new Compound (entity) to which this chain will belong.
                    logger.info("Could not find an Entity for entity_id {}, for chain id {}, creating a new Entity.",
                            eId, chain.getId());
                    entityInfo = new EntityInfo();
                    entityInfo.setMolId(eId);
                    entityInfo.addChain(chain);
                    if (chain.isWaterOnly()) {
                        entityInfo.setType(EntityType.WATER);
                    } else {
                        entityInfo.setType(EntityType.NONPOLYMER);
                    }
                    chain.setEntityInfo(entityInfo);
                    structure.addEntityInfo(entityInfo);
                } else {
                    logger.debug("Adding chain with chain id {} (auth id {}) to Entity with entity_id {}",
                            chain.getId(), chain.getName(), eId);
                    entityInfo.addChain(chain);
                    chain.setEntityInfo(entityInfo);
                }
            }
        }
        // if no entity information was present in file we then go and find the entities heuristically with EntityFinder
        List<EntityInfo> entityInfos = structure.getEntityInfos();
        if (entityInfos == null || entityInfos.isEmpty()) {
            // Partition every model's chains into polymer / non-polymer / water groups
            List<List<Chain>> polyModels = new ArrayList<>();
            List<List<Chain>> nonPolyModels = new ArrayList<>();
            List<List<Chain>> waterModels = new ArrayList<>();
            for (List<Chain> model : allModels) {
                List<Chain> polyChains = new ArrayList<>();
                List<Chain> nonPolyChains = new ArrayList<>();
                List<Chain> waterChains = new ArrayList<>();
                polyModels.add(polyChains);
                nonPolyModels.add(nonPolyChains);
                waterModels.add(waterChains);
                for (Chain c : model) {
                    // we only have entities for polymeric chains, all others are ignored for assigning entities
                    if (c.isWaterOnly()) {
                        waterChains.add(c);
                    } else if (c.isPureNonPolymer()) {
                        nonPolyChains.add(c);
                    } else {
                        polyChains.add(c);
                    }
                }
            }
            entityInfos = EntityFinder.findPolyEntities(polyModels);
            EntityFinder.createPurelyNonPolyEntities(nonPolyModels, waterModels, entityInfos);
            structure.setEntityInfos(entityInfos);
        }
        // final sanity check: it can happen that from the annotated entities some are not linked to any chains,
        // e.g. 3s26: a sugar entity does not have any chains associated to it (it seems to be happening with many
        // sugar compounds). We simply log it; this can signal other problems if the entities are used down the line.
        for (EntityInfo e : entityInfos) {
            if (e.getChains().isEmpty()) {
                logger.info("Entity {} '{}' has no chains associated to it",
                        e.getMolId() < 0 ? "with no entity id" : e.getMolId(), e.getDescription());
            }
        }
    }
}
public class TcpIpHandlerAdapter {

    /**
     * Greets a newly opened session: writes the welcome banner, a line
     * terminator, and the command prompt, then delegates to the superclass.
     *
     * @see org.apache.mina.core.service.IoHandlerAdapter#sessionOpened(org.apache.mina.core.session.IoSession)
     */
    @Override
    public final void sessionOpened(final IoSession session) throws Exception {
        session.write(this.welcome);
        session.write(ENDLINE);
        session.write("> ");
        super.sessionOpened(session);
    }
}
public class IterUtil { /** * 将键列表和值列表转换为Map < br > * 以键为准 , 值与键位置需对应 。 如果键元素数多于值元素 , 多余部分值用null代替 。 < br > * 如果值多于键 , 忽略多余的值 。 * @ param < K > 键类型 * @ param < V > 值类型 * @ param keys 键列表 * @ param values 值列表 * @ param isOrder 是否有序 * @ return 标题内容Map * @ since 4.1.12 */ public static < K , V > Map < K , V > toMap ( Iterable < K > keys , Iterable < V > values , boolean isOrder ) { } }
return toMap ( null == keys ? null : keys . iterator ( ) , null == values ? null : values . iterator ( ) , isOrder ) ;
public class ScaleTypeDrawable { /** * Sets the focus point . * If ScaleType . FOCUS _ CROP is used , focus point will attempted to be centered within a view . * Each coordinate is a real number in [ 0,1 ] range , in the coordinate system where top - left * corner of the image corresponds to ( 0 , 0 ) and the bottom - right corner corresponds to ( 1 , 1 ) . * @ param focusPoint focus point of the image */ public void setFocusPoint ( PointF focusPoint ) { } }
if ( Objects . equal ( mFocusPoint , focusPoint ) ) { return ; } if ( mFocusPoint == null ) { mFocusPoint = new PointF ( ) ; } mFocusPoint . set ( focusPoint ) ; configureBounds ( ) ; invalidateSelf ( ) ;
public class IOStreamConnector {

    /**
     * Stops the connector: clears the running flag and interrupts the worker
     * thread if one was started.
     *
     * public IOStreamConnectorState getState() { return state; }
     */
    public void close() {
        // No worker thread was ever started, so mark the connector closed directly.
        // NOTE(review): presumably 'closed' is otherwise set by the worker thread
        // when it exits — confirm against the run loop.
        if (thread == null) {
            closed = true;
        }
        // Signal the copy loop to stop, then interrupt any blocking I/O.
        running = false;
        if (thread != null) {
            thread.interrupt();
        }
    }
}
public class TemporalAdjusters {

    /**
     * Obtains a {@code TemporalAdjuster} that wraps a date adjuster.
     * <p>
     * The {@code TemporalAdjuster} is based on the low level {@code Temporal}
     * interface. This method allows an adjustment from {@code LocalDate} to
     * {@code LocalDate} to be wrapped to match the temporal-based interface.
     * This is provided for convenience to make user-written adjusters simpler.
     * In general, user-written adjusters should be static constants:
     * <pre>{@code
     *  static TemporalAdjuster TWO_DAYS_LATER =
     *      TemporalAdjusters.ofDateAdjuster(date -> date.plusDays(2));
     * }</pre>
     *
     * @param dateBasedAdjuster the date-based adjuster, not null
     * @return the temporal adjuster wrapping the date adjuster, not null
     * @throws NullPointerException if {@code dateBasedAdjuster} is null
     */
    public static TemporalAdjuster ofDateAdjuster(UnaryOperator<LocalDate> dateBasedAdjuster) {
        Objects.requireNonNull(dateBasedAdjuster, "dateBasedAdjuster");
        // The source was truncated here: the lambda's closing brace was missing.
        // Convert the temporal to a LocalDate, apply the user adjuster, then
        // write the resulting date back into the original temporal type.
        return (temporal) -> {
            LocalDate input = LocalDate.from(temporal);
            LocalDate output = dateBasedAdjuster.apply(input);
            return temporal.with(output);
        };
    }
}
public class ResourceadapterTypeImpl { /** * Returns all < code > security - permission < / code > elements * @ return list of < code > security - permission < / code > */ public List < SecurityPermissionType < ResourceadapterType < T > > > getAllSecurityPermission ( ) { } }
List < SecurityPermissionType < ResourceadapterType < T > > > list = new ArrayList < SecurityPermissionType < ResourceadapterType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "security-permission" ) ; for ( Node node : nodeList ) { SecurityPermissionType < ResourceadapterType < T > > type = new SecurityPermissionTypeImpl < ResourceadapterType < T > > ( this , "security-permission" , childNode , node ) ; list . add ( type ) ; } return list ;
public class SaverDef {

    /**
     * <pre>
     * The operation to run when saving a model checkpoint.
     * </pre>
     *
     * <code>optional string save_tensor_name = 2;</code>
     *
     * Protobuf lazy-decode pattern: the field holds either a decoded String or
     * the raw ByteString; the decoded form is cached back into the field.
     */
    public java.lang.String getSaveTensorName() {
        java.lang.Object ref = saveTensorName_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call.
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded string so later calls skip the UTF-8 conversion.
            saveTensorName_ = s;
            return s;
        }
    }
}
public class ManageAddOnsDialog {

    /**
     * Notifies that the given {@code addOn} was not successfully uninstalled.
     * Add-ons that were not successfully uninstalled are not re-selectable.
     *
     * @param addOn the add-on that was not successfully uninstalled
     * @since 2.4.0
     */
    public void notifyAddOnFailedUninstallation(final AddOn addOn) {
        // Swing model updates must happen on the EDT; re-dispatch this call
        // onto it if we are on any other thread.
        if (EventQueue.isDispatchThread()) {
            installedAddOnsModel.notifyAddOnFailedUninstallation(addOn);
        } else {
            EventQueue.invokeLater(new Runnable() {

                @Override
                public void run() {
                    notifyAddOnFailedUninstallation(addOn);
                }
            });
        }
    }
}
public class X509CertImpl {

    /**
     * DER encode this object onto an output stream.
     * Implements the <code>DerEncoder</code> interface.
     *
     * @param out the output stream on which to write the DER encoding.
     * @exception IOException on encoding error.
     */
    public void derEncode(OutputStream out) throws IOException {
        if (signedCert == null)
            throw new IOException("Null certificate to encode");
        // The clone() appears to be a defensive copy: write(byte[]) hands the
        // array reference to a possibly untrusted OutputStream, which could
        // otherwise mutate the internal encoded-certificate buffer — confirm.
        out.write(signedCert.clone());
    }
}
public class CircularSlider {

    /**
     * Common initializer: reads the styled attributes, applies them via the
     * setters, and derives the internal padding from the view's padding.
     *
     * @param context the view context used to resolve attributes
     * @param attrs the XML attribute set, may carry CircularSlider styleables
     * @param defStyleAttr the default style attribute
     */
    private void init(Context context, AttributeSet attrs, int defStyleAttr) {
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircularSlider, defStyleAttr, 0);
        // read all available attributes
        float startAngle = a.getFloat(R.styleable.CircularSlider_start_angle, (float) Math.PI / 2);
        float angle = a.getFloat(R.styleable.CircularSlider_angle, (float) Math.PI / 2);
        int thumbSize = a.getDimensionPixelSize(R.styleable.CircularSlider_thumb_size, 50);
        int thumbColor = a.getColor(R.styleable.CircularSlider_thumb_color, Color.GRAY);
        int borderThickness = a.getDimensionPixelSize(R.styleable.CircularSlider_border_thickness, 20);
        int borderColor = a.getColor(R.styleable.CircularSlider_border_color, Color.RED);
        String borderGradientColors = a.getString(R.styleable.CircularSlider_border_gradient_colors);
        Drawable thumbImage = a.getDrawable(R.styleable.CircularSlider_thumb_image);
        // save those to fields (really, do we need setters here..?)
        setStartAngle(startAngle);
        setAngle(angle);
        setBorderThickness(borderThickness);
        setBorderColor(borderColor);
        // gradient colors are declared as a single ';'-separated attribute string
        if (borderGradientColors != null) {
            setBorderGradientColors(borderGradientColors.split(";"));
        }
        setThumbSize(thumbSize);
        setThumbImage(thumbImage);
        setThumbColor(thumbColor);
        // assign padding - check for version because of RTL layout compatibility
        int padding;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            // RTL-aware devices also report start/end padding, so average over six values
            int all = getPaddingLeft() + getPaddingRight() + getPaddingBottom() + getPaddingTop()
                    + getPaddingEnd() + getPaddingStart();
            padding = all / 6;
        } else {
            padding = (getPaddingLeft() + getPaddingRight() + getPaddingBottom() + getPaddingTop()) / 4;
        }
        setPadding(padding);
        // Always recycle the TypedArray once the attributes are consumed.
        a.recycle();
    }
}
public class BundleUtils {

    /**
     * Returns an optional {@link java.lang.String} array value mapped by {@code key}.
     * Delegates to the three-argument overload with {@code new String[0]} as the
     * fallback value.
     * The bundle argument is allowed to be {@code null}.
     *
     * NOTE(review): the original javadoc claimed {@code null} is returned when the
     * value is missing, but the fallback passed here is an empty array — confirm
     * which contract callers (and the three-argument overload) actually expect.
     *
     * @param bundle a bundle, may be null.
     * @param key a key for the value.
     * @return a {@link java.lang.String} array value if it exists, otherwise the fallback.
     * @see android.os.Bundle#getStringArray(String)
     */
    @Nullable
    public static String[] optStringArray(@Nullable Bundle bundle, @Nullable String key) {
        return optStringArray(bundle, key, new String[0]);
    }
}
public class CmsResourceFilter {

    /**
     * Validates if a CmsResource fits all filter settings.<p>
     *
     * Please note that the "visible permission" setting of the filter is NOT
     * checked in this method since the permission information is not part of
     * the resource. The visible permission information in the filter will be
     * used in the permission checks.
     *
     * @param context the current request context
     * @param resource the resource to be validated
     * @return true if the resource passes all validations, false otherwise
     */
    public boolean isValid(CmsRequestContext context, CmsResource resource) {
        if (this == ALL) {
            // shortcut for "ALL" filter where nothing is filtered
            return true;
        }
        // check for required resource state
        switch (m_filterState) {
            case EXCLUDED:
                if (resource.getState().equals(m_state)) {
                    return false;
                }
                break;
            case REQUIRED:
                if (!resource.getState().equals(m_state)) {
                    return false;
                }
                break;
            default:
                // ignored
        }
        // check for required resource flags (the original comment here was a
        // copy-paste of the "state" comment above)
        switch (m_filterFlags) {
            case EXCLUDED:
                if ((resource.getFlags() & m_flags) != 0) {
                    return false;
                }
                break;
            case REQUIRED:
                if ((resource.getFlags() & m_flags) != m_flags) {
                    return false;
                }
                break;
            default:
                // ignored
        }
        // check for required resource type
        switch (m_filterType) {
            case EXCLUDED:
                if (resource.getTypeId() == m_type) {
                    return false;
                }
                break;
            case REQUIRED:
                if (resource.getTypeId() != m_type) {
                    return false;
                }
                break;
            default:
                // ignored
        }
        if (m_onlyFolders != null) {
            if (m_onlyFolders.booleanValue()) {
                if (!resource.isFolder()) {
                    // only folder resources are allowed
                    return false;
                }
            } else {
                if (resource.isFolder()) {
                    // no folder resources are allowed
                    return false;
                }
            }
        }
        // check if the resource was last modified within the given time range
        if (m_filterLastModified) {
            if ((m_modifiedAfter > 0L) && (resource.getDateLastModified() < m_modifiedAfter)) {
                return false;
            }
            if ((m_modifiedBefore > 0L) && (resource.getDateLastModified() > m_modifiedBefore)) {
                return false;
            }
        }
        // check if the resource expires within the given time range
        if (m_filterExpire) {
            if ((m_expireAfter > 0L) && (resource.getDateExpired() < m_expireAfter)) {
                return false;
            }
            if ((m_expireBefore > 0L) && (resource.getDateExpired() > m_expireBefore)) {
                return false;
            }
        }
        // check if the resource is released within the given time range
        if (m_filterRelease) {
            if ((m_releaseAfter > 0L) && (resource.getDateReleased() < m_releaseAfter)) {
                return false;
            }
            if ((m_releaseBefore > 0L) && (resource.getDateReleased() > m_releaseBefore)) {
                return false;
            }
        }
        // check if the resource is currently released and not expired
        if (m_filterTimerange && !resource.isReleasedAndNotExpired(context.getRequestTime())) {
            return false;
        }
        // everything is ok, so return true
        return true;
    }
}
public class RhinoSecurityManager { /** * Get the class of the top - most stack element representing a script . * @ return The class of the top - most script in the current stack , * or null if no script is currently running */ protected Class < ? > getCurrentScriptClass ( ) { } }
Class < ? > [ ] context = getClassContext ( ) ; for ( Class < ? > c : context ) { if ( c != InterpretedFunction . class && NativeFunction . class . isAssignableFrom ( c ) || PolicySecurityController . SecureCaller . class . isAssignableFrom ( c ) ) { return c ; } } return null ;
public class SubmissionDocumentRepositoryMongoImpl { /** * { @ inheritDoc } */ @ Override public final Iterable < SubmissionDocument > findAll ( final String filename ) { } }
final Query searchQuery = new Query ( Criteria . where ( "filename" ) . is ( filename ) ) ; final List < SubmissionDocumentMongo > submissionDocumentsMongo = mongoTemplate . find ( searchQuery , SubmissionDocumentMongo . class ) ; if ( submissionDocumentsMongo == null ) { return null ; } final List < SubmissionDocument > submissionDocuments = new ArrayList < > ( ) ; for ( final SubmissionDocument submDocument : submissionDocumentsMongo ) { final Submission submission = ( Submission ) toObjConverter . createGedObject ( null , submDocument ) ; submDocument . setGedObject ( submission ) ; submissionDocuments . add ( submDocument ) ; } return submissionDocuments ;
public class DoubleProperty { /** * Retrieves the value of this double property . If the property has no * value , returns the default value . If there is no default value , returns * the given default value . In all cases , the returned value is limited to * the min / max value range given during construction . */ public double get ( double defaultValue ) { } }
final String value = getInternal ( Double . toString ( defaultValue ) , false ) ; if ( value == null ) { return limit ( defaultValue ) ; } double v = Double . parseDouble ( value ) ; // need to limit value in case setString ( ) was called directly with // an out - of - range value return limit ( v ) ;
public class OWLOntologyID_CustomFieldSerializer {

    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     *
     * @param streamReader the {@link com.google.gwt.user.client.rpc.SerializationStreamReader} to read the
     *                     object's content from
     * @param instance     the object instance to deserialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *                     if the deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader, OWLOntologyID instance) throws SerializationException {
        // Delegate to the serializer's static helper; all actual field reading
        // happens there, following the GWT custom-field-serializer convention.
        deserialize(streamReader, instance);
    }
}
public class WorkflowProgress {

    /**
     * Gets the steps value for this WorkflowProgress.
     *
     * @return steps — the steps in this workflow
     */
    public com.google.api.ads.admanager.axis.v201808.ProgressStep[] getSteps() {
        // Plain accessor; returns the internal array reference without copying.
        return steps;
    }
}
public class DocImpl { /** * Utility for subclasses which read HTML documentation files . */ String readHTMLDocumentation ( InputStream input , FileObject filename ) throws IOException { } }
byte [ ] filecontents = new byte [ input . available ( ) ] ; try { DataInputStream dataIn = new DataInputStream ( input ) ; dataIn . readFully ( filecontents ) ; } finally { input . close ( ) ; } String encoding = env . getEncoding ( ) ; String rawDoc = ( encoding != null ) ? new String ( filecontents , encoding ) : new String ( filecontents ) ; Pattern bodyPat = Pattern . compile ( "(?is).*<body\\b[^>]*>(.*)</body\\b.*" ) ; Matcher m = bodyPat . matcher ( rawDoc ) ; if ( m . matches ( ) ) { return m . group ( 1 ) ; } else { String key = rawDoc . matches ( "(?is).*<body\\b.*" ) ? "javadoc.End_body_missing_from_html_file" : "javadoc.Body_missing_from_html_file" ; env . error ( SourcePositionImpl . make ( filename , Position . NOPOS , null ) , key ) ; return "" ; }
public class RandomCollection {

    /**
     * Resets this object so it behaves like a newly constructed instance.
     */
    @Override
    public void reset() {
        // Rebuild the priority list as a fresh copy of the original elements,
        // shuffled so that each reset produces a new random ordering.
        this.priorityElements = Lists.newArrayList(originalElements);
        Collections.shuffle(priorityElements);
        // Current elements start as a copy of the originals; the expected size
        // of twice the original count leaves room to grow without reallocation.
        this.currentElements = Lists.newArrayListWithExpectedSize(2 * originalElements.size());
        this.currentElements.addAll(originalElements);
    }
}
public class Graph { /** * Adds a new edge to this graph , given a new Edge object . It uses the GraphStructure attribute ' s * addEdge method to add the new edge to this graph . If one of the node ids of the given Edge object * does not exists in this graph , a NodeNotFoundException is thrown . Otherwise , the edge is successfully * added to this graph . * @ param e the new edge to be added to the graph . */ public void addEdge ( Edge e ) { } }
structure . addEdge ( e ) ; for ( EdgeListener listener : edgeListeners ) listener . onInsert ( e ) ;
public class MediaTypes { /** * Merge with this one . * @ param types Types * @ return Merged list */ public MediaTypes merge ( final MediaTypes types ) { } }
final SortedSet < MediaType > set = new TreeSet < > ( ) ; set . addAll ( this . list ) ; set . addAll ( types . list ) ; return new MediaTypes ( set ) ;
public class PartialResponseWriter { /** * < p class = " changed _ added _ 2_0 " > Write the start of a partial response . < / p > * @ throws IOException if an input / output error occurs * @ since 2.0 */ public void startDocument ( ) throws IOException { } }
ResponseWriter writer = getWrapped ( ) ; String encoding = writer . getCharacterEncoding ( ) ; if ( encoding == null ) { encoding = "utf-8" ; } writer . writePreamble ( "<?xml version='1.0' encoding='" + encoding + "'?>\n" ) ; writer . startElement ( "partial-response" , null ) ; FacesContext ctx = FacesContext . getCurrentInstance ( ) ; if ( null != ctx && null != ctx . getViewRoot ( ) ) { String id = ctx . getViewRoot ( ) . getContainerClientId ( ctx ) ; writer . writeAttribute ( "id" , id , "id" ) ; }