signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ReflectionUtils { /** * Returns this element ' s annotation for the specified type if
* such an annotation is < em > present < / em > , else null .
* @ param < T > the type of the annotation to query for and return if present
* @ param clazz Class to get annotation from
* @ param annotationClass the Class object corresponding to the
* annotation type
* @ return this element ' s annotation for the specified annotation type if
* present on this element , else null
* @ throws NullPointerException if the given annotation class is null
* @ since 1.5 */
public static < T extends Annotation > T getAnnotation ( Class < ? > clazz , Class < T > annotationClass ) { } } | assertReflectionAccessor ( ) ; return accessor . getAnnotation ( clazz , annotationClass ) ; |
public class Criteria { /** * The < code > in < / code > operator is analogous to the SQL IN modifier , allowing you
* to specify an array of possible matches .
* @ param c the collection containing the values to match against
* @ return the criteria */
public Criteria in ( Collection < ? > c ) { } } | notNull ( c , "collection can not be null" ) ; this . criteriaType = RelationalOperator . IN ; this . right = new ValueListNode ( c ) ; return this ; |
public class UserVerificationService { /** * Verifies the identity of the given user via the Duo multi - factor
* authentication service . If a signed response from Duo has not already
* been provided , a signed response from Duo is requested in the
* form of additional expected credentials . Any provided signed response
* is cryptographically verified . If no signed response is present , or the
* signed response is invalid , an exception is thrown .
* @ param authenticatedUser
* The user whose identity should be verified against Duo .
* @ throws GuacamoleException
* If required Duo - specific configuration options are missing or
* malformed , or if the user ' s identity cannot be verified . */
public void verifyAuthenticatedUser ( AuthenticatedUser authenticatedUser ) throws GuacamoleException { } } | // Pull the original HTTP request used to authenticate
Credentials credentials = authenticatedUser . getCredentials ( ) ; HttpServletRequest request = credentials . getRequest ( ) ; // Ignore anonymous users
if ( authenticatedUser . getIdentifier ( ) . equals ( AuthenticatedUser . ANONYMOUS_IDENTIFIER ) ) return ; // Retrieve signed Duo response from request
String signedResponse = request . getParameter ( DuoSignedResponseField . PARAMETER_NAME ) ; // If no signed response , request one
if ( signedResponse == null ) { // Create field which requests a signed response from Duo that
// verifies the identity of the given user via the configured
// Duo API endpoint
Field signedResponseField = new DuoSignedResponseField ( confService . getAPIHostname ( ) , duoService . createSignedRequest ( authenticatedUser ) ) ; // Create an overall description of the additional credentials
// required to verify identity
CredentialsInfo expectedCredentials = new CredentialsInfo ( Collections . singletonList ( signedResponseField ) ) ; // Request additional credentials
throw new GuacamoleInsufficientCredentialsException ( "LOGIN.INFO_DUO_AUTH_REQUIRED" , expectedCredentials ) ; } // If signed response does not verify this user ' s identity , abort auth
if ( ! duoService . isValidSignedResponse ( authenticatedUser , signedResponse ) ) throw new GuacamoleClientException ( "LOGIN.INFO_DUO_VALIDATION_CODE_INCORRECT" ) ; |
public class PanelGridRenderer { /** * Read the colSpans attribute .
* @ return a integer array
* @ throws FacesException
* if the attribute is missing or invalid . */
protected int [ ] getColSpanArray ( PanelGrid panelGrid ) { } } | String columnsCSV = panelGrid . getColSpans ( ) ; if ( null == columnsCSV || columnsCSV . trim ( ) . length ( ) == 0 ) { columnsCSV = panelGrid . getColumns ( ) ; if ( "1" . equals ( columnsCSV ) ) { columnsCSV = "12" ; } else if ( "2" . equals ( columnsCSV ) ) { columnsCSV = "6,6" ; } else if ( "3" . equals ( columnsCSV ) ) { columnsCSV = "4,4,4" ; } else if ( "4" . equals ( columnsCSV ) ) { columnsCSV = "3,3,3,3" ; } else if ( "6" . equals ( columnsCSV ) ) { columnsCSV = "2,2,2,2,2,2" ; } else if ( "12" . equals ( columnsCSV ) ) { columnsCSV = "1,1,1,1,1,1,1,1,1,1,1,1" ; } else { throw new FacesException ( "Error at " + panelGrid . getClientId ( ) + ". PanelGrid.columns attribute: Got " + columnsCSV + ". Legal values are 1, 2, 3, 4, 6 and 12. If you need a different number of columns, please use the attribute 'col-spans'." ) ; } } if ( null == columnsCSV || columnsCSV . trim ( ) . length ( ) == 0 ) { throw new FacesException ( "PanelGrid.colSpans attribute: Please provide a comma-separated list of integer values" ) ; } String [ ] columnList = columnsCSV . replaceAll ( " " , "" ) . split ( "," ) ; int [ ] columns = new int [ columnList . length ] ; int sum = 0 ; for ( int i = 0 ; i < columnList . length ; i ++ ) { try { columns [ i ] = ( int ) Integer . valueOf ( columnList [ i ] ) ; sum += columns [ i ] ; } catch ( NumberFormatException error ) { throw new FacesException ( "PanelGrid.colSpans attribute: the list has to consists of integer values" ) ; } } if ( sum != 12 ) { throw new FacesException ( "PanelGrid.colSpans attribute: The columns don't add up to 12" ) ; } return columns ; |
public class NumberMap { /** * Creates a NumberMap for Floats .
* @ param < K >
* @ return NumberMap & lt ; K , Float & gt ; */
public static < K > NumberMap < K , Float > newFloatMap ( ) { } } | return new NumberMap < K , Float > ( ) { @ Override public void add ( K key , Float addend ) { put ( key , containsKey ( key ) ? ( get ( key ) + addend ) : addend ) ; } @ Override public void sub ( K key , Float subtrahend ) { put ( key , ( containsKey ( key ) ? get ( key ) : 0f ) - subtrahend ) ; } } ; |
public class Graphics { /** * Get raster buffer from data .
* @ param img The image buffer ( must not be < code > null < / code > ) .
* @ param fr The first red .
* @ param fg The first green .
* @ param fb The first blue .
* @ return The rastered image .
* @ throws LionEngineException If invalid arguments . */
public static ImageBuffer getRasterBuffer ( ImageBuffer img , double fr , double fg , double fb ) { } } | return factoryGraphic . getRasterBuffer ( img , fr , fg , fb ) ; |
public class HdfsTaskLogs { /** * Due to https : / / issues . apache . org / jira / browse / HDFS - 13 " : " are not allowed in
* path names . So we format paths differently for HDFS . */
private Path getTaskReportsFileFromId ( String taskId ) { } } | return new Path ( mergePaths ( config . getDirectory ( ) , taskId . replace ( ':' , '_' ) + ".reports.json" ) ) ; |
public class HtmlDataTable { /** * < p > Set the value of the < code > summary < / code > property . < / p > */
public void setSummary ( java . lang . String summary ) { } } | getStateHelper ( ) . put ( PropertyKeys . summary , summary ) ; handleAttribute ( "summary" , summary ) ; |
public class Utils { /** * Load a string from the given buffer , reading first the two bytes of len
* and then the UTF - 8 bytes of the string .
* @ return the decoded string or null if NEED _ DATA */
static String decodeString ( ByteBuf in ) throws UnsupportedEncodingException { } } | if ( in . readableBytes ( ) < 2 ) { return null ; } // int strLen = Utils . readWord ( in ) ;
int strLen = in . readUnsignedShort ( ) ; if ( in . readableBytes ( ) < strLen ) { return null ; } byte [ ] strRaw = new byte [ strLen ] ; in . readBytes ( strRaw ) ; return new String ( strRaw , "UTF-8" ) ; |
public class QDate { /** * Prints the time in ISO 8601 */
public String printISO8601 ( ) { } } | StringBuilder sb = new StringBuilder ( ) ; if ( _year > 0 ) { sb . append ( ( _year / 1000 ) % 10 ) ; sb . append ( ( _year / 100 ) % 10 ) ; sb . append ( ( _year / 10 ) % 10 ) ; sb . append ( _year % 10 ) ; sb . append ( '-' ) ; sb . append ( ( ( _month + 1 ) / 10 ) % 10 ) ; sb . append ( ( _month + 1 ) % 10 ) ; sb . append ( '-' ) ; sb . append ( ( ( _dayOfMonth + 1 ) / 10 ) % 10 ) ; sb . append ( ( _dayOfMonth + 1 ) % 10 ) ; } long time = _timeOfDay / 1000 ; long ms = _timeOfDay % 1000 ; sb . append ( 'T' ) ; sb . append ( ( time / 36000 ) % 10 ) ; sb . append ( ( time / 3600 ) % 10 ) ; sb . append ( ':' ) ; sb . append ( ( time / 600 ) % 6 ) ; sb . append ( ( time / 60 ) % 10 ) ; sb . append ( ':' ) ; sb . append ( ( time / 10 ) % 6 ) ; sb . append ( ( time / 1 ) % 10 ) ; if ( ms != 0 ) { sb . append ( '.' ) ; sb . append ( ( ms / 100 ) % 10 ) ; sb . append ( ( ms / 10 ) % 10 ) ; sb . append ( ms % 10 ) ; } if ( _zoneName == null ) { sb . append ( "Z" ) ; return sb . toString ( ) ; } // server / 1471 - XXX : was commented out
long offset = _zoneOffset ; if ( offset < 0 ) { sb . append ( "-" ) ; offset = - offset ; } else sb . append ( "+" ) ; sb . append ( ( offset / 36000000 ) % 10 ) ; sb . append ( ( offset / 3600000 ) % 10 ) ; sb . append ( ':' ) ; sb . append ( ( offset / 600000 ) % 6 ) ; sb . append ( ( offset / 60000 ) % 10 ) ; return sb . toString ( ) ; |
public class UScript { /** * Sets code point c ' s Script _ Extensions as script code integers into the output BitSet .
* < ul >
* < li > If c does have Script _ Extensions , then the return value is
* the negative number of Script _ Extensions codes ( = - set . cardinality ( ) ) ;
* in this case , the Script property value
* ( normally Common or Inherited ) is not included in the set .
* < li > If c does not have Script _ Extensions , then the one Script code is put into the set
* and also returned .
* < li > If c is not a valid code point , then the one { @ link # UNKNOWN } code is put into the set
* and also returned .
* < / ul >
* In other words , if the return value is non - negative , it is c ' s single Script code
* and the set contains exactly this Script code .
* If the return value is - n , then the set contains c ' s n & gt ; = 2 Script _ Extensions script codes .
* < p > Some characters are commonly used in multiple scripts .
* For more information , see UAX # 24 : http : / / www . unicode . org / reports / tr24 / .
* @ param c code point
* @ param set set of script code integers ; will be cleared , then bits are set
* corresponding to c ' s Script _ Extensions
* @ return negative number of script codes in c ' s Script _ Extensions ,
* or the non - negative single Script value */
public static final int getScriptExtensions ( int c , BitSet set ) { } } | set . clear ( ) ; int scriptX = UCharacterProperty . INSTANCE . getAdditional ( c , 0 ) & UCharacterProperty . SCRIPT_X_MASK ; if ( scriptX < UCharacterProperty . SCRIPT_X_WITH_COMMON ) { set . set ( scriptX ) ; return scriptX ; } char [ ] scriptExtensions = UCharacterProperty . INSTANCE . m_scriptExtensions_ ; int scx = scriptX & UCharacterProperty . SCRIPT_MASK_ ; // index into scriptExtensions
if ( scriptX >= UCharacterProperty . SCRIPT_X_WITH_OTHER ) { scx = scriptExtensions [ scx + 1 ] ; } int length = 0 ; int sx ; do { sx = scriptExtensions [ scx ++ ] ; set . set ( sx & 0x7fff ) ; ++ length ; } while ( sx < 0x8000 ) ; // length = = set . cardinality ( )
return - length ; |
public class CPDefinitionGroupedEntryUtil { /** * Returns the first cp definition grouped entry in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition grouped entry , or < code > null < / code > if a matching cp definition grouped entry could not be found */
public static CPDefinitionGroupedEntry fetchByUuid_C_First ( String uuid , long companyId , OrderByComparator < CPDefinitionGroupedEntry > orderByComparator ) { } } | return getPersistence ( ) . fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; |
public class MoreCollectors { /** * Returns a { @ code Collector } which collects at most specified number of
* the least stream elements according to the natural order into the
* { @ link List } . The resulting { @ code List } is sorted in natural order
* ( least element is the first ) . The order of equal elements is the same as
* in the input stream .
* The operation performed by the returned collector is equivalent to
* { @ code stream . sorted ( ) . limit ( n ) . collect ( Collectors . toList ( ) ) } , but
* usually performed much faster if { @ code n } is much less than the stream
* size .
* There are no guarantees on the type , mutability , serializability , or
* thread - safety of the { @ code List } returned .
* When supplied { @ code n } is less or equal to zero , this method returns a
* < a href = " package - summary . html # ShortCircuitReduction " > short - circuiting
* collector < / a > which ignores the input and produces an empty list .
* @ param < T > the type of the input elements
* @ param n maximum number of stream elements to preserve
* @ return a collector which returns a { @ code List } containing the least n
* stream elements or less if the stream was shorter . */
public static < T extends Comparable < ? super T > > Collector < T , ? , List < T > > least ( int n ) { } } | return least ( Comparator . < T > naturalOrder ( ) , n ) ; |
public class PortMapping { /** * First check system properties , then the variables given */
private Integer getPortFromProjectOrSystemProperty ( String var ) { } } | String sysProp = System . getProperty ( var ) ; if ( sysProp != null ) { return getAsIntOrNull ( sysProp ) ; } if ( projProperties . containsKey ( var ) ) { return getAsIntOrNull ( projProperties . getProperty ( var ) ) ; } return null ; |
public class StringUtils { /** * < p > Compares two Strings , and returns the portion where they differ .
* ( More precisely , return the remainder of the second String ,
* starting from where it ' s different from the first . ) < / p >
* < p > For example ,
* < code > difference ( " i am a machine " , " i am a robot " ) - > " robot " < / code > . < / p >
* < pre >
* StringUtils . difference ( null , null ) = null
* StringUtils . difference ( " " , " " ) = " "
* StringUtils . difference ( " " , " abc " ) = " abc "
* StringUtils . difference ( " abc " , " " ) = " "
* StringUtils . difference ( " abc " , " abc " ) = " "
* StringUtils . difference ( " ab " , " abxyz " ) = " xyz "
* StringUtils . difference ( " abcde " , " abxyz " ) = " xyz "
* StringUtils . difference ( " abcde " , " xyz " ) = " xyz "
* < / pre >
* @ param str1 the first String , may be null
* @ param str2 the second String , may be null
* @ return the portion of str2 where it differs from str1 ; returns the
* empty String if they are equal
* @ since 2.0 */
public static String difference ( String str1 , String str2 ) { } } | if ( str1 == null ) { return str2 ; } if ( str2 == null ) { return str1 ; } int at = indexOfDifference ( str1 , str2 ) ; if ( at == INDEX_NOT_FOUND ) { return EMPTY ; } return str2 . substring ( at ) ; |
public class ClaimValueInjectionEndpoint { /** * Verify that values exist and that types match the corresponding Claims enum
* @ return a series of pass / fail statements regarding the check for each injected claim */
@ GET @ Path ( "/verifyInjectedOptionalCustomMissing" ) @ Produces ( MediaType . APPLICATION_JSON ) public JsonObject verifyInjectedOptionalCustomMissing ( ) { } } | boolean pass = false ; String msg ; // custom - missing
Optional < Long > customValue = custom . getValue ( ) ; if ( customValue == null ) { msg = "custom-missing value is null, FAIL" ; } else if ( ! customValue . isPresent ( ) ) { msg = "custom-missing PASS" ; pass = true ; } else { msg = String . format ( "custom: %s != %s" , null , customValue . get ( ) ) ; } JsonObject result = Json . createObjectBuilder ( ) . add ( "pass" , pass ) . add ( "msg" , msg ) . build ( ) ; return result ; |
public class AWSDatabaseMigrationServiceClient { /** * Deletes an AWS DMS event subscription .
* @ param deleteEventSubscriptionRequest
* @ return Result of the DeleteEventSubscription operation returned by the service .
* @ throws ResourceNotFoundException
* The resource could not be found .
* @ throws InvalidResourceStateException
* The resource is in a state that prevents it from being used for database migration .
* @ sample AWSDatabaseMigrationService . DeleteEventSubscription
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dms - 2016-01-01 / DeleteEventSubscription " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public DeleteEventSubscriptionResult deleteEventSubscription ( DeleteEventSubscriptionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteEventSubscription ( request ) ; |
public class CmsOUHandler { /** * Gets List of managable OU names for the current user . < p >
* @ param cms CmsObject
* @ return List of String */
public static List < String > getManagableOUs ( CmsObject cms ) { } } | List < String > ous = new ArrayList < String > ( ) ; try { for ( CmsRole role : OpenCms . getRoleManager ( ) . getRolesOfUser ( cms , cms . getRequestContext ( ) . getCurrentUser ( ) . getName ( ) , "" , true , false , true ) ) { if ( role . getRoleName ( ) . equals ( CmsRole . ACCOUNT_MANAGER . getRoleName ( ) ) ) { if ( role . getOuFqn ( ) . equals ( "" ) ) { ous . add ( 0 , role . getOuFqn ( ) ) ; } else { ous . add ( role . getOuFqn ( ) ) ; } } } } catch ( CmsException e ) { } return ous ; |
public class ListVocabulariesResult { /** * A list of objects that describe the vocabularies that match the search criteria in the request .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setVocabularies ( java . util . Collection ) } or { @ link # withVocabularies ( java . util . Collection ) } if you want to
* override the existing values .
* @ param vocabularies
* A list of objects that describe the vocabularies that match the search criteria in the request .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListVocabulariesResult withVocabularies ( VocabularyInfo ... vocabularies ) { } } | if ( this . vocabularies == null ) { setVocabularies ( new java . util . ArrayList < VocabularyInfo > ( vocabularies . length ) ) ; } for ( VocabularyInfo ele : vocabularies ) { this . vocabularies . add ( ele ) ; } return this ; |
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns all the cp definition specification option values where CPDefinitionId = & # 63 ; and CPSpecificationOptionId = & # 63 ; .
* @ param CPDefinitionId the cp definition ID
* @ param CPSpecificationOptionId the cp specification option ID
* @ return the matching cp definition specification option values */
@ Override public List < CPDefinitionSpecificationOptionValue > findByC_CSO ( long CPDefinitionId , long CPSpecificationOptionId ) { } } | return findByC_CSO ( CPDefinitionId , CPSpecificationOptionId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ; |
public class CommerceShippingMethodPersistenceImpl { /** * Returns the commerce shipping method with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found .
* @ param primaryKey the primary key of the commerce shipping method
* @ return the commerce shipping method
* @ throws NoSuchShippingMethodException if a commerce shipping method with the primary key could not be found */
@ Override public CommerceShippingMethod findByPrimaryKey ( Serializable primaryKey ) throws NoSuchShippingMethodException { } } | CommerceShippingMethod commerceShippingMethod = fetchByPrimaryKey ( primaryKey ) ; if ( commerceShippingMethod == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchShippingMethodException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commerceShippingMethod ; |
public class SparkExport { /** * Quick and dirty CSV export : one file per sequence , with shuffling of the order of sequences */
public static void exportCSVSequenceLocal ( File baseDir , JavaRDD < List < List < Writable > > > sequences , long seed ) throws Exception { } } | baseDir . mkdirs ( ) ; if ( ! baseDir . isDirectory ( ) ) throw new IllegalArgumentException ( "File is not a directory: " + baseDir . toString ( ) ) ; String baseDirStr = baseDir . toString ( ) ; List < String > fileContents = sequences . map ( new SequenceToStringFunction ( "," ) ) . collect ( ) ; if ( ! ( fileContents instanceof ArrayList ) ) fileContents = new ArrayList < > ( fileContents ) ; Collections . shuffle ( fileContents , new Random ( seed ) ) ; int i = 0 ; for ( String s : fileContents ) { String path = FilenameUtils . concat ( baseDirStr , i + ".csv" ) ; File f = new File ( path ) ; FileUtils . writeStringToFile ( f , s ) ; i ++ ; } |
public class CasEntryPoint { /** * Constructs a service url from the HttpServletRequest or from the given
* serviceUrl . Prefers the serviceUrl provided if both a serviceUrl and a
* serviceName .
* @ param request the HttpServletRequest
* @ param response the HttpServletResponse
* @ param service the configured service url ( this will be used if not null )
* @ param serverName the server name to use to constuct the service url if service param is empty
* @ param artifactParameterName the artifact parameter name to remove ( i . e . ticket )
* @ param encode whether to encode the url or not ( i . e . Jsession ) .
* @ return the service url to use . */
public String constructServiceUrl ( final HttpServletRequest request , final HttpServletResponse response , final String service , final String serverName ) { } } | if ( Strings . isNotBlank ( service ) ) { return response . encodeURL ( service ) ; } final StringBuilder buffer = new StringBuilder ( ) ; if ( ! serverName . startsWith ( "https://" ) && ! serverName . startsWith ( "http://" ) ) { buffer . append ( request . isSecure ( ) ? "https://" : "http://" ) ; } buffer . append ( serverName ) ; buffer . append ( request . getRequestURI ( ) ) ; Set < String > reservedKeys = CollectUtils . newHashSet ( ) ; reservedKeys . add ( config . getArtifactName ( ) ) ; if ( null != sessionIdReader ) { reservedKeys . add ( sessionIdReader . idName ( ) ) ; } String queryString = request . getQueryString ( ) ; if ( Strings . isNotBlank ( queryString ) ) { String [ ] parts = Strings . split ( queryString , "&" ) ; Arrays . sort ( parts ) ; StringBuilder paramBuf = new StringBuilder ( ) ; for ( String part : parts ) { int equIdx = part . indexOf ( '=' ) ; if ( equIdx > 0 ) { String key = part . substring ( 0 , equIdx ) ; if ( ! reservedKeys . contains ( key ) ) { paramBuf . append ( '&' ) . append ( key ) . append ( part . substring ( equIdx ) ) ; } } } if ( paramBuf . length ( ) > 0 ) { paramBuf . setCharAt ( 0 , '?' ) ; buffer . append ( paramBuf ) ; } } return response . encodeURL ( buffer . toString ( ) ) ; |
public class ComponentBorder { /** * The complimentary edges of the Border may need to be adjusted to allow
* the component to fit completely in the bounds of the parent component . */
private void adjustBorderInsets ( ) { } } | Insets parentInsets = parent . getInsets ( ) ; // May need to adust the height of the parent component to fit
// the component in the Border
if ( edge == Edge . RIGHT || edge == Edge . LEFT ) { int parentHeight = parent . getPreferredSize ( ) . height - parentInsets . top - parentInsets . bottom ; int diff = component . getHeight ( ) - parentHeight ; if ( diff > 0 ) { int topDiff = ( int ) ( diff * alignment ) ; int bottomDiff = diff - topDiff ; borderInsets . top += topDiff ; borderInsets . bottom += bottomDiff ; } } // May need to adust the width of the parent component to fit
// the component in the Border
if ( edge == Edge . TOP || edge == Edge . BOTTOM ) { int parentWidth = parent . getPreferredSize ( ) . width - parentInsets . left - parentInsets . right ; int diff = component . getWidth ( ) - parentWidth ; if ( diff > 0 ) { int leftDiff = ( int ) ( diff * alignment ) ; int rightDiff = diff - leftDiff ; borderInsets . left += leftDiff ; borderInsets . right += rightDiff ; } } |
public class DivSufSort { /** * Binary partition for substrings . */
private int ssPartition ( int PA , int first , int last , int depth ) { } } | int a , b ; // SA pointer
int t ; for ( a = first - 1 , b = last ; ; ) { for ( ; ( ++ a < b ) && ( ( SA [ PA + SA [ a ] ] + depth ) >= ( SA [ PA + SA [ a ] + 1 ] + 1 ) ) ; ) { SA [ a ] = ~ SA [ a ] ; } for ( ; ( a < -- b ) && ( ( SA [ PA + SA [ b ] ] + depth ) < ( SA [ PA + SA [ b ] + 1 ] + 1 ) ) ; ) { } if ( b <= a ) { break ; } t = ~ SA [ b ] ; SA [ b ] = SA [ a ] ; SA [ a ] = t ; } if ( first < a ) { SA [ first ] = ~ SA [ first ] ; } return a ; |
public class ServerHandshakeHandler { /** * Determine if this request represents a web - socket upgrade request .
* @ param request The request to inspect .
* @ return < code > true < / code > if this request is indeed a web - socket upgrade
* request , otherwise < code > false < / code > . */
protected boolean isWebSocketsUpgradeRequest ( HttpRequest request ) { } } | String connectionHeader = request . getHeader ( Names . CONNECTION ) ; String upgradeHeader = request . getHeader ( Names . UPGRADE ) ; if ( connectionHeader == null || upgradeHeader == null ) { return false ; } if ( connectionHeader . trim ( ) . toLowerCase ( ) . contains ( Values . UPGRADE . toLowerCase ( ) ) ) { if ( upgradeHeader . trim ( ) . equalsIgnoreCase ( Values . WEBSOCKET ) ) { return true ; } } return false ; |
public class AWSAppSyncClient { /** * Retrieves a < code > GraphqlApi < / code > object .
* @ param getGraphqlApiRequest
* @ return Result of the GetGraphqlApi operation returned by the service .
* @ throws BadRequestException
* The request is not well formed . For example , a value is invalid or a required field is missing . Check the
* field values , and then try again .
* @ throws NotFoundException
* The resource specified in the request was not found . Check the resource , and then try again .
* @ throws UnauthorizedException
* You are not authorized to perform this operation .
* @ throws InternalFailureException
* An internal AWS AppSync error occurred . Try your request again .
* @ sample AWSAppSync . GetGraphqlApi
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / appsync - 2017-07-25 / GetGraphqlApi " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetGraphqlApiResult getGraphqlApi ( GetGraphqlApiRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetGraphqlApi ( request ) ; |
public class Artefact { /** * Adds an attribute value . If the attribute doesn ' t exist it will be created .
* @ param attributeName the name of the attribute .
* @ param attributeValue the value of the attribute in the given environment .
* @ param in the environment in which the attribute value applies */
public void addAttributeValue ( final String attributeName , final Value attributeValue , Environment in ) { } } | if ( in == null ) in = GlobalEnvironment . INSTANCE ; Attribute attr = attributes . get ( attributeName ) ; if ( attr == null ) { attr = new Attribute ( attributeName ) ; attributes . put ( attr . getName ( ) , attr ) ; } attr . addValue ( attributeValue , in ) ; Map < String , Object > valueMap = contentMap . get ( in ) ; if ( valueMap == null ) valueMap = new HashMap < > ( ) ; valueMap . put ( attributeName , attributeValue . getRaw ( ) ) ; contentMap . put ( in , valueMap ) ; // TODO check for loops and process such situation
if ( attributeValue instanceof IncludeValue ) externalConfigurations . add ( ( ( IncludeValue ) attributeValue ) . getConfigName ( ) ) ; |
public class BsonUtils { /** * The BSON specification ( or rather the < a href = " http : / / www . mongodb . org / display / DOCS / Object + IDs " > MongoDB
* documentation < / a > ) defines the structure of this data :
* < quote > " A BSON ObjectID is a 12 - byte value consisting of a 4 - byte timestamp ( seconds since epoch ) , a 3 - byte
* machine id , a 2 - byte process id , and a 3 - byte counter . Note that the timestamp and counter fields must be stored
* big endian unlike the rest of BSON . This is because they are compared byte - by - byte and we want to ensure a mostly
* increasing order . " < / quote >
* @ param bytes
* @ return the ObjectId */
public static ObjectId readObjectId ( byte [ ] bytes ) { } } | // Compute the values in big - endian . . .
int time = ( ( bytes [ 0 ] & 0xff ) << 24 ) + ( ( bytes [ 1 ] & 0xff ) << 16 ) + ( ( bytes [ 2 ] & 0xff ) << 8 ) + ( ( bytes [ 3 ] & 0xff ) << 0 ) ; int machine = ( ( bytes [ 4 ] & 0xff ) << 16 ) + ( ( bytes [ 5 ] & 0xff ) << 8 ) + ( ( bytes [ 6 ] & 0xff ) << 0 ) ; int process = ( ( bytes [ 7 ] & 0xff ) << 8 ) + ( ( bytes [ 8 ] & 0xff ) << 0 ) ; int inc = ( ( bytes [ 9 ] & 0xff ) << 16 ) + ( ( bytes [ 10 ] & 0xff ) << 8 ) + ( ( bytes [ 11 ] & 0xff ) << 0 ) ; // Create the value object . . .
return new ObjectId ( time , machine , process , inc ) ; |
public class ApplicationHealthIndicator { /** * { @ inheritDoc } */
@ Override public final Health health ( ) { } } | logger . debug ( "Health" ) ; final Builder builder = Health . up ( ) ; logger . debug ( " " + appInfo . getVersion ( ) ) ; builder . withDetail ( "version" , appInfo . getVersion ( ) ) ; final List < Map < String , Object > > details = loader . details ( ) ; logger . debug ( " " + details . size ( ) + " datasets" ) ; builder . withDetail ( "datasets" , details ) ; builder . up ( ) ; return builder . build ( ) ; |
public class Group { /** * Method to initialize the Cache of this CacheObjectInterface .
* @ see # CACHE */
public static void initialize ( ) { } } | if ( InfinispanCache . get ( ) . exists ( Group . IDCACHE ) ) { InfinispanCache . get ( ) . < Long , Group > getCache ( Group . IDCACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < Long , Group > getCache ( Group . IDCACHE ) . addListener ( new CacheLogListener ( Group . LOG ) ) ; } if ( InfinispanCache . get ( ) . exists ( Group . NAMECACHE ) ) { InfinispanCache . get ( ) . < String , Group > getCache ( Group . NAMECACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < String , Group > getCache ( Group . NAMECACHE ) . addListener ( new CacheLogListener ( Group . LOG ) ) ; } if ( InfinispanCache . get ( ) . exists ( Group . UUIDCACHE ) ) { InfinispanCache . get ( ) . < UUID , Group > getCache ( Group . UUIDCACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < UUID , Group > getCache ( Group . UUIDCACHE ) . addListener ( new CacheLogListener ( Group . LOG ) ) ; } |
public class PrimitiveCases { /** * Matches a short .
* < p > If matched , the short value is extracted . */
public static DecomposableMatchBuilder1 < Short , Short > caseShort ( MatchesAny s ) { } } | List < Matcher < Object > > matchers = new ArrayList < > ( ) ; matchers . add ( any ( ) ) ; return new DecomposableMatchBuilder1 < > ( matchers , 0 , new PrimitiveFieldExtractor < > ( Short . class ) ) ; |
public class AJP13OutputStream { public void writeHeader ( HttpMessage httpMessage ) throws IOException { } } | HttpResponse response = ( HttpResponse ) httpMessage ; response . setState ( HttpMessage . __MSG_SENDING ) ; _ajpResponse . resetData ( ) ; _ajpResponse . addByte ( AJP13ResponsePacket . __SEND_HEADERS ) ; _ajpResponse . addInt ( response . getStatus ( ) ) ; _ajpResponse . addString ( response . getReason ( ) ) ; int mark = _ajpResponse . getMark ( ) ; _ajpResponse . addInt ( 0 ) ; int nh = 0 ; Enumeration e1 = response . getFieldNames ( ) ; while ( e1 . hasMoreElements ( ) ) { String h = ( String ) e1 . nextElement ( ) ; Enumeration e2 = response . getFieldValues ( h ) ; while ( e2 . hasMoreElements ( ) ) { _ajpResponse . addHeader ( h ) ; _ajpResponse . addString ( ( String ) e2 . nextElement ( ) ) ; nh ++ ; } } if ( nh > 0 ) _ajpResponse . setInt ( mark , nh ) ; _ajpResponse . setDataSize ( ) ; write ( _ajpResponse ) ; _ajpResponse . resetData ( ) ; |
public class AbstractRemoteTransport { /** * localUpdateLongRunningFree
* @ param logicalAddress the logical address
* @ param freeCount the free count */
public void localUpdateLongRunningFree ( Address logicalAddress , Long freeCount ) { } } | if ( trace ) log . tracef ( "LOCAL_UPDATE_LONGRUNNING_FREE(%s, %d)" , logicalAddress , freeCount ) ; DistributedWorkManager dwm = workManagerCoordinator . resolveDistributedWorkManager ( logicalAddress ) ; if ( dwm != null ) { Collection < NotificationListener > copy = new ArrayList < NotificationListener > ( dwm . getNotificationListeners ( ) ) ; for ( NotificationListener nl : copy ) { nl . updateLongRunningFree ( logicalAddress , freeCount ) ; } } else { WorkManagerEventQueue wmeq = WorkManagerEventQueue . getInstance ( ) ; wmeq . addEvent ( new WorkManagerEvent ( WorkManagerEvent . TYPE_UPDATE_LONG_RUNNING , logicalAddress , freeCount ) ) ; } |
public class BusItinerary { /** * Replies the list of the road segments of the bus itinerary .
* @ return a list of road segments */
@ Pure public Iterable < RoadSegment > roadSegments ( ) { } } | return new Iterable < RoadSegment > ( ) { @ Override public Iterator < RoadSegment > iterator ( ) { return roadSegmentsIterator ( ) ; } } ; |
public class ArrayUtil { /** * Allocate a new array of the same type as another array .
* @ param oldElements on which the new array is based .
* @ param length of the new array .
* @ param < T > type of the array .
* @ return the new array of requested length . */
@ SuppressWarnings ( "unchecked" ) public static < T > T [ ] newArray ( final T [ ] oldElements , final int length ) { } } | return ( T [ ] ) Array . newInstance ( oldElements . getClass ( ) . getComponentType ( ) , length ) ; |
public class JamaEIG { /** * Return the block diagonal eigenvalue matrix
* @ return D */
public Matrix getD ( ) { } } | Matrix X = MatrixFactory . createMatrix ( n , n ) ; for ( int i = 0 ; i < n ; i ++ ) { X . set ( i , i , d [ i ] ) ; if ( e [ i ] > 0 ) { X . set ( i , i + 1 , e [ i ] ) ; } else if ( e [ i ] < 0 ) { X . set ( i , i - 1 , e [ i ] ) ; } } return X ; |
public class EncodeTask { /** * Handle encoding of the plaintext provided . Capture any
* Exceptions and print the stack trace .
* @ param plaintext
* @ param encodingType
* @ param encodingKey
* @ return ciphertext
* @ throws InvalidPasswordEncodingException
* @ throws UnsupportedCryptoAlgorithmException */
private String encode ( PrintStream stderr , String plaintext , String encodingType , Map < String , String > properties ) throws InvalidPasswordEncodingException , UnsupportedCryptoAlgorithmException { } } | String ret = null ; try { ret = PasswordUtil . encode ( plaintext , encodingType == null ? PasswordUtil . getDefaultEncoding ( ) : encodingType , properties ) ; } catch ( InvalidPasswordEncodingException e ) { e . printStackTrace ( stderr ) ; throw e ; } catch ( UnsupportedCryptoAlgorithmException e ) { e . printStackTrace ( stderr ) ; throw e ; } return ret ; |
public class SparseIntegerVectorView { /** * { @ inheritDoc } */
@ SuppressWarnings ( "unchecked" ) @ Override public double magnitude ( ) { } } | // Check whether the current magnitude is valid and if not , recompute it
if ( magnitude < 0 ) { double m = 0 ; // Special case if we can iterate in time linear to the number of
// non - zero values
if ( sparseVector instanceof Iterable ) { for ( IntegerEntry e : ( Iterable < IntegerEntry > ) sparseVector ) { int idx = e . index ( ) ; if ( idx >= vectorOffset && idx < vectorOffset + vectorLength ) { int i = e . value ( ) ; m += i * i ; } } } else { for ( int nz : sparseVector . getNonZeroIndices ( ) ) { if ( nz >= vectorOffset && nz < vectorOffset + vectorLength ) { int j = intVector . get ( nz ) ; m += j * j ; } } } magnitude = Math . sqrt ( m ) ; } return magnitude ; |
public class Covers { /** * Computes an incremental structural cover for a given automaton , i . e . a cover that only contains the missing
* sequences for obtaining a complete structural cover .
* @ param automaton
* the automaton for which the cover should be computed
* @ param inputs
* the set of input symbols allowed in the cover sequences
* @ param oldCover
* the collection containing the already existing sequences of the structural cover
* @ param newCover
* the collection in which the missing sequences will be stored
* @ param < I >
* input symbol type
* @ return { @ code true } if new sequences have been added to the structural cover , { @ code false } otherwise .
* @ see # structuralCover ( DeterministicAutomaton , Collection , Collection ) */
public static < I > boolean incrementalStructuralCover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? extends Word < I > > oldCover , Collection < ? super Word < I > > newCover ) { } } | final int oldCoverSize = newCover . size ( ) ; incrementalCover ( automaton , inputs , oldCover , Collections . emptySet ( ) , newCover :: add , newCover :: add ) ; return oldCoverSize < newCover . size ( ) ; |
public class KerasBatchNormalization { /** * Get BatchNormalization momentum parameter from Keras layer configuration .
* @ param layerConfig dictionary containing Keras layer configuration
* @ return momentum
* @ throws InvalidKerasConfigurationException Invalid Keras config */
private double getMomentumFromConfig ( Map < String , Object > layerConfig ) throws InvalidKerasConfigurationException { } } | Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( ! innerConfig . containsKey ( LAYER_FIELD_MOMENTUM ) ) throw new InvalidKerasConfigurationException ( "Keras BatchNorm layer config missing " + LAYER_FIELD_MOMENTUM + " field" ) ; return ( double ) innerConfig . get ( LAYER_FIELD_MOMENTUM ) ; |
public class JavaTokenizer { /** * Report an error at the given position using the provided arguments . */
protected void lexError ( int pos , String key , Object ... args ) { } } | log . error ( pos , key , args ) ; tk = TokenKind . ERROR ; errPos = pos ; |
public class SimpleTemplate { /** * Replace string in the template text
* @ param varMap variable map
* @ return replaced string */
public String replace ( Map < String , String > varMap ) { } } | StringBuilder newString = new StringBuilder ( ) ; int p = 0 ; int p0 = 0 ; while ( true ) { p = templateText . indexOf ( "${" , p ) ; if ( p == - 1 ) { newString . append ( templateText . substring ( p0 , templateText . length ( ) ) ) ; break ; } else { newString . append ( templateText . substring ( p0 , p ) ) ; } p0 = p ; p = templateText . indexOf ( "}" , p ) ; if ( p != - 1 ) { String varName = templateText . substring ( p0 + 2 , p ) . trim ( ) ; if ( varMap . containsKey ( varName ) ) { newString . append ( varMap . get ( varName ) ) ; p0 = p + 1 ; } } } return newString . toString ( ) ; |
public class SnapshotClient { /** * Gets the access control policy for a resource . May be empty if no such policy or resource
* exists .
* < p > Sample code :
* < pre > < code >
* try ( SnapshotClient snapshotClient = SnapshotClient . create ( ) ) {
* ProjectGlobalSnapshotResourceName resource = ProjectGlobalSnapshotResourceName . of ( " [ PROJECT ] " , " [ RESOURCE ] " ) ;
* Policy response = snapshotClient . getIamPolicySnapshot ( resource . toString ( ) ) ;
* < / code > < / pre >
* @ param resource Name or id of the resource for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Policy getIamPolicySnapshot ( String resource ) { } } | GetIamPolicySnapshotHttpRequest request = GetIamPolicySnapshotHttpRequest . newBuilder ( ) . setResource ( resource ) . build ( ) ; return getIamPolicySnapshot ( request ) ; |
public class ConverterRegistry { /** * 登记自定义转换器
* @ param type 转换的目标类型
* @ param converterClass 转换器类 , 必须有默认构造方法
* @ return { @ link ConverterRegistry } */
public ConverterRegistry putCustom ( Type type , Class < ? extends Converter < ? > > converterClass ) { } } | return putCustom ( type , ReflectUtil . newInstance ( converterClass ) ) ; |
public class Token { /** * Creates a token that represents a symbol , using a library for the text . */
public static Token newSymbol ( int type , int startLine , int startColumn ) { } } | return new Token ( type , Types . getText ( type ) , startLine , startColumn ) ; |
public class KafkaAvroSchemaRegistry { /** * Fetch schema by key . */
@ Override protected Schema fetchSchemaByKey ( String key ) throws SchemaRegistryException { } } | String schemaUrl = KafkaAvroSchemaRegistry . this . url + GET_RESOURCE_BY_ID + key ; GetMethod get = new GetMethod ( schemaUrl ) ; int statusCode ; String schemaString ; HttpClient httpClient = this . borrowClient ( ) ; try { statusCode = httpClient . executeMethod ( get ) ; schemaString = get . getResponseBodyAsString ( ) ; } catch ( IOException e ) { throw new SchemaRegistryException ( e ) ; } finally { get . releaseConnection ( ) ; this . httpClientPool . returnObject ( httpClient ) ; } if ( statusCode != HttpStatus . SC_OK ) { throw new SchemaRegistryException ( String . format ( "Schema with key %s cannot be retrieved, statusCode = %d" , key , statusCode ) ) ; } Schema schema ; try { schema = new Schema . Parser ( ) . parse ( schemaString ) ; } catch ( Throwable t ) { throw new SchemaRegistryException ( String . format ( "Schema with ID = %s cannot be parsed" , key ) , t ) ; } return schema ; |
public class TransmittableThreadLocal { /** * see { @ link InheritableThreadLocal # get ( ) } */
@ Override public final T get ( ) { } } | T value = super . get ( ) ; if ( null != value ) addValue ( ) ; return value ; |
public class OSKL { /** * Updates the average model to reflect the current time average */
private void updateAverage ( ) { } } | if ( t == last_t || t < burnIn ) return ; else if ( last_t < burnIn ) // first update since done burning
{ for ( int i = 0 ; i < alphaAveraged . size ( ) ; i ++ ) alphaAveraged . set ( i , alphas . get ( i ) ) ; } double w = t - last_t ; // time elapsed
for ( int i = 0 ; i < alphaAveraged . size ( ) ; i ++ ) { double delta = alphas . getD ( i ) - alphaAveraged . getD ( i ) ; alphaAveraged . set ( i , alphaAveraged . getD ( i ) + delta * w / t ) ; } last_t = t ; // average done |
public class XSequentialEventBuffer { /** * Retrieves the event recorded at the specified position
* @ param eventIndex
* Position of the requested event , defined to be within
* < code > [ 0 , size ( ) - 1 ] < / code > .
* @ return The requested event . */
public synchronized XEvent get ( int eventIndex ) throws IOException , IndexOutOfBoundsException { } } | // check for index sanity
if ( eventIndex < 0 || eventIndex >= size ) { throw new IndexOutOfBoundsException ( ) ; } // determine and set appropriate file pointer position
navigateToIndex ( eventIndex ) ; // read and return requested audit trail entry
return read ( ) ; |
public class AbstractDestinationDescriptor { /** * ( non - Javadoc )
* @ see net . timewalker . ffmq4 . utils . Checkable # check ( ) */
@ Override public void check ( ) throws JMSException { } } | if ( StringTools . isEmpty ( name ) ) throw new InvalidDescriptorException ( "Missing descriptor property : name" ) ; checkMinValue ( maxNonPersistentMessages , 0 , "maximum non persistent messages" ) ; checkMinValue ( initialBlockCount , 0 , "initial block count" ) ; checkMinValue ( maxBlockCount , 0 , "maximum block count" ) ; if ( maxBlockCount < initialBlockCount ) throw new InvalidDescriptorException ( "Maximum block count should be greater or equal than initial block count" ) ; if ( maxBlockCount > 0 ) { if ( rawDataFolder == null || StringTools . isEmpty ( rawDataFolder ) ) throw new InvalidDescriptorException ( "Missing destination raw data folder" ) ; if ( dataFolder == null || StringTools . isEmpty ( dataFolder . getName ( ) ) ) throw new InvalidDescriptorException ( "Missing destination data folder" ) ; if ( ! dataFolder . isDirectory ( ) ) throw new InvalidDescriptorException ( "Invalid data folder : " + dataFolder . getAbsolutePath ( ) ) ; checkMinValue ( blockSize , 1024 , "block size" ) ; if ( initialBlockCount != maxBlockCount ) checkMinValue ( autoExtendAmount , 1 , "auto extend amount" ) ; if ( useJournal ) { if ( ! journalFolder . isDirectory ( ) ) throw new InvalidDescriptorException ( "Invalid journal folder : " + journalFolder . getAbsolutePath ( ) ) ; checkMinValue ( maxJournalSize , 1024 , "maximum journal size" ) ; checkMinValue ( maxWriteBatchSize , 1 , "maximum write batch size" ) ; checkMinValue ( journalOutputBuffer , 1024 , "journal output buffer size" ) ; checkMinValue ( maxUnflushedJournalSize , 1024 , "maximum unflushed journal size" ) ; checkMinValue ( maxUncommittedStoreSize , 1024 , "maximum uncommitted store size" ) ; } } if ( initialBlockCount == 0 && maxNonPersistentMessages == 0 ) throw new InvalidDescriptorException ( "Destination cannot store any message !" ) ; if ( ! StorageSyncMethod . 
isValid ( storageSyncMethod ) ) throw new InvalidDescriptorException ( "Invalid storage sync method : " + storageSyncMethod ) ; |
public class Postconditions { /** * An { @ code int } specialized version of { @ link # checkPostcondition ( Object ,
* ContractConditionType ) } .
* @ param value The value
* @ param condition The predicate
* @ return value
* @ throws PostconditionViolationException If the predicate is false */
public static int checkPostconditionI ( final int value , final ContractIntConditionType condition ) throws PostconditionViolationException { } } | return checkPostconditionI ( value , condition . predicate ( ) , condition . describer ( ) ) ; |
public class MonitoringConfigurationUpdateMarshaller { /** * Marshall the given parameter object . */
public void marshall ( MonitoringConfigurationUpdate monitoringConfigurationUpdate , ProtocolMarshaller protocolMarshaller ) { } } | if ( monitoringConfigurationUpdate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( monitoringConfigurationUpdate . getConfigurationTypeUpdate ( ) , CONFIGURATIONTYPEUPDATE_BINDING ) ; protocolMarshaller . marshall ( monitoringConfigurationUpdate . getMetricsLevelUpdate ( ) , METRICSLEVELUPDATE_BINDING ) ; protocolMarshaller . marshall ( monitoringConfigurationUpdate . getLogLevelUpdate ( ) , LOGLEVELUPDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LoginAccount { /** * Sets the value of provided property to null .
* @ param propName
* allowed object is { @ link String } */
@ Override public void unset ( String propName ) { } } | if ( propName . equals ( PROP_CERTIFICATE ) ) { unsetCertificate ( ) ; } super . unset ( propName ) ; |
public class CategoryRoute { /** * Returns the LEX types
* @ return the LEX types */
public List < TypeCategory > getLEXTypes ( ) { } } | ClientResource resource = new ClientResource ( Route . LEX_TYPE . url ( ) ) ; try { Representation repr = resource . get ( ) ; return mapper . readValue ( repr . getText ( ) , new TypeReference < List < TypeCategory > > ( ) { } ) ; } catch ( IOException ex ) { LEX4JLogger . log ( Level . WARNING , "Could not retrieve LEX types correctly!" ) ; return null ; } |
public class KickUserImpl { /** * / * ( non - Javadoc )
* @ see com . lagente . core . command . BaseCommand # execute ( ) */
@ SuppressWarnings ( "unchecked" ) @ Override public String execute ( ) { } } | User sfsUser = CommandUtil . getSfsUser ( userToKick , api ) ; User sfsMod = ( modUser != null ) ? CommandUtil . getSfsUser ( modUser , api ) : null ; api . kickUser ( sfsUser , sfsMod , kickMessage , delaySeconds ) ; return userToKick ; |
public class SQLSupport { /** * Map a { @ link FilterOperationHint } to a SQL - specific { @ link FilterOperation } . When using SQL as a query
* language , all of the operations defined in { @ link FilterOperationHint } should be supported .
* @ param hint the hint
* @ return the { @ link FilterOperation } matching the given hint . */
public static final FilterOperation mapFilterHintToOperation ( FilterOperationHint hint ) { } } | for ( int i = 0 ; i < FILTER_OPERATIONS . length ; i ++ ) { FilterOperation op = FILTER_OPERATIONS [ i ] ; if ( op . getOperationHint ( ) . equals ( hint ) ) return op ; } return null ; |
public class CloudFoundryCustomContextPathExample { /** * tag : : configuration [ ] */
@ Bean public TomcatServletWebServerFactory servletWebServerFactory ( ) { } } | return new TomcatServletWebServerFactory ( ) { @ Override protected void prepareContext ( Host host , ServletContextInitializer [ ] initializers ) { super . prepareContext ( host , initializers ) ; StandardContext child = new StandardContext ( ) ; child . addLifecycleListener ( new Tomcat . FixContextListener ( ) ) ; child . setPath ( "/cloudfoundryapplication" ) ; ServletContainerInitializer initializer = getServletContextInitializer ( getContextPath ( ) ) ; child . addServletContainerInitializer ( initializer , Collections . emptySet ( ) ) ; child . setCrossContext ( true ) ; host . addChild ( child ) ; } } ; |
public class BundleRepositoryRegistry { /** * Add the default repositories for the product
* @ param processName If set to a processName , a cache will be created in that process ' s workarea . A null value disables caching .
* @ param useMsgs This setting is passed on to the held ContentLocalBundleRepositories .
* @ param isClient This is true when the current process is client . */
public static synchronized void initializeDefaults ( String processName , boolean useMsgs , boolean isClient ) { } } | BundleRepositoryRegistry . isClient = isClient ; BundleRepositoryRegistry . initializeDefaults ( processName , useMsgs ) ; |
public class IntSequenceIDSource { /** * { @ inheritDoc } */
public Integer nextID ( ) { } } | lock . lock ( ) ; try { if ( lastID == Integer . MAX_VALUE ) { long hours = ( System . currentTimeMillis ( ) - startTime ) / SECONDS_IN_HOUR ; throw new IDSourceExhaustedException ( "32-bit ID source exhausted after " + hours + " hours." ) ; } ++ lastID ; return lastID ; } finally { lock . unlock ( ) ; } |
public class ExcelFunctions { /** * Repeats text a given number of times */
public static String rept ( EvaluationContext ctx , Object text , Object numberTimes ) { } } | int _numberTimes = Conversions . toInteger ( numberTimes , ctx ) ; if ( _numberTimes < 0 ) { throw new RuntimeException ( "Number of times can't be negative" ) ; } return StringUtils . repeat ( Conversions . toString ( text , ctx ) , _numberTimes ) ; |
public class AbstractMedia { /** * Add the specified observer to this media element . */
protected void addObserver ( Object obs ) { } } | if ( _observers == null ) { _observers = ObserverList . newFastUnsafe ( ) ; } _observers . add ( obs ) ; |
public class GenerateInverseFromMinor { /** * Put the core auto - code algorithm here so an external class can call it */
public void printMinors ( int matrix [ ] , int N , PrintStream stream ) { } } | this . N = N ; this . stream = stream ; // compute all the minors
int index = 0 ; for ( int i = 1 ; i <= N ; i ++ ) { for ( int j = 1 ; j <= N ; j ++ , index ++ ) { stream . print ( " double m" + i + "" + j + " = " ) ; if ( ( i + j ) % 2 == 1 ) stream . print ( "-( " ) ; printTopMinor ( matrix , i - 1 , j - 1 , N ) ; if ( ( i + j ) % 2 == 1 ) stream . print ( ")" ) ; stream . print ( ";\n" ) ; } } stream . println ( ) ; // compute the determinant
stream . print ( " double det = (a11*m11" ) ; for ( int i = 2 ; i <= N ; i ++ ) { stream . print ( " + " + a ( i - 1 ) + "*m" + 1 + "" + i ) ; } stream . println ( ")/scale;" ) ; |
public class Logger { /** * Log an INFO message .
* If the logger is currently enabled for the INFO message
* level then the given message is forwarded to all the
* registered output Handler objects .
* @ param msg The string message ( or a key in the message catalog ) */
public void info ( String msg ) { } } | if ( Level . INFO . intValue ( ) < levelValue ) { return ; } log ( Level . INFO , msg ) ; |
public class MoleculerError { /** * - - - LOCAL / REMOTE STACK TRACE - - - */
public String getStack ( ) { } } | if ( stackTrace == null ) { if ( stack == null ) { StringWriter sw = new StringWriter ( 512 ) ; PrintWriter pw = new PrintWriter ( sw , true ) ; super . printStackTrace ( pw ) ; stackTrace = sw . toString ( ) . trim ( ) ; } else { stackTrace = stack . trim ( ) ; } } return stackTrace ; |
public class ClassNameDataUpdater { /** * The update method is overridden here directly since the usual extraction method
* is not needed
* { @ inheritDoc } */
@ Override void update ( JSONObject pJSONObject , MBeanInfo pMBeanInfo , Stack < String > pPathStack ) { } } | verifyThatPathIsEmpty ( pPathStack ) ; pJSONObject . put ( getKey ( ) , pMBeanInfo . getClassName ( ) ) ; |
public class ReferSubscriber { /** * This method is the same as refresh ( duration , eventId , timeout ) except that instead of creating
* the SUBSCRIBE request from parameters passed in , the given request message parameter is used
* for sending out the SUBSCRIBE message .
* The Request parameter passed into this method should come from calling createSubscribeMessage ( )
* - see that javadoc . The subscription duration is reset to the passed in Request ' s expiry value .
* If it is 0 , this is an unsubscribe . The event " id " in the given request will be used
* subsequently ( for error checking SUBSCRIBE responses and NOTIFYs from the server as well as for
* sending subsequent SUBSCRIBEs ) . */
public boolean refresh ( Request req , long timeout ) { } } | return refreshSubscription ( req , timeout , parent . getProxyHost ( ) != null ) ; |
public class AtomContainer { /** * { @ inheritDoc } */
@ Override public ILonePair removeLonePair ( int position ) { } } | ILonePair lp = lonePairs [ position ] ; lp . removeListener ( this ) ; for ( int i = position ; i < lonePairCount - 1 ; i ++ ) { lonePairs [ i ] = lonePairs [ i + 1 ] ; } lonePairs [ lonePairCount - 1 ] = null ; lonePairCount -- ; notifyChanged ( ) ; return lp ; |
public class CmsFloatDecoratedPanel { /** * Updates the vertical margin of the float box such that its vertical middle point coincides
* with the vertical middle point of the primary panel . < p > */
private void updateVerticalMargin ( ) { } } | int floatHeight = m_floatBox . getOffsetHeight ( ) ; int primaryHeight = m_primary . getOffsetHeight ( ) ; int verticalOffset = ( primaryHeight - floatHeight ) / 2 ; m_floatBox . getElement ( ) . getStyle ( ) . setMarginTop ( verticalOffset , Unit . PX ) ; |
public class Filter { /** * Checks that second clone contains first one .
* Clone A is contained in another clone B , if every part pA from A has part pB in B ,
* which satisfy the conditions :
* < pre >
* ( pA . resourceId = = pB . resourceId ) and ( pB . unitStart < = pA . unitStart ) and ( pA . unitEnd < = pB . unitEnd )
* < / pre >
* And all resourcesId from B exactly the same as all resourceId from A , which means that also every part pB from B has part pA in A ,
* which satisfy the condition :
* < pre >
* pB . resourceId = = pA . resourceId
* < / pre >
* So this relation is :
* < ul >
* < li > reflexive - A in A < / li >
* < li > transitive - ( A in B ) and ( B in C ) = > ( A in C ) < / li >
* < li > antisymmetric - ( A in B ) and ( B in A ) < = > ( A = B ) < / li >
* < / ul >
* < strong > Important : this method relies on fact that all parts were already sorted by resourceId and unitStart by using
* { @ link BlocksGroup . BlockComparator } , which uses { @ link org . sonar . duplications . utils . FastStringComparator } for comparison by resourceId . < / strong >
* Running time - O ( | A | + | B | ) . */
static boolean containsIn ( CloneGroup first , CloneGroup second ) { } } | if ( first . getCloneUnitLength ( ) > second . getCloneUnitLength ( ) ) { return false ; } List < ClonePart > firstParts = first . getCloneParts ( ) ; List < ClonePart > secondParts = second . getCloneParts ( ) ; return SortedListsUtils . contains ( secondParts , firstParts , new ContainsInComparator ( second . getCloneUnitLength ( ) , first . getCloneUnitLength ( ) ) ) && SortedListsUtils . contains ( firstParts , secondParts , ContainsInComparator . RESOURCE_ID_COMPARATOR ) ; |
public class Predicate { /** * A list of the conditions that determine when the trigger will fire .
* @ param conditions
* A list of the conditions that determine when the trigger will fire . */
public void setConditions ( java . util . Collection < Condition > conditions ) { } } | if ( conditions == null ) { this . conditions = null ; return ; } this . conditions = new java . util . ArrayList < Condition > ( conditions ) ; |
public class FactoryFinder { /** * Create an instance of a class using the specified ClassLoader and optionally fall back to the
* current ClassLoader if not found .
* @ param className Name of the concrete class corresponding to the service provider
* @ param cl ClassLoader to use to load the class , null means to use the bootstrap ClassLoader
* @ param doFallback true if the current ClassLoader should be tried as a fallback if the class
* is not found using cl */
private static Object newInstance ( final String className , ClassLoader cl , final boolean doFallback ) throws ConfigurationError { } } | try { Class < ? > providerClass ; if ( cl == null ) { // If classloader is null Use the bootstrap ClassLoader .
// Thus Class . forName ( String ) will use the current
// ClassLoader which will be the bootstrap ClassLoader .
providerClass = Class . forName ( className ) ; } else { try { providerClass = cl . loadClass ( className ) ; } catch ( final ClassNotFoundException x ) { if ( doFallback ) { // Fall back to current classloader
cl = FactoryFinder . class . getClassLoader ( ) ; providerClass = cl . loadClass ( className ) ; } else { throw x ; } } } final Object instance = providerClass . newInstance ( ) ; dPrint ( "created new instance of " + providerClass + " using ClassLoader: " + cl ) ; return instance ; } catch ( final ClassNotFoundException x ) { throw new ConfigurationError ( "Provider " + className + " not found" , x ) ; } catch ( final Exception x ) { throw new ConfigurationError ( "Provider " + className + " could not be instantiated: " + x , x ) ; } |
public class MagickUtil { /** * Converts a gray { @ code MagickImage } to a { @ code BufferedImage } , of
* type { @ code TYPE _ USHORT _ GRAY } or { @ code TYPE _ BYTE _ GRAY } .
* @ param pImage the original { @ code MagickImage }
* @ param pAlpha keep alpha channel
* @ return a new { @ code BufferedImage }
* @ throws MagickException if an exception occurs during conversion
* @ see BufferedImage */
private static BufferedImage grayToBuffered ( MagickImage pImage , boolean pAlpha ) throws MagickException { } } | Dimension size = pImage . getDimension ( ) ; int length = size . width * size . height ; int bands = pAlpha ? 2 : 1 ; byte [ ] pixels = new byte [ length * bands ] ; // TODO : Make a fix for 16 bit TYPE _ USHORT _ GRAY ? !
// Note : The ordering AI or I corresponds to BufferedImage
// TYPE _ CUSTOM and TYPE _ BYTE _ GRAY respectively
pImage . dispatchImage ( 0 , 0 , size . width , size . height , pAlpha ? "AI" : "I" , pixels ) ; // Init databuffer with array , to avoid allocation of empty array
DataBuffer buffer = new DataBufferByte ( pixels , pixels . length ) ; int [ ] bandOffsets = pAlpha ? new int [ ] { 1 , 0 } : new int [ ] { 0 } ; WritableRaster raster = Raster . createInterleavedRaster ( buffer , size . width , size . height , size . width * bands , bands , bandOffsets , LOCATION_UPPER_LEFT ) ; return new BufferedImage ( pAlpha ? CM_GRAY_ALPHA : CM_GRAY_OPAQUE , raster , pAlpha , null ) ; |
public class InMemoryWordNetBinaryArray { /** * Create caches of WordNet to speed up matching .
* @ param componentKey a key to the component in the configuration
* @ param properties configuration
* @ throws SMatchException SMatchException */
public static void createWordNetCaches ( String componentKey , Properties properties ) throws SMatchException { } } | properties = getComponentProperties ( makeComponentPrefix ( componentKey , InMemoryWordNetBinaryArray . class . getSimpleName ( ) ) , properties ) ; if ( properties . containsKey ( JWNL_PROPERTIES_PATH_KEY ) ) { // initialize JWNL ( this must be done before JWNL library can be used )
try { final String configPath = properties . getProperty ( JWNL_PROPERTIES_PATH_KEY ) ; log . info ( "Initializing JWNL from " + configPath ) ; JWNL . initialize ( new FileInputStream ( configPath ) ) ; } catch ( JWNLException e ) { final String errMessage = e . getClass ( ) . getSimpleName ( ) + ": " + e . getMessage ( ) ; log . error ( errMessage , e ) ; throw new SMatchException ( errMessage , e ) ; } catch ( FileNotFoundException e ) { final String errMessage = e . getClass ( ) . getSimpleName ( ) + ": " + e . getMessage ( ) ; log . error ( errMessage , e ) ; throw new SMatchException ( errMessage , e ) ; } } else { final String errMessage = "Cannot find configuration key " + JWNL_PROPERTIES_PATH_KEY ; log . error ( errMessage ) ; throw new SMatchException ( errMessage ) ; } log . info ( "Creating WordNet caches..." ) ; writeNominalizations ( properties ) ; writeSynonymsAdj ( properties ) ; writeOppAdverbs ( properties ) ; writeOppAdjectives ( properties ) ; writeOppNouns ( properties ) ; writeNounMG ( properties ) ; writeVerbMG ( properties ) ; log . info ( "Done" ) ; |
public class MessageHeaderUtils { /** * Check if given header name belongs to Spring Integration internal headers .
* This is given if header name starts with internal header prefix or
* matches one of Spring ' s internal header names .
* @ param headerName
* @ return */
public static boolean isSpringInternalHeader ( String headerName ) { } } | // " springintegration _ " makes Citrus work with Spring Integration 1 . x release
if ( headerName . startsWith ( "springintegration_" ) ) { return true ; } else if ( headerName . equals ( MessageHeaders . ID ) ) { return true ; } else if ( headerName . equals ( MessageHeaders . TIMESTAMP ) ) { return true ; } else if ( headerName . equals ( MessageHeaders . REPLY_CHANNEL ) ) { return true ; } else if ( headerName . equals ( MessageHeaders . ERROR_CHANNEL ) ) { return true ; } else if ( headerName . equals ( MessageHeaders . CONTENT_TYPE ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . PRIORITY ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . CORRELATION_ID ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . ROUTING_SLIP ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . DUPLICATE_MESSAGE ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . SEQUENCE_NUMBER ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . SEQUENCE_SIZE ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . SEQUENCE_DETAILS ) ) { return true ; } else if ( headerName . equals ( IntegrationMessageHeaderAccessor . EXPIRATION_DATE ) ) { return true ; } else if ( headerName . startsWith ( "jms_" ) ) { return true ; } return false ; |
public class ClientSocketFactory {
    /**
     * Returns a valid recycled stream from the idle pool to the backend.
     * If the stream has been in the pool for too long (&gt; live_time), close it instead.
     *
     * @return the socket's read/write pair, or {@code null} if no usable idle stream exists
     */
    private ClientSocket openRecycle() {
        long now = CurrentTime.currentTime();
        ClientSocket stream = null;
        // The idle pool is a ring buffer guarded by this instance's monitor.
        synchronized (this) {
            if (_idleHead != _idleTail) {
                // Take the most recently idled stream from the head of the ring.
                stream = _idle[_idleHead];
                long freeTime = stream.getIdleStartTime();
                _idle[_idleHead] = null;
                // Move the head backwards around the ring (adding length - 1 avoids a
                // negative operand to %).
                _idleHead = (_idleHead + _idle.length - 1) % _idle.length;
                if (now < freeTime + _loadBalanceIdleTime) {
                    // Still fresh: mark it active and hand it out.
                    _activeCount.incrementAndGet();
                    _keepaliveCountTotal++;
                    stream.clearIdleStartTime();
                    stream.toActive();
                    return stream;
                }
            }
        }
        // Reached only when a stream was dequeued but had expired; close it
        // outside the lock to avoid holding the monitor during I/O teardown.
        if (stream != null) {
            if (log.isLoggable(Level.FINER))
                log.finer(this + " close idle " + stream + " expire=" + QDate.formatISO8601(stream.getIdleStartTime() + _loadBalanceIdleTime));
            stream.closeImpl();
        }
        return null;
    }
}
public class AuthleteApiImpl {
    /**
     * Calls an API with HTTP DELETE method and Service Owner credentials.
     * Thin delegate that forwards to the shared DELETE helper with the
     * service-owner credential set.
     *
     * @param path        the API path to call
     * @param queryParams query parameters to append to the request
     * @throws AuthleteApiException if the underlying API call fails
     */
    // NOTE(review): the type parameter TResponse is unused in this body — presumably
    // kept for signature symmetry with the other call*Api helpers; confirm before removing.
    private <TResponse> void callServiceOwnerDeleteApi(String path, Map<String, String> queryParams) throws AuthleteApiException {
        callDeleteApi(mServiceOwnerCredentials, path, queryParams);
    }
}
public class ServiceGraphModule {
    /**
     * Creates the new {@code ServiceGraph} without circular dependencies and
     * intermediary nodes.
     *
     * @param injector injector for building the graphs of objects
     * @return the created (and cached) ServiceGraph
     */
    @Provides @Singleton synchronized ServiceGraph serviceGraph(Injector injector, OptionalInitializer<ServiceGraphModule> serviceGraphModuleOptionalInitializer, OptionalInitializer<ServiceGraph> serviceGraphOptionalInitializer) {
        // Give an optional initializer a chance to configure this module first.
        serviceGraphModuleOptionalInitializer.accept(this);
        // Lazily build the graph once; the method is synchronized, so the
        // null-check is safe against concurrent callers.
        if (serviceGraph == null) {
            serviceGraph = ServiceGraph.create()
                    // Graph construction is deferred until start: only then is the
                    // injector fully populated.
                    .withStartCallback(() -> {
                        createGuiceGraph(injector, serviceGraph);
                        serviceGraph.removeIntermediateNodes();
                    })
                    // Annotate worker-pool nodes with the number of pooled instances;
                    // null suffix means "not a worker-pool key".
                    .withNodeSuffixes(key -> {
                        List<?> list = injector.getInstance(WorkerPools.class).getAllObjects(key);
                        return list.isEmpty() ? null : list.size();
                    })
                    .initialize(initializer)
                    .initialize(serviceGraphOptionalInitializer);
        }
        return serviceGraph;
    }
}
public class RandomProjectedNeighborsAndDensities {
    /**
     * Split the data set by distances.
     *
     * @param ind   Object index
     * @param begin Interval begin (inclusive)
     * @param end   Interval end (exclusive)
     * @param tpro  Projection (1-d projected values per object)
     * @param rand  Random generator
     * @return Splitting point
     */
    public int splitByDistance(ArrayModifiableDBIDs ind, int begin, int end, DoubleDataStore tpro, Random rand) {
        DBIDArrayIter it = ind.iter();
        // Initialize to half of MAX_VALUE so that (rmax - rmin) below cannot overflow
        // to infinity even in the degenerate case — TODO confirm that is the intent.
        double rmin = Double.MAX_VALUE * .5, rmax = -Double.MAX_VALUE * .5;
        int minInd = begin, maxInd = end - 1;
        // First pass: find the min/max projected value in [begin, end).
        for (it.seek(begin); it.getOffset() < end; it.advance()) {
            double currEle = tpro.doubleValue(it);
            rmin = Math.min(currEle, rmin);
            rmax = Math.max(currEle, rmax);
        }
        if (rmin != rmax) { // if not all elements are the same
            // Pick a random splitting value uniformly within [rmin, rmax).
            double rs = rmin + rand.nextDouble() * (rmax - rmin);
            // Two-pointer partition: elements <= rs end up on one side of the
            // returned index, elements > rs on the other (Hoare-style swaps).
            while (minInd < maxInd) {
                double currEle = tpro.doubleValue(it.seek(minInd));
                if (currEle > rs) {
                    // Scan from the right for a partner that also exceeds rs.
                    while (minInd < maxInd && tpro.doubleValue(it.seek(maxInd)) > rs) {
                        maxInd--;
                    }
                    if (minInd == maxInd) {
                        break;
                    }
                    ind.swap(minInd, maxInd);
                    maxInd--;
                }
                minInd++;
            }
        } else { // all elements are identical: split in the middle
            // Unsigned shift avoids int overflow for large begin + end.
            minInd = (begin + end) >>> 1;
        }
        return minInd;
    }
}
public class SheetFilterResourcesImpl { /** * Get all filters .
* It mirrors to the following Smartsheet REST API method : GET / sheets / { sheetId } / filters
* Exceptions :
* IllegalArgumentException : if any argument is null
* InvalidRequestException : if there is any problem with the REST API request
* AuthorizationException : if there is any problem with the REST API authorization ( access token )
* ServiceUnavailableException : if the REST API service is not available ( possibly due to rate limiting )
* SmartsheetRestException : if there is any other REST API related error occurred during the operation
* SmartsheetException : if there is any other error occurred during the operation
* @ param sheetId the sheet ID
* @ param pagination the pagination pagination
* @ return all the filters
* @ throws SmartsheetException the smartsheet exception */
public PagedResult < SheetFilter > listFilters ( long sheetId , PaginationParameters pagination ) throws SmartsheetException { } } | String path = "sheets/" + sheetId + "/filters" ; HashMap < String , Object > parameters = new HashMap < String , Object > ( ) ; if ( pagination != null ) { parameters = pagination . toHashMap ( ) ; } path += QueryUtil . generateUrl ( null , parameters ) ; return this . listResourcesWithWrapper ( path , SheetFilter . class ) ; |
public class StringConvert { /** * Checks if a suitable converter exists for the type .
* This performs the same checks as the { @ code findConverter } methods .
* Calling this before { @ code findConverter } will cache the converter .
* Note that all exceptions , including developer errors are caught and hidden .
* @ param cls the class to find a converter for , null returns false
* @ return true if convertible
* @ since 1.5 */
public boolean isConvertible ( final Class < ? > cls ) { } } | try { return cls != null && findConverterQuiet ( cls ) != null ; } catch ( RuntimeException ex ) { return false ; } |
public class FieldSet { /** * Like { @ link # writeTo } but uses MessageSet wire format . */
public void writeMessageSetTo ( final CodedOutputStream output ) throws IOException { } } | for ( int i = 0 ; i < fields . getNumArrayEntries ( ) ; i ++ ) { writeMessageSetTo ( fields . getArrayEntryAt ( i ) , output ) ; } for ( final Map . Entry < FieldDescriptorType , Object > entry : fields . getOverflowEntries ( ) ) { writeMessageSetTo ( entry , output ) ; } |
public class MessageStore {
    /**
     * Adds an item stream under the given transaction.
     * Delegates to the overload that also takes a lock id, using
     * {@code AbstractItem.NO_LOCK_ID} (i.e. the stream is added unlocked).
     *
     * @see com.ibm.ws.sib.msgstore.MessageStoreInterface#add(com.ibm.ws.sib.msgstore.ItemStream, com.ibm.ws.sib.msgstore.transactions.Transaction)
     */
    @Override public void add(ItemStream itemStream, Transaction transaction) throws MessageStoreException {
        add(itemStream, AbstractItem.NO_LOCK_ID, transaction);
    }
}
public class KubernetesClient {
    /**
     * Makes a REST call to the Kubernetes API and returns the result JSON.
     *
     * @param urlString Kubernetes API REST endpoint
     * @return parsed JSON object
     * @throws KubernetesClientException if the Kubernetes API didn't respond with 200 and a valid JSON content
     */
    private JsonObject callGet(final String urlString) {
        // The whole GET is wrapped in a retry loop; NON_RETRYABLE_KEYWORDS lets the
        // retry helper abort early on errors that retrying cannot fix.
        return RetryUtils.retry(new Callable<JsonObject>() {
            @Override
            public JsonObject call() {
                // Bearer-token auth plus the cluster CA certificate for TLS verification.
                return Json.parse(RestClient.create(urlString)
                        .withHeader("Authorization", String.format("Bearer %s", apiToken))
                        .withCaCertificate(caCertificate)
                        .get())
                        .asObject();
            }
        }, retries, NON_RETRYABLE_KEYWORDS);
    }
}
public class BSHMethodDeclaration {
    /**
     * Evaluate the declaration of the method. That is, determine the
     * structure of the method and install it into the caller's namespace.
     *
     * @return {@code Primitive.VOID} — a declaration produces no value
     * @throws EvalError if evaluating the return type or child nodes fails
     */
    public Object eval(CallStack callstack, Interpreter interpreter) throws EvalError {
        // Resolve the declared return type and evaluate child nodes (params, etc.).
        returnType = evalReturnType(callstack, interpreter);
        evalNodes(callstack, interpreter);
        // Install an *instance* of this method in the namespace.
        // See notes in BshMethod.
        // This is not good... we need a way to update eval without re-installing,
        // so that we can re-eval params, etc. when the classloader changes.
        // Look into this.
        NameSpace namespace = callstack.top();
        BshMethod bshMethod = new BshMethod(this, namespace, modifiers);
        namespace.setMethod(bshMethod);
        return Primitive.VOID;
    }
}
public class BootstrapMojo { /** * Return the celerio template packs found on the classpath . */
protected List < TemplatePackInfo > getBootstrapPacksInfo ( ) { } } | if ( bootstrapPacksInfo == null ) { bootstrapPacksInfo = getCelerioApplicationContext ( ) . getBean ( ClasspathTemplatePackInfoLoader . class ) . resolveTopLevelPacks ( ) ; } return bootstrapPacksInfo ; |
public class Javalin {
    /**
     * Adds an OPTIONS request handler for the specified path to the instance.
     * Thin delegate to the generic {@code addHandler} with {@code HandlerType.OPTIONS}.
     *
     * @param path    the path to register the handler on
     * @param handler the handler to invoke for OPTIONS requests
     * @return this instance, for chaining
     * @see <a href="https://javalin.io/documentation#handlers">Handlers in docs</a>
     */
    public Javalin options(@NotNull String path, @NotNull Handler handler) {
        return addHandler(HandlerType.OPTIONS, path, handler);
    }
}
public class Stream {
    /**
     * Zip together the "a" and "b" arrays until both of them run out of values.
     * Each pair of values is combined into a single value using the supplied zipFunction.
     *
     * @param a the first array of values
     * @param b the second array of values
     * @param valueForNoneA value to fill if "a" runs out of values first
     * @param valueForNoneB value to fill if "b" runs out of values first
     * @param zipFunction function combining one value from each side into a result
     * @return a stream of the combined values
     */
    public static <R> Stream<R> zip(final long[] a, final long[] b, final long valueForNoneA, final long valueForNoneB, final LongBiFunction<R> zipFunction) {
        // Delegate to the iterator-based overload by wrapping both arrays.
        return zip(LongIteratorEx.of(a), LongIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction);
    }
}
public class NetworkConfig { /** * Returns the specified JsonValue as a String , or null if it ' s not a string */
private static String getJsonValueAsNumberString ( JsonValue value ) { } } | return ( value != null && value . getValueType ( ) == ValueType . NUMBER ) ? value . toString ( ) : null ; |
public class RemoveTagsRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller.
     * Rejects null input and wraps any marshalling failure in a
     * {@code SdkClientException} with the original cause attached.
     *
     * @param removeTagsRequest  the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(RemoveTagsRequest removeTagsRequest, ProtocolMarshaller protocolMarshaller) {
        if (removeTagsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its wire-format location.
            protocolMarshaller.marshall(removeTagsRequest.getResourceId(), RESOURCEID_BINDING);
            protocolMarshaller.marshall(removeTagsRequest.getTagKeys(), TAGKEYS_BINDING);
        } catch (Exception e) {
            // Preserve the cause so callers can inspect the underlying failure.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CardInputWidget {
    /**
     * Gets a {@link Card} object from the user input, if all fields are valid.
     * If not, returns {@code null}.
     *
     * @return a valid {@link Card} object based on user input, or {@code null} if any field is
     * invalid
     */
    @Nullable public Card getCard() {
        // Number and expiry date are validated by their own entry controls;
        // a null/short result here means they are invalid.
        String cardNumber = mCardNumberEditText.getCardNumber();
        int[] cardDate = mExpiryDateEditText.getValidDateFields();
        if (cardNumber == null || cardDate == null || cardDate.length != 2) {
            return null;
        }
        // CVC/CVV is the only field not validated by the entry control itself, so we check here.
        String cvcValue = mCvcNumberEditText.getText().toString().trim();
        if (!isCvcLengthValid()) {
            return null;
        }
        // cardDate[0] = expiry month, cardDate[1] = expiry year (per getValidDateFields' 2-slot contract).
        return new Card(cardNumber, cardDate[0], cardDate[1], cvcValue).addLoggingToken(LOGGING_TOKEN);
    }
}
public class GVRFloatAnimation {
    /**
     * Resize the key data area.
     * This function will truncate the keys if the initial setting was too large.
     *
     * @param numKeys the desired number of keys
     */
    public void resizeKeys(int numKeys) {
        // Total float count for the requested number of keys.
        int n = numKeys * mFloatsPerKey;
        if (mKeys.length == n) {
            return; // already the requested size, nothing to do
        }
        float[] newKeys = new float[n];
        // Copy only what fits: truncates when shrinking, zero-pads when growing.
        n = Math.min(n, mKeys.length);
        System.arraycopy(mKeys, 0, newKeys, 0, n);
        mKeys = newKeys;
        // Keep the interpolator's view in sync with the new backing array.
        mFloatInterpolator.setKeyData(mKeys);
    }
}
public class FontDescriptorSpecificationImpl {
    /**
     * Sets the FT_US_FLAGS feature and fires an EMF SET notification when
     * notification is required (standard EMF-generated setter pattern).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setFtUsFlags(Integer newFtUsFlags) {
        Integer oldFtUsFlags = ftUsFlags;
        ftUsFlags = newFtUsFlags;
        // Notify EMF adapters of the change (old value -> new value).
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FONT_DESCRIPTOR_SPECIFICATION__FT_US_FLAGS, oldFtUsFlags, ftUsFlags));
    }
}
public class FontBuilder {
    /**
     * Loads the font file for the given parameters if it is not already
     * available as a system font. Tries, for each configured font folder,
     * first the "transformed" file name and then the raw name; logs an error
     * if no variant could be loaded.
     *
     * @param fontParams the parameters of the font to load
     */
    private void checkFontStatus(final FontParams fontParams) {
        // Check system fonts first; only fall back to classpath loading when absent.
        final List<String> fonts = Font.getFontNames(transformFontName(fontParams.name().name()));
        Font font = null;
        String fontName = null;
        if (fonts.isEmpty()) {
            // A RealFont may opt out of the fonts folder; then only the bare
            // (empty) path is tried.
            final List<String> fontPaths = fontParams instanceof RealFont && ((RealFont) fontParams).skipFontsFolder() ? Collections.singletonList("") : ResourceParameters.FONT_FOLDER.get();
            // Stop at the first folder in which a font variant loads.
            for (int i = 0; i < fontPaths.size() && font == null; i++) {
                String fontPath = fontPaths.get(i);
                if (!fontPath.isEmpty()) {
                    fontPath += Resources.PATH_SEP; // NOSONAR
                }
                // fontName holds whichever of the two alternative file names is being tried.
                // Attempt 1: transformed name (e.g. with '_' substitutions applied).
                fontName = fontPath + transformFontName(fontParams.name().name()) + "." + fontParams.extension();
                LOGGER.trace("Try to load Transformed Font {}", fontName);
                font = Font.loadFont(Thread.currentThread().getContextClassLoader().getResourceAsStream(fontName), fontParams.size());
                if (font == null) {
                    // Attempt 2: raw (untransformed) name.
                    fontName = fontPath + fontParams.name().name() + "." + fontParams.extension();
                    LOGGER.trace("Try to load Raw Font {}", fontName);
                    font = Font.loadFont(Thread.currentThread().getContextClassLoader().getResourceAsStream(fontName), fontParams.size());
                    if (font != null) {
                        LOGGER.info("{} Raw Font loaded", fontName);
                    }
                } else {
                    LOGGER.info("{} Transformed Font loaded", fontName);
                }
            }
            if (font == null) {
                // Neither transformed nor raw variant could be loaded from any folder.
                LOGGER.error("Font : {} not found into base folder: {}", fontParams.name().name() + "." + fontParams.extension(), ResourceParameters.FONT_FOLDER.get());
            }
        }
    }
}
public class Application {
    /**
     * Performs initializations common to all applications. Applications should override
     * {@link #willInit} to perform initializations that need to take place before the common
     * initialization (which includes the creation of the site identifier and message manager) and
     * should override {@link #didInit} to perform initializations that need to take place after
     * the common initialization (like passing the application to entities that might turn around
     * and request a reference to our site identifier).
     *
     * @param config   the servlet config from which the application will load configuration
     *                 information.
     * @param context  the servlet context in which this application is operating.
     * @param logicPkg the base package for all of the logic implementations for this application.
     */
    public void init(ServletConfig config, ServletContext context, String logicPkg) {
        // Keep the servlet context around for later use.
        _context = context;
        // Stick ourselves into an application attribute so that we can be
        // accessed by Velocity plugins.
        Velocity.setApplicationAttribute(VELOCITY_ATTR_KEY, this);
        // Let the derived application do pre-init stuff.
        willInit(config);
        // Normalize the logic package: remove any trailing dot.
        if (logicPkg.endsWith(".")) {
            _logicPkg = logicPkg.substring(0, logicPkg.length() - 1);
        } else {
            _logicPkg = logicPkg;
        }
        // Create our site identifier.
        _siteIdent = createSiteIdentifier(_context);
        // Create a site resource loader if the user set up the site-specific jar file path.
        String siteJarPath = getInitParameter(config, SITE_JAR_PATH_KEY);
        if (!StringUtil.isBlank(siteJarPath)) {
            _siteLoader = new SiteResourceLoader(_siteIdent, siteJarPath);
        }
        // Instantiate our message manager if the application wants one.
        String bundlePath = getInitParameter(config, MESSAGE_BUNDLE_PATH_KEY);
        if (!StringUtil.isBlank(bundlePath)) {
            _msgmgr = createMessageManager(bundlePath);
        }
        // If we have a site-specific resource loader, configure the message manager
        // with it, so that it can load site-specific message resources.
        if (_msgmgr != null && _siteLoader != null) {
            String siteBundlePath = getInitParameter(config, SITE_MESSAGE_BUNDLE_PATH_KEY);
            if (!StringUtil.isBlank(siteBundlePath)) {
                _msgmgr.activateSiteSpecificMessages(siteBundlePath, _siteLoader);
            } else {
                log.info("No '" + SITE_MESSAGE_BUNDLE_PATH_KEY + "' specified in servlet " + "configuration. This is required to allow the message manager to load " + "site-specific translation resources.");
            }
        }
        // Let the derived application do post-init stuff.
        didInit(config);
    }
}
public class ArrayExpressionSatisfies {
    /**
     * Creates a complete quantified operator with the given satisfies expression.
     *
     * @param expression the satisfies expression used for evaluating each item in the array;
     *                   must not be null
     * @return the quantified expression
     * @throws IllegalArgumentException if {@code expression} is null
     */
    @NonNull public Expression satisfies(@NonNull Expression expression) {
        // Defensive runtime check in addition to the @NonNull annotation.
        if (expression == null) {
            throw new IllegalArgumentException("expression cannot be null.");
        }
        return new QuantifiedExpression(type, variable, inExpression, expression);
    }
}
public class SLF4JLog {
    /**
     * Converts the first input parameter to String and then delegates to the
     * wrapped <code>org.slf4j.Logger</code> instance.
     *
     * @param message the message to log, converted to {@link String}
     *                (String.valueOf handles null safely, producing "null")
     * @param t       the exception to log
     */
    public void error(Object message, Throwable t) {
        logger.error(String.valueOf(message), t);
    }
}
public class Fat {
    /**
     * Reads a {@code Fat} as specified by a {@code BootSector}.
     *
     * @param bs    the boot sector specifying the {@code Fat} layout
     * @param fatNr the number of the {@code Fat} to read
     * @return the {@code Fat} that was read
     * @throws IOException              on read error
     * @throws IllegalArgumentException if {@code fatNr} is greater than
     *                                  {@link BootSector#getNrFats()}
     */
    public static Fat read(BootSector bs, int fatNr) throws IOException, IllegalArgumentException {
        // NOTE(review): this rejects only fatNr > getNrFats(); if FAT numbers are
        // zero-based, fatNr == getNrFats() would also be out of range (possible
        // off-by-one). The check does match the documented contract — confirm intent.
        if (fatNr > bs.getNrFats()) {
            throw new IllegalArgumentException("boot sector says there are only " + bs.getNrFats() + " FATs when reading FAT #" + fatNr);
        }
        // Locate the FAT copy on disk, construct, and populate it.
        final long fatOffset = bs.getFatOffset(fatNr);
        final Fat result = new Fat(bs, fatOffset);
        result.read();
        return result;
    }
}
public class Pair { /** * Counts the number of elements in an isomorphic list .
* @ param < T > the type of the elements in the isomorphic list
* @ param list - a list represented by the first Pair object isomorphic to the elements , or an element when the list is trivial
* @ return the number of elements discovered */
@ SuppressWarnings ( "unchecked" ) public static < T > int count ( T list ) { } } | if ( list == null ) { return 0 ; } else if ( list instanceof Pair ) { return 1 + count ( ( ( Pair < T , T > ) list ) . second ) ; } else { return 1 ; } |
public class PersistenceStrategy { /** * Returns the " per - table " persistence strategy , i . e . one dedicated cache will be used for each
* entity / association / id source table . */
private static PersistenceStrategy < ? , ? , ? > getPerTableStrategy ( EmbeddedCacheManager externalCacheManager , URL configUrl , JtaPlatform platform , Set < EntityKeyMetadata > entityTypes , Set < AssociationKeyMetadata > associationTypes , Set < IdSourceKeyMetadata > idSourceTypes ) { } } | PerTableKeyProvider keyProvider = new PerTableKeyProvider ( ) ; PerTableCacheManager cacheManager = externalCacheManager != null ? new PerTableCacheManager ( externalCacheManager , entityTypes , associationTypes , idSourceTypes ) : new PerTableCacheManager ( configUrl , platform , entityTypes , associationTypes , idSourceTypes ) ; return new PersistenceStrategy < PersistentEntityKey , PersistentAssociationKey , PersistentIdSourceKey > ( cacheManager , keyProvider ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.