signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Application { /** * < code > null < / code > is returned , of the version file could not be opened * and read . * @ param _ versionUrl URL of the version file which defines the application * @ param _ rootUrl root URL where the source files are located ( for local * files ) ; URL of the class file ( if source is in a Jar file ) * @ param _ classpathElements elements of the class path * @ return application instance with all version information * @ throws InstallationException if version XML file could not be parsed */ public static Application getApplication ( final URL _versionUrl , final URL _rootUrl , final List < String > _classpathElements ) throws InstallationException { } }
Application appl = null ; try { final DigesterLoader loader = DigesterLoader . newLoader ( new FromAnnotationsRuleModule ( ) { @ Override protected void configureRules ( ) { bindRulesFrom ( Application . class ) ; } } ) ; final Digester digester = loader . newDigester ( ) ; appl = ( Application ) digester . parse ( _versionUrl ) ; appl . rootUrl = _rootUrl ; appl . classpathElements = _classpathElements ; for ( final InstallFile installFile : appl . tmpElements ) { appl . install . addFile ( installFile . setURL ( new URL ( _rootUrl , installFile . getName ( ) ) ) ) ; } appl . tmpElements = null ; Collections . sort ( appl . dependencies , ( _dependency0 , _dependency1 ) -> _dependency0 . getOrder ( ) . compareTo ( _dependency1 . getOrder ( ) ) ) ; for ( final ApplicationVersion applVers : appl . getVersions ( ) ) { applVers . setApplication ( appl ) ; appl . setMaxVersion ( applVers . getNumber ( ) ) ; } } catch ( final IOException e ) { if ( e . getCause ( ) instanceof InstallationException ) { throw ( InstallationException ) e . getCause ( ) ; } else { throw new InstallationException ( "Could not open / read version file '" + _versionUrl + "'" ) ; } // CHECKSTYLE : OFF } catch ( final Exception e ) { // CHECKSTYLE : ON throw new InstallationException ( "Error while parsing file '" + _versionUrl + "'" , e ) ; } return appl ;
public class Rect { /** * Resets the limits . */ public void reset ( ) { } }
xMin = Double . NaN ; xMax = Double . NaN ; yMin = Double . NaN ; yMax = Double . NaN ;
public class SibRaConnectionFactory {
    /**
     * Creates a connection via the connection manager, retrying with a cloned
     * connection-request-info if the allocated core connection turns out to be
     * invalid.
     *
     * @param requestInfo
     *            the request information
     * @return the new connection
     * @throws SIResourceException
     *             if an exception relating to the JCA processing occurs
     * @throws SINotPossibleInCurrentConfigurationException
     *             if an exception occurs when delegating to the underlying core
     *             SPI implementation
     * @throws SIIncorrectCallException
     *             if an exception occurs when delegating to the underlying core
     *             SPI implementation
     * @throws SIAuthenticationException
     *             if an exception occurs when delegating to the underlying core
     *             SPI implementation
     */
    private SibRaConnection createConnection(ConnectionRequestInfo requestInfo)
            throws SIResourceException, SINotPossibleInCurrentConfigurationException,
            SIIncorrectCallException, SIAuthenticationException {
        if (TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, "createConnection", requestInfo);
        }
        SibRaConnection result = null;
        boolean tryAgain = true;
        do {
            try {
                // Obtain connection via connection manager
                final Object connection = _connectionManager.allocateConnection(_managedConnectionFactory, requestInfo);
                // Check it is one of ours
                if (connection instanceof SibRaConnection) {
                    result = (SibRaConnection) connection;
                    SibRaManagedConnection managedConnection = result.getManagedConnection();
                    // Pass a reference to this connection factory as the
                    // connection needs access to the connection manager and
                    // managed connection factory to perform lazy enlistment and
                    // association
                    result.setConnectionFactory(this);
                    tryAgain = result.isCoreConnectionInValid();
                    if (tryAgain) {
                        SibTr.info(TRACE, NLS.getString("CONNECTION_ERROR_RETRY_CWSIV0356"),
                                new Object[] { result.getManagedConnection().getConnectionException() });
                        // We need to try again so we clone and change the cri (incremenet counter) which
                        // forces j2c to create a new managed connection. Cloning is needed to prevent
                        // a broken connection in the shared pool being returned because it has a
                        // cri == this cri (PM31826)
                        requestInfo = (ConnectionRequestInfo) ((SibRaConnectionRequestInfo) requestInfo).clone();
                        ((SibRaConnectionRequestInfo) requestInfo).incrementRequestCounter();
                        // PK60857 the connection is broken so notify JCA to ensure it is cleaned up
                        managedConnection.connectionErrorOccurred(new SIResourceException(), true);
                    }
                } else {
                    // The connection manager handed back something that is not a
                    // SibRaConnection: trace and fail with an internal error.
                    final ResourceException exception = new ResourceAdapterInternalException(
                            NLS.getFormattedMessage("INCORRECT_CONNECTION_TYPE_CWSIV0101",
                                    new Object[] { connection, SibRaConnection.class }, null));
                    if (TRACE.isEventEnabled()) {
                        SibTr.exception(this, TRACE, exception);
                    }
                    throw exception;
                }
            } catch (ResourceException exception) {
                FFDCFilter.processException(exception,
                        "com.ibm.ws.sib.ra.impl.SibRaConnectionFactory.createConnection", "1:318:1.21", this);
                if (TRACE.isEventEnabled()) {
                    SibTr.exception(this, TRACE, exception);
                }
                // Unwrap exceptions originating from the underlying core SPI so the
                // caller sees the original type; anything else is wrapped below.
                if (exception.getCause() instanceof SIResourceException) {
                    // If the original exception came from the underlying core SPI
                    // throw this back to the caller...
                    throw (SIResourceException) exception.getCause();
                } else if (exception.getCause() instanceof SIErrorException) {
                    throw (SIErrorException) exception.getCause();
                } else if (exception.getCause() instanceof SINotPossibleInCurrentConfigurationException) {
                    throw (SINotPossibleInCurrentConfigurationException) exception.getCause();
                } else if (exception.getCause() instanceof SIIncorrectCallException) {
                    throw (SIIncorrectCallException) exception.getCause();
                } else if (exception.getCause() instanceof SIAuthenticationException) {
                    throw (SIAuthenticationException) exception.getCause();
                } else {
                    // ...otherwise, wrap it in an SIResourceException
                    throw new SIResourceException(NLS.getString("CONNECTION_FACTORY_EXCEPTION_CWSIV0050"), exception);
                }
            }
        } while (tryAgain);
        if (TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, "createConnection", result);
        }
        return result;
    }
}
public class DefaultJobProgress { /** * Called when a { @ link PopLevelProgressEvent } is fired . */ private void onPopLevelProgress ( Object source ) { } }
DefaultJobProgressStep parent = this . currentStep . getParent ( ) ; if ( parent == null ) { LOGGER . warn ( "PopLevelProgressEvent was fired too many times. Don't forget " + "to match each PopLevelProgressEvent with a PushLevelProgressEvent." ) ; return ; } // Try to find the right level based on the source DefaultJobProgressStep level = findLevel ( this . currentStep . getParent ( ) , source ) ; if ( level == null ) { LOGGER . warn ( "Could not find any matching step level for source [{}]. Ignoring PopLevelProgressEvent." , source . toString ( ) ) ; return ; } // Move to parent step this . currentStep = level ; // Close the level this . currentStep . finishLevel ( ) ;
public class CronEntry {
    /**
     * Parses a single cron field (minute, hour, day, ...) into a {@link CronField}.
     * Supported forms: "*", "*&#47;n" (step), a plain number, a "a-b" range,
     * and a comma-separated list.
     *
     * @param field    the raw field text
     * @param minValue the smallest legal value for this field
     * @param maxValue the largest legal value for this field
     * @return the parsed field
     * @throws IllegalArgumentException if the field is malformed or a value is
     *     out of range
     * @throws UnsupportedOperationException for range-with-step expressions,
     *     which are not supported yet
     */
    private static CronField parseField(String field, int minValue, int maxValue) {
        // asterisk: *
        if ("*".equals(field)) {
            return new CronField(minValue, maxValue, 1, true);
        }
        // asterisk with step: */3
        if (field.startsWith("*/")) {
            int step = Integer.parseInt(field.substring(2));
            // FIX: use the short-circuit && operator instead of bitwise &.
            if (step > 0 && step <= maxValue) {
                return new CronField(minValue, maxValue, step);
            }
            throw new IllegalArgumentException("Illegal step: '" + step + "'");
        }
        // simple number: 2
        if (field.matches("[0-9]{1,2}")) {
            int value = Integer.parseInt(field);
            if ((value >= minValue) && (value <= maxValue)) {
                return new CronField(value, value, 1);
            }
            throw new IllegalArgumentException("Parameter not within range: '" + field + "'");
        }
        // range: 4-6
        if (field.matches("[0-9]{1,2}-[0-9]{1,2}")) {
            String[] rangeValues = field.split("[-]+");
            if (rangeValues.length == 2) {
                int start = Integer.parseInt(rangeValues[0]);
                int end = Integer.parseInt(rangeValues[1]);
                if ((start >= minValue) && (start <= maxValue) && (end >= minValue) && (end <= maxValue)) {
                    return new CronField(start, end, 1);
                }
                throw new IllegalArgumentException("Parameters not within range: '" + field + "'");
            }
            throw new IllegalArgumentException("Invalid range: '" + field + "'");
        }
        // list: 4,6
        if (field.contains(",")) {
            final String[] listValues = field.split("[,]+");
            final List<Integer> values = new LinkedList<>();
            for (final String value : listValues) {
                try {
                    values.add(Integer.parseInt(value));
                } catch (NumberFormatException e) {
                    // FIX: catch the specific NumberFormatException rather than
                    // Throwable, which would also have swallowed Errors.
                    throw new IllegalArgumentException("Invalid list value: '" + value + "'");
                }
            }
            return new CronField(values);
        }
        // range with step: 4-6/3
        if (field.matches("[0-9]{1,2}-[0-9]{1,2}/[0-9]{1,2}")) {
            throw new UnsupportedOperationException("Steps are not supported yet.");
        }
        throw new IllegalArgumentException("Invalid field: '" + field + "'");
    }
}
public class URLUtils { /** * Constructs URL for in service requests ( e . g . calls for endpoint of this application ) */ public static String getInServiceURL ( HttpServletRequest request , String path ) { } }
String contextPath = ( ( Request ) request ) . getContext ( ) . getContextPath ( ) ; int port = request . getServerPort ( ) ; return getInServiceURL ( port , contextPath , path ) ;
public class DatabaseSpec {
    /**
     * Insert data in a MongoDB table.
     *
     * @param dataBase Mongo database
     * @param tabName  Mongo table
     * @param table    Datatable used for insert elements
     */
    @Given("^I insert into a MongoDB database '(.+?)' and table '(.+?)' this values:$")
    public void insertOnMongoTable(String dataBase, String tabName, DataTable table) {
        // Connect to the target database first, then insert the datatable rows
        // into the named collection through the shared MongoDB client.
        commonspec.getMongoDBClient().connectToMongoDBDataBase(dataBase);
        commonspec.getMongoDBClient().insertIntoMongoDBCollection(tabName, table);
    }
}
public class AbstractConverter { /** * 不抛异常转换 < br > * 当转换失败时返回默认值 * @ param value 被转换的值 * @ param defaultValue 默认值 * @ return 转换后的值 * @ since 4.5.7 */ public T convertQuietly ( Object value , T defaultValue ) { } }
try { return convert ( value , defaultValue ) ; } catch ( Exception e ) { return defaultValue ; }
public class DefaultComparisonFormatter { /** * Create a default Transformer to format a XML - Node to a String . * @ param numberOfBlanksToIndent the number of spaces which is used for indent the XML - structure * @ return the transformer * @ since XMLUnit 2.4.0 */ protected Transformer createXmlTransformer ( int numberOfBlanksToIndent ) throws TransformerConfigurationException { } }
TransformerFactoryConfigurer . Builder b = TransformerFactoryConfigurer . builder ( ) . withExternalStylesheetLoadingDisabled ( ) . withDTDLoadingDisabled ( ) ; if ( numberOfBlanksToIndent >= 0 ) { // not all TransformerFactories support this feature b = b . withSafeAttribute ( "indent-number" , numberOfBlanksToIndent ) ; } final TransformerFactory factory = b . build ( ) . configure ( TransformerFactory . newInstance ( ) ) ; final Transformer transformer = factory . newTransformer ( ) ; transformer . setOutputProperty ( OutputKeys . OMIT_XML_DECLARATION , "yes" ) ; transformer . setOutputProperty ( OutputKeys . METHOD , "xml" ) ; if ( numberOfBlanksToIndent >= 0 ) { try { transformer . setOutputProperty ( "{http://xml.apache.org/xslt}indent-amount" , String . valueOf ( numberOfBlanksToIndent ) ) ; } catch ( final IllegalArgumentException ex ) { // Could not set property ' { http : / / xml . apache . org / xslt } indent - amount ' on // transformer . getClass ( ) . getName ( ) // which is fine for us } transformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; } return transformer ;
public class SelfCalibrationLinearDualQuadratic { /** * Computes the calibration for each view . . */ private void computeSolutions ( DMatrix4x4 Q ) { } }
DMatrixRMaj w_i = new DMatrixRMaj ( 3 , 3 ) ; for ( int i = 0 ; i < cameras . size ; i ++ ) { computeW ( cameras . get ( i ) , Q , w_i ) ; Intrinsic calib = solveForCalibration ( w_i ) ; if ( sanityCheck ( calib ) ) { solutions . add ( calib ) ; } }
public class ReturnTypeAddModification { /** * Create the quick fix if needed . * < p > User data contains the name of the expected type . * @ param provider the quick fix provider . * @ param issue the issue to fix . * @ param acceptor the quick fix acceptor . */ public static void accept ( SARLQuickfixProvider provider , Issue issue , IssueResolutionAcceptor acceptor ) { } }
final String [ ] data = issue . getData ( ) ; if ( data != null && data . length > 0 ) { final String expectedType = data [ 0 ] ; final ReturnTypeAddModification modification = new ReturnTypeAddModification ( expectedType ) ; modification . setIssue ( issue ) ; modification . setTools ( provider ) ; acceptor . accept ( issue , MessageFormat . format ( Messages . SARLQuickfixProvider_13 , expectedType ) , Messages . SARLQuickfixProvider_14 , JavaPluginImages . IMG_CORRECTION_ADD , modification , IProposalRelevance . CHANGE_RETURN_TYPE ) ; }
public class DescribeNotebookInstanceLifecycleConfigResult { /** * The shell script that runs every time you start a notebook instance , including when you create the notebook * instance . * @ param onStart * The shell script that runs every time you start a notebook instance , including when you create the * notebook instance . */ public void setOnStart ( java . util . Collection < NotebookInstanceLifecycleHook > onStart ) { } }
if ( onStart == null ) { this . onStart = null ; return ; } this . onStart = new java . util . ArrayList < NotebookInstanceLifecycleHook > ( onStart ) ;
public class DefaultClusterManager { /** * Deploys a module */ private void doDeployModule ( final Message < JsonObject > message ) { } }
String moduleName = message . body ( ) . getString ( "module" ) ; if ( moduleName == null ) { message . reply ( new JsonObject ( ) . putString ( "status" , "error" ) . putString ( "message" , "No module name specified." ) ) ; return ; } JsonObject config = message . body ( ) . getObject ( "config" ) ; if ( config == null ) { config = new JsonObject ( ) ; } int instances = message . body ( ) . getInteger ( "instances" , 1 ) ; platform . deployModule ( moduleName , config , instances , createDeploymentHandler ( message ) ) ;
public class ProtectionContainersInner {
    /**
     * Lists the containers registered to the Recovery Services vault.
     *
     * @param vaultName The name of the Recovery Services vault.
     * @param resourceGroupName The name of the resource group associated with the Recovery Services vault.
     * @param filter The following equation is used to sort or filter the containers registered to the vault. providerType eq {AzureIaasVM, MAB, DPM, AzureBackupServer, AzureSql} and status eq {Unknown, NotRegistered, Registered, Registering} and friendlyName eq {containername} and backupManagementType eq {AzureIaasVM, MAB, DPM, AzureBackupServer, AzureSql}.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<ProtectionContainerResourceInner>> listAsync(String vaultName, String resourceGroupName, String filter, final ServiceCallback<List<ProtectionContainerResourceInner>> serviceCallback) {
        // Delegate to the observable-based overload and adapt it to a
        // ServiceFuture, wiring the callback for async completion notification.
        return ServiceFuture.fromResponse(listWithServiceResponseAsync(vaultName, resourceGroupName, filter), serviceCallback);
    }
}
public class RaspiPin { /** * SDC . 0 pin has a physical pull - up resistor */ protected static Pin createDigitalPinNoPullDown ( int address , String name ) { } }
return createDigitalPin ( RaspiGpioProvider . NAME , address , name , EnumSet . of ( PinPullResistance . OFF , PinPullResistance . PULL_UP ) , PinEdge . all ( ) ) ;
public class AWSCodeBuildClient { /** * Gets information about builds . * @ param batchGetBuildsRequest * @ return Result of the BatchGetBuilds operation returned by the service . * @ throws InvalidInputException * The input value that was provided is not valid . * @ sample AWSCodeBuild . BatchGetBuilds * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codebuild - 2016-10-06 / BatchGetBuilds " target = " _ top " > AWS API * Documentation < / a > */ @ Override public BatchGetBuildsResult batchGetBuilds ( BatchGetBuildsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeBatchGetBuilds ( request ) ;
public class Collectors { /** * Returns a { @ code Collector } that reduces input elements . * @ param < T > the type of the input elements * @ param identity the initial value * @ param op the operator to reduce elements * @ return a { @ code Collector } * @ see # reducing ( java . lang . Object , com . annimon . stream . function . Function , com . annimon . stream . function . BinaryOperator ) */ @ NotNull public static < T > Collector < T , ? , T > reducing ( @ Nullable final T identity , @ NotNull final BinaryOperator < T > op ) { } }
return new CollectorsImpl < T , Tuple1 < T > , T > ( new Supplier < Tuple1 < T > > ( ) { @ NotNull @ Override public Tuple1 < T > get ( ) { return new Tuple1 < T > ( identity ) ; } } , new BiConsumer < Tuple1 < T > , T > ( ) { @ Override public void accept ( @ NotNull Tuple1 < T > tuple , T value ) { tuple . a = op . apply ( tuple . a , value ) ; } } , new Function < Tuple1 < T > , T > ( ) { @ Override public T apply ( @ NotNull Tuple1 < T > tuple ) { return tuple . a ; } } ) ;
public class CmsFavoriteDAO { /** * Validates a favorite entry . * < p > If the favorite entry references a resource or project that can ' t be read , this will return false . * @ param entry the favorite entry * @ return the */ private boolean validate ( CmsFavoriteEntry entry ) { } }
try { String siteRoot = entry . getSiteRoot ( ) ; if ( ! m_okSiteRoots . contains ( siteRoot ) ) { m_rootCms . readResource ( siteRoot ) ; m_okSiteRoots . add ( siteRoot ) ; } CmsUUID project = entry . getProjectId ( ) ; if ( ! m_okProjects . contains ( project ) ) { m_cms . readProject ( project ) ; m_okProjects . add ( project ) ; } for ( CmsUUID id : Arrays . asList ( entry . getDetailId ( ) , entry . getStructureId ( ) ) ) { if ( ( id != null ) && ! m_okStructureIds . contains ( id ) ) { m_cms . readResource ( id , CmsResourceFilter . IGNORE_EXPIRATION . addRequireVisible ( ) ) ; m_okStructureIds . add ( id ) ; } } return true ; } catch ( Exception e ) { LOG . info ( "Favorite entry validation failed: " + e . getLocalizedMessage ( ) , e ) ; return false ; }
public class Stream { /** * Intermediate operation returning a Stream with the elements obtained by applying an * optional < i > navigation path < / i > and conversion to a certain type to the elements of this * Stream . The path is a sequence of keys ( { @ code String } s , { @ code URI } s , generic objects ) * that are applied to { @ code Record } , { @ code BindingSet } , { @ code Map } and { @ code Multimap } * elements to extract child elements in a recursive fashion . Starting from an element * returned by this stream , the result of this navigation process is a list of ( sub - ) child * elements that are converted to the requested type ( via { @ link Data # convert ( Object , Class ) } ) * and concatenated in the resulting stream ; { @ code Iterable } s , { @ code Iterator } s and arrays * found during the navigation are exploded and their elements individually considered . The * { @ code lenient } parameters controls whether conversion errors should be ignored or result * in an exception being thrown by the returned Stream . * @ param type * the class resulting elements should be converted to * @ param lenient * true if conversion errors should be ignored * @ param path * a vararg array of zero or more keys that recursively select the elements to * return * @ param < R > * the type of resulting elements * @ return a Stream over the elements obtained applying the navigation path and the conversion * specified */ public final < R > Stream < R > transform ( final Class < R > type , final boolean lenient , final Object ... path ) { } }
synchronized ( this . state ) { checkState ( ) ; return concat ( new TransformPathStream < T , R > ( this , type , lenient , path ) ) ; }
public class JsonApiResponseFilter { /** * Determines whether the given response entity is either a Crnk * resource or a list of resource ; */ private Optional < RegistryEntry > getRegistryEntry ( Object response ) { } }
if ( response != null ) { Class responseClass = response . getClass ( ) ; boolean resourceList = ResourceList . class . isAssignableFrom ( responseClass ) ; if ( resourceList ) { ResourceList responseList = ( ResourceList ) response ; if ( responseList . isEmpty ( ) ) { return Optional . empty ( ) ; } // get common super class of all list element = > resource class Class elementType = responseList . get ( 0 ) . getClass ( ) ; for ( int i = 0 ; i < responseList . size ( ) ; i ++ ) { Class otherType = responseList . get ( i ) . getClass ( ) ; while ( ! elementType . isAssignableFrom ( otherType ) ) { elementType = elementType . getSuperclass ( ) ; } } responseClass = elementType ; } ResourceRegistry resourceRegistry = feature . getBoot ( ) . getResourceRegistry ( ) ; if ( resourceRegistry . hasEntry ( responseClass ) ) { return Optional . of ( resourceRegistry . getEntry ( responseClass ) ) ; } } return Optional . empty ( ) ;
public class ByteBufPool { /** * Appends byte array to ByteBuf . If ByteBuf can ' t accommodate the * byte array , a new ByteBuf is created which contains all data from * the original ByteBuf and has enough capacity to accommodate the * byte array . * ByteBuf must be not recycled before the operation . * @ param to the target ByteBuf to which byte array will be appended * @ param from the source byte array to be appended * @ param offset the value of offset for the byte array * @ param length amount of the bytes to be appended to the ByteBuf * The sum of the length and offset parameters can ' t * be greater than the whole length of the byte array * @ return ByteBuf which contains the result of the appending */ @ NotNull public static ByteBuf append ( @ NotNull ByteBuf to , @ NotNull byte [ ] from , int offset , int length ) { } }
assert ! to . isRecycled ( ) ; to = ensureWriteRemaining ( to , length ) ; to . put ( from , offset , length ) ; return to ;
public class DefaultOptionParser {
    /**
     * Parse the arguments according to the specified options and properties.
     *
     * @param options    the specified Options
     * @param args       the command line arguments
     * @param properties command line option name-value pairs
     * @return the list of atomic option and value tokens
     * @throws OptionParserException if there are any problems encountered
     *         while parsing the command line tokens
     */
    public ParsedOptions parse(Options options, String[] args, Properties properties) throws OptionParserException {
        // Delegate to the full overload with non-strict (skipParsingAtNonOption
        // disabled) behavior.
        return parse(options, args, properties, false);
    }
}
public class AWSCloud9Client { /** * Deletes an environment member from an AWS Cloud9 development environment . * @ param deleteEnvironmentMembershipRequest * @ return Result of the DeleteEnvironmentMembership operation returned by the service . * @ throws BadRequestException * The target request is invalid . * @ throws ConflictException * A conflict occurred . * @ throws NotFoundException * The target resource cannot be found . * @ throws ForbiddenException * An access permissions issue occurred . * @ throws TooManyRequestsException * Too many service requests were made over the given time period . * @ throws LimitExceededException * A service limit was exceeded . * @ throws InternalServerErrorException * An internal server error occurred . * @ sample AWSCloud9 . DeleteEnvironmentMembership * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloud9-2017-09-23 / DeleteEnvironmentMembership " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteEnvironmentMembershipResult deleteEnvironmentMembership ( DeleteEnvironmentMembershipRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteEnvironmentMembership ( request ) ;
public class SqlServerDialect {
    /**
     * Builds a SQL Server pagination query around {@code findSql} using the
     * TOP + row_number() pattern. The "(?i)" prefix in the replaceFirst regex
     * makes the "select" keyword match case-insensitively.
     *
     * @param pageNumber 1-based page number
     * @param pageSize   rows per page
     * @param findSql    the original SELECT statement
     * @return the paginated SQL text
     */
    public String forPaginate(int pageNumber, int pageSize, StringBuilder findSql) {
        // Upper row bound; a non-positive product falls back to one page.
        int end = pageNumber * pageSize;
        if (end <= 0) {
            end = pageSize;
        }
        // Lower row bound, clamped at zero.
        final int begin = Math.max((pageNumber - 1) * pageSize, 0);
        // Strip the leading SELECT keyword (case-insensitive) from the inner query.
        final String innerSql = findSql.toString().replaceFirst("(?i)select", "");
        final StringBuilder sql = new StringBuilder();
        sql.append("SELECT * FROM ( SELECT row_number() over (order by tempcolumn) temprownumber, * FROM ")
           .append(" ( SELECT TOP ").append(end).append(" tempcolumn=0,")
           .append(innerSql)
           .append(")vip)mvp where temprownumber>").append(begin);
        return sql.toString();
    }
}
public class Metadata { /** * Attempts to parse a MANIFEST . MF file from an input stream . * @ param is * An input stream containing the extracted manifest file . * @ return HashMap of the type { atribute name : attribute value } . */ public static Metadata fromManifest ( InputStream is ) { } }
try { Manifest mf = new Manifest ( is ) ; return fromManifest ( mf ) ; } catch ( IOException e ) { // Problems ? Too bad ! } return new Metadata ( ) ;
public class Router {
    /**
     * Add route.
     * <p>A path pattern can only point to one target. This method does
     * nothing if the pattern has already been added.
     *
     * @return this router, for call chaining
     */
    public Router<T> addRoute(HttpMethod method, String pathPattern, T target) {
        // Delegate to the per-method router (created on demand for the method).
        getMethodlessRouter(method).addRoute(pathPattern, target);
        return this;
    }
}
public class AtlasTypeDefGraphStoreV1 { /** * increment the version value for this vertex */ private void markVertexUpdated ( AtlasVertex vertex ) { } }
Date now = new Date ( ) ; Number currVersion = vertex . getProperty ( Constants . VERSION_PROPERTY_KEY , Number . class ) ; long newVersion = currVersion == null ? 1 : ( currVersion . longValue ( ) + 1 ) ; vertex . setProperty ( Constants . MODIFICATION_TIMESTAMP_PROPERTY_KEY , now . getTime ( ) ) ; vertex . setProperty ( Constants . VERSION_PROPERTY_KEY , newVersion ) ;
public class OnWorkspaceInconsistency { /** * Returns the { @ link OnWorkspaceInconsistency } with the given * < code > name < / code > . * @ param name the name of a { @ link OnWorkspaceInconsistency } . * @ return the { @ link OnWorkspaceInconsistency } with the given * < code > name < / code > . * @ throws IllegalArgumentException if < code > name < / code > is not a well - known * { @ link OnWorkspaceInconsistency } name . */ public static OnWorkspaceInconsistency fromString ( String name ) throws IllegalArgumentException { } }
OnWorkspaceInconsistency handler = INSTANCES . get ( name . toLowerCase ( ) ) ; if ( handler == null ) { throw new IllegalArgumentException ( "Unknown name: " + name ) ; } else { return handler ; }
public class ArrayUtil {
    /**
     * For each value in {@code subMap}, finds the index of that value in
     * {@code mainMap} and stores the index into {@code newSubMap} at the same
     * position. Positions whose value does not occur in {@code mainMap} are
     * left untouched.
     *
     * @param mainMap   the array searched for each sub-map value
     * @param subMap    the values to locate
     * @param newSubMap receives the located indexes (same length as subMap)
     */
    public static void projectMap(int[] mainMap, int[] subMap, int[] newSubMap) {
        for (int subIdx = 0; subIdx < subMap.length; subIdx++) {
            final int target = subMap[subIdx];
            for (int mainIdx = 0; mainIdx < mainMap.length; mainIdx++) {
                if (mainMap[mainIdx] == target) {
                    newSubMap[subIdx] = mainIdx;
                    break; // first match wins
                }
            }
        }
    }
}
public class GeneIDGFF2Reader { /** * Read a file into a FeatureList . Each line of the file becomes one Feature object . * @ param filename The path to the GFF file . * @ return A FeatureList . * @ throws IOException Something went wrong - - check exception detail message . */ public static FeatureList read ( String filename ) throws IOException { } }
logger . info ( "Reading: {}" , filename ) ; FeatureList features = new FeatureList ( ) ; BufferedReader br = new BufferedReader ( new FileReader ( filename ) ) ; String s ; for ( s = br . readLine ( ) ; null != s ; s = br . readLine ( ) ) { s = s . trim ( ) ; if ( s . length ( ) > 0 ) { if ( s . charAt ( 0 ) == '#' ) { // ignore comment lines } else { FeatureI f = parseLine ( s ) ; if ( f != null ) { features . add ( f ) ; } } } } br . close ( ) ; return features ;
public class SkewGeneralizedNormalDistribution { /** * Probability density function of the skewed normal distribution . * @ param x The value . * @ param mu The mean . * @ param sigma The standard deviation . * @ return PDF of the given normal distribution at x . */ public static double pdf ( double x , double mu , double sigma , double skew ) { } }
if ( x != x ) { return Double . NaN ; } x = ( x - mu ) / sigma ; // Scale if ( skew == 0. ) { return MathUtil . ONE_BY_SQRTTWOPI / sigma * FastMath . exp ( - .5 * x * x ) ; } final double y = - FastMath . log1p ( - skew * x ) / skew ; if ( y != y || y == Double . POSITIVE_INFINITY || y == Double . NEGATIVE_INFINITY ) { // NaN return 0. ; } return MathUtil . ONE_BY_SQRTTWOPI / sigma * FastMath . exp ( - .5 * y * y ) / ( 1 - skew * x ) ;
public class InterceptorMetaDataFactory {
    /**
     * Normalizes a method signature taken from a deployment descriptor by
     * collapsing runs of blanks into one and removing blanks that precede
     * '[' or ']' (so e.g. "int  [ ]" becomes "int[]").
     *
     * d457352 - added entire method
     *
     * @param deplDescriptorSignature the raw signature text
     * @return the normalized signature
     */
    public static final String normalizeSignature(String deplDescriptorSignature) {
        StringBuilder theSignature = new StringBuilder(deplDescriptorSignature);
        int scanIndex = 0;
        while (scanIndex < theSignature.length()) {
            if (theSignature.charAt(scanIndex) == ' ') {
                // FIX: guard against a trailing blank; the original read
                // charAt(scanIndex + 1) unconditionally and threw
                // StringIndexOutOfBoundsException for inputs ending in a space.
                if (scanIndex + 1 >= theSignature.length()) {
                    theSignature.deleteCharAt(scanIndex); // drop trailing blank
                    continue;
                }
                char next = theSignature.charAt(scanIndex + 1);
                // FIX: short-circuit || instead of the bitwise | operator.
                if (next == ' ' || next == '[' || next == ']') {
                    theSignature.deleteCharAt(scanIndex);
                } else {
                    ++scanIndex;
                }
            } else {
                ++scanIndex;
            }
        }
        return theSignature.toString();
    }
}
public class DefaultPlatformManager {
    /**
     * Parses a comma-separated includes string into trimmed entries.
     *
     * @param sincludes the raw includes string
     * @return the trimmed entries, or {@code null} if the string is blank
     */
    private String[] parseIncludes(String sincludes) {
        sincludes = sincludes.trim();
        if (sincludes.isEmpty()) {
            return null;
        }
        String[] arr = sincludes.split(",");
        // String.split never returns null, so the original null check was dead code.
        for (int i = 0; i < arr.length; i++) {
            arr[i] = arr[i].trim();
        }
        return arr;
    }
}
public class SSLHandshakeIOCallback {
    /**
     * Handles a read error during the SSL handshake by forwarding the
     * exception to the handshake-completed callback.
     *
     * @see com.ibm.wsspi.tcpchannel.TCPReadCompletedCallback#error(com.ibm.wsspi.channelfw.VirtualConnection, com.ibm.wsspi.tcpchannel.TCPReadRequestContext, java.io.IOException)
     */
    public void error(VirtualConnection vc, TCPReadRequestContext rsc, IOException ioe) {
        // Alert the handshake completed callback. Buffers used in the handshake will be freed there.
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Error occured during a read, exception:" + ioe);
        }
        this.callback.error(ioe);
    }
}
public class MultifactorAuthenticationTrustUtils { /** * Generate geography . * @ return the geography */ public static String generateGeography ( ) { } }
val clientInfo = ClientInfoHolder . getClientInfo ( ) ; return clientInfo . getClientIpAddress ( ) . concat ( "@" ) . concat ( WebUtils . getHttpServletRequestUserAgentFromRequestContext ( ) ) ;
public class CmsJspTagSecureParams { /** * Static method which provides the actual functionality of this tag . < p > * @ param request the request for which the parameters should be escaped * @ param allowXml the comma - separated list of parameters for which XML characters will not be escaped * @ param allowHtml the comma - separated list of parameters for which HTML will be allowed , but be escaped * @ param policy the site path of an AntiSamy policy file */ public static void secureParamsTagAction ( ServletRequest request , String allowXml , String allowHtml , String policy ) { } }
if ( request instanceof CmsFlexRequest ) { CmsFlexRequest flexRequest = ( CmsFlexRequest ) request ; CmsObject cms = CmsFlexController . getCmsObject ( flexRequest ) ; List < String > exceptions = Collections . emptyList ( ) ; if ( allowXml != null ) { exceptions = CmsStringUtil . splitAsList ( allowXml , "," ) ; } flexRequest . enableParameterEscaping ( ) ; flexRequest . getParameterEscaper ( ) . setExceptions ( exceptions ) ; Set < String > allowHtmlSet = Collections . emptySet ( ) ; if ( allowHtml != null ) { allowHtmlSet = new HashSet < String > ( CmsStringUtil . splitAsList ( allowHtml , "," ) ) ; flexRequest . getParameterEscaper ( ) . enableAntiSamy ( cms , policy , allowHtmlSet ) ; } }
public class ShortBuffer { /** * Returns a duplicated buffer that shares its content with this buffer . * < p > The duplicated buffer ' s position , limit , capacity and mark are the same as this buffer . * The duplicated buffer ' s read - only property and byte order are the same as this buffer ' s . * < p > The new buffer shares its content with this buffer , which means either buffer ' s change * of content will be visible to the other . The two buffer ' s position , limit and mark are * independent . < / p > * @ return a duplicated buffer that shares its content with this buffer . */ public ShortBuffer duplicate ( ) { } }
ShortBuffer buf = new ShortBuffer ( ( ByteBuffer ) byteBuffer . duplicate ( ) ) ; buf . limit = limit ; buf . position = position ; buf . mark = mark ; return buf ;
public class BrowserHelperFilter {

    /**
     * Sets the accept-mappings for this filter from a properties resource.
     *
     * Expected format:
     * <pre>
     *   &lt;agent-name&gt;=&lt;reg-exp&gt;
     *   &lt;agent-name&gt;.accept=&lt;http-accept-header&gt;
     * </pre>
     *
     * @param pPropertiesFile name of accept-mappings properties file
     * @throws ServletConfigException if the accept-mappings properties file cannot be read
     */
    @InitParam(name = "accept-mappings-file")
    public void setAcceptMappingsFile(String pPropertiesFile) throws ServletConfigException {
        final Properties mappings = new Properties();
        try {
            log("Reading Accept-mappings properties file: " + pPropertiesFile);
            mappings.load(getServletContext().getResourceAsStream(pPropertiesFile));
        }
        catch (IOException e) {
            throw new ServletConfigException("Could not read Accept-mappings properties file: " + pPropertiesFile, e);
        }
        parseMappings(mappings);
    }
}
public class Futures { /** * Create a { @ link CompletableFuture } that is completed exceptionally with { @ code throwable } . * @ param throwable must not be { @ literal null } . * @ return the exceptionally completed { @ link CompletableFuture } . */ public static < T > CompletableFuture < T > failed ( Throwable throwable ) { } }
LettuceAssert . notNull ( throwable , "Throwable must not be null" ) ; CompletableFuture < T > future = new CompletableFuture < > ( ) ; future . completeExceptionally ( throwable ) ; return future ;
public class SourceNode { /** * Call String . prototype . replace on the very right - most source snippet . Useful for trimming whitespace from the end of a source node , etc . * @ param aPattern * The pattern to replace . * @ param aReplacement * The thing to replace the pattern with . */ public void replaceRight ( Pattern aPattern , String aReplacement ) { } }
Object lastChild = this . children . get ( this . children . size ( ) - 1 ) ; if ( lastChild instanceof SourceNode ) { ( ( SourceNode ) lastChild ) . replaceRight ( aPattern , aReplacement ) ; } else if ( lastChild instanceof String ) { this . children . set ( this . children . size ( ) - 1 , aPattern . matcher ( ( ( String ) lastChild ) ) . replaceFirst ( aReplacement ) ) ; } else { this . children . add ( aPattern . matcher ( "" ) . replaceFirst ( aReplacement ) ) ; }
public class FnJodaTimeUtils {

    /**
     * It creates an {@link Interval} from the input {@link Date} elements.
     * The {@link Interval} will be created with the given {@link Chronology}.
     *
     * @param chronology {@link Chronology} to be used
     * @return the {@link Interval} created from the input and arguments
     */
    public static final <T extends Date> Function<T[], Interval> dateFieldArrayToInterval(Chronology chronology) {
        // Pure delegation to the FnInterval factory.
        return FnInterval.dateFieldArrayToInterval(chronology);
    }
}
public class DatabaseConnection {

    /**
     * Connection to the specified data source.
     *
     * Loads the driver class (if configured), locates a suitable JDBC driver
     * for the URL (auto-registering known drivers when none is found), closes
     * any previous connection, then connects via the driver instance and wraps
     * the metadata in a proxy. Returns {@code false} (via sqlLine.error) on
     * driver-loading or close failures; secondary failures while reporting
     * metadata, autocommit or isolation are logged but do not fail the connect.
     */
    boolean connect() throws SQLException {
        try {
            // Explicitly load the driver class when one was configured.
            if (driver != null && driver.length() != 0) {
                Class.forName(driver);
            }
        } catch (ClassNotFoundException cnfe) {
            return sqlLine.error(cnfe);
        }
        boolean foundDriver = false;
        Driver theDriver = null;
        try {
            theDriver = DriverManager.getDriver(url);
            foundDriver = theDriver != null;
        } catch (Exception e) {
            // ignore - fall through and try auto-loading known drivers below
        }
        if (!foundDriver) {
            sqlLine.output(sqlLine.loc("autoloading-known-drivers", url));
            sqlLine.registerKnownDrivers();
            theDriver = DriverManager.getDriver(url);
        }
        try {
            // Close any previously open connection before reconnecting.
            close();
        } catch (Exception e) {
            return sqlLine.error(e);
        }
        // Avoid using DriverManager.getConnection(). It is a synchronized
        // method and thus holds the lock while making the connection.
        // Deadlock can occur if the driver's connection processing uses any
        // synchronized DriverManager methods. One such example is the
        // RMI-JDBC driver, whose RJDriverServer.connect() method uses
        // DriverManager.getDriver(). Because RJDriverServer.connect runs in
        // a different thread (RMI) than the getConnection() caller (here),
        // this sequence will hang every time.
        /* connection = DriverManager.getConnection(url, username, password); */
        // Instead, we use the driver instance to make the connection
        connection = theDriver.connect(url, info);
        // Wrap the metadata in a proxy so calls can be intercepted/handled.
        meta = (DatabaseMetaData) Proxy.newProxyInstance(
            DatabaseMetaData.class.getClassLoader(),
            new Class[] {DatabaseMetaData.class},
            new DatabaseMetaDataHandler(connection.getMetaData()));
        try {
            sqlLine.debug(sqlLine.loc("connected",
                meta.getDatabaseProductName(),
                meta.getDatabaseProductVersion()));
        } catch (Exception e) {
            sqlLine.handleException(e);
        }
        try {
            sqlLine.debug(sqlLine.loc("driver",
                meta.getDriverName(),
                meta.getDriverVersion()));
        } catch (Exception e) {
            sqlLine.handleException(e);
        }
        try {
            connection.setAutoCommit(sqlLine.getOpts().getAutoCommit());
            sqlLine.autocommitStatus(connection);
        } catch (Exception e) {
            sqlLine.handleException(e);
        }
        try {
            // nothing is done off of this command beyond the handle so no
            // need to use the callback.
            sqlLine.getCommands().isolation("isolation: " + sqlLine.getOpts().getIsolation(),
                new DispatchCallback());
            initSyntaxRule();
        } catch (Exception e) {
            sqlLine.handleException(e);
        }
        sqlLine.showWarnings();
        return true;
    }
}
public class DistributionSetInfoPanel {

    /**
     * Create Label for SW Module.
     *
     * @param labelName as Name
     * @param swModule as Module (JVM | OS | AH)
     * @return Label as UI
     */
    private Label getSWModlabel(final String labelName, final SoftwareModule swModule) {
        // Renders "<labelName> : " followed by the module's name and version.
        return SPUIComponentProvider.createNameValueLabel(labelName + " : ", swModule.getName(), swModule.getVersion());
    }
}
public class NotifierBase { /** * Call dynamically a command . * According to its runIntoType the command will be run into JAT , JIT or a Thread Pool * Each time a new fresh command will be retrieved . * This method is called from the JIT ( JRebirth Internal Thread ) < br > * @ param wave the wave that contains all informations */ @ SuppressWarnings ( "unchecked" ) private void callCommand ( final Wave wave ) { } }
// Use the Wave UID to guarantee that a new fresh command is built and used ! final Command command = wave . contains ( JRebirthWaves . REUSE_COMMAND ) && wave . get ( JRebirthWaves . REUSE_COMMAND ) ? globalFacade ( ) . commandFacade ( ) . retrieve ( ( Class < Command > ) wave . componentClass ( ) ) : globalFacade ( ) . commandFacade ( ) . retrieve ( ( Class < Command > ) wave . componentClass ( ) , wave . wUID ( ) ) ; if ( command == null ) { LOGGER . error ( COMMAND_NOT_FOUND_ERROR , wave . toString ( ) ) ; // When developer mode is activated an error will be thrown by logger // Otherwise the wave will be managed by UnprocessedWaveHandler this . unprocessedWaveHandler . manageUnprocessedWave ( COMMAND_NOT_FOUND_MESSAGE . getText ( ) , wave ) ; } else { // Run the command into the predefined thread command . run ( wave ) ; }
public class ClusterCacheStatus {

    /**
     * Helper for working with immutable lists: returns an unmodifiable copy
     * of {@code list} with {@code element} appended. The input list is not
     * modified.
     *
     * @param list the source list (unchanged)
     * @param element the element to append
     * @return an unmodifiable list containing all of {@code list} plus {@code element}
     */
    private <T> List<T> immutableAdd(List<T> list, T element) {
        // Presize for the existing elements plus the one being appended.
        final List<T> extended = new ArrayList<T>(list.size() + 1);
        extended.addAll(list);
        extended.add(element);
        return Collections.unmodifiableList(extended);
    }
}
public class Tensor { /** * Divide each value by lambda . */ public void divide ( double val ) { } }
for ( int c = 0 ; c < this . values . length ; c ++ ) { divideValue ( c , val ) ; }
public class Application {

    /**
     * Sets the application of this vm. Can only be called once (except a second
     * time with the same application). This method should only be called by a
     * frontend or a backend main class.
     *
     * @param application normally the one created by the createApplication method
     * @throws IllegalStateException if a different application was already set
     * @throws IllegalArgumentException if {@code application} is {@code null} and no application was set yet
     */
    public static void setInstance(Application application) {
        // Re-setting the exact same instance (including null == null) is a no-op.
        if (application == Application.instance) {
            return;
        }
        // NOTE: this check deliberately runs before the null check, so passing
        // null after an application was set reports "cannot be changed"
        // rather than "cannot be null".
        if (Application.instance != null) {
            throw new IllegalStateException("Application cannot be changed");
        }
        if (application == null) {
            throw new IllegalArgumentException("Application cannot be null");
        }
        Application.instance = application;
    }
}
public class StreamGraphGenerator { /** * Transforms a { @ code SideOutputTransformation } . * < p > For this we create a virtual node in the { @ code StreamGraph } that holds the side - output * { @ link org . apache . flink . util . OutputTag } . * @ see org . apache . flink . streaming . api . graph . StreamGraphGenerator */ private < T > Collection < Integer > transformSideOutput ( SideOutputTransformation < T > sideOutput ) { } }
StreamTransformation < ? > input = sideOutput . getInput ( ) ; Collection < Integer > resultIds = transform ( input ) ; // the recursive transform might have already transformed this if ( alreadyTransformed . containsKey ( sideOutput ) ) { return alreadyTransformed . get ( sideOutput ) ; } List < Integer > virtualResultIds = new ArrayList < > ( ) ; for ( int inputId : resultIds ) { int virtualId = StreamTransformation . getNewNodeId ( ) ; streamGraph . addVirtualSideOutputNode ( inputId , virtualId , sideOutput . getOutputTag ( ) ) ; virtualResultIds . add ( virtualId ) ; } return virtualResultIds ;
public class MinkowskiDistance { /** * Minkowski distance between the two arrays of type integer . */ public double d ( int [ ] x , int [ ] y ) { } }
if ( x . length != y . length ) throw new IllegalArgumentException ( String . format ( "Arrays have different length: x[%d], y[%d]" , x . length , y . length ) ) ; double dist = 0.0 ; if ( weight == null ) { for ( int i = 0 ; i < x . length ; i ++ ) { double d = Math . abs ( x [ i ] - y [ i ] ) ; dist += Math . pow ( d , p ) ; } } else { if ( x . length != weight . length ) throw new IllegalArgumentException ( String . format ( "Input vectors and weight vector have different length: %d, %d" , x . length , weight . length ) ) ; for ( int i = 0 ; i < x . length ; i ++ ) { double d = Math . abs ( x [ i ] - y [ i ] ) ; dist += weight [ i ] * Math . pow ( d , p ) ; } } return Math . pow ( dist , 1.0 / p ) ;
public class IoUtil {

    /**
     * Obtains a Writer for the given output stream.
     *
     * @param out the output stream to wrap; may be {@code null}
     * @param charset the character set to use; {@code null} means the platform default
     * @return an {@link OutputStreamWriter}, or {@code null} when {@code out} is {@code null}
     */
    public static OutputStreamWriter getWriter(OutputStream out, Charset charset) {
        if (null == out) {
            return null;
        }
        return (null == charset)
            ? new OutputStreamWriter(out)
            : new OutputStreamWriter(out, charset);
    }
}
public class Trie2Writable {

    /**
     * Uncompact a compacted Trie2Writable.
     * This is needed if the WritableTrie2 was compacted in preparation for creating a read-only
     * Trie2, and then is subsequently altered.
     * The structure is a bit awkward - it would be cleaner to leave the original
     * Trie2 unaltered - but compacting in place was taken directly from the ICU4C code.
     * The approach is to create a new (uncompacted) Trie2Writable from this one, then transfer
     * the guts from the new to the old.
     */
    private void uncompact() {
        // Build an uncompacted copy of this trie, then adopt all of its state.
        Trie2Writable tempTrie = new Trie2Writable(this);

        // Members from Trie2Writable
        this.index1 = tempTrie.index1;
        this.index2 = tempTrie.index2;
        this.data = tempTrie.data;
        this.index2Length = tempTrie.index2Length;
        this.dataCapacity = tempTrie.dataCapacity;
        this.isCompacted = tempTrie.isCompacted;

        // Members from Trie2 (the base class)
        this.header = tempTrie.header;
        this.index = tempTrie.index;
        this.data16 = tempTrie.data16;
        this.data32 = tempTrie.data32;
        this.indexLength = tempTrie.indexLength;
        this.dataLength = tempTrie.dataLength;
        this.index2NullOffset = tempTrie.index2NullOffset;
        this.initialValue = tempTrie.initialValue;
        this.errorValue = tempTrie.errorValue;
        this.highStart = tempTrie.highStart;
        this.highValueIndex = tempTrie.highValueIndex;
        this.dataNullOffset = tempTrie.dataNullOffset;
    }
}
public class GuiceComponentProvider {

    /**
     * Get service locator {@link DynamicConfiguration dynamic configuration}.
     *
     * @param locator HK2 service locator.
     * @return a fresh dynamic configuration for the given service locator.
     */
    private static DynamicConfiguration getConfiguration(final ServiceLocator locator) {
        // Look up HK2's configuration service and ask it for a new, empty configuration.
        final DynamicConfigurationService dcs = locator.getService(DynamicConfigurationService.class);
        return dcs.createDynamicConfiguration();
    }
}
public class VectorMath { /** * Subtracts the second { @ code Vector } fromt the first { @ code Vector } and * returns the result . * @ param vector1 The destination vector to be subtracted from . * @ param vector2 The source vector to subtract . * @ return The subtraction of { code vector2 } from { @ code vector1 } . */ public static Vector subtract ( Vector vector1 , Vector vector2 ) { } }
if ( vector2 . length ( ) != vector1 . length ( ) ) throw new IllegalArgumentException ( "Vectors of different sizes cannot be added" ) ; if ( vector2 instanceof IntegerVector && vector1 instanceof DoubleVector ) return subtract ( vector1 , Vectors . asDouble ( vector2 ) ) ; if ( vector2 instanceof SparseVector ) subtractSparseValues ( vector1 , vector2 ) ; else { for ( int i = 0 ; i < vector2 . length ( ) ; ++ i ) { double value = vector1 . getValue ( i ) . doubleValue ( ) - vector2 . getValue ( i ) . doubleValue ( ) ; vector1 . set ( i , value ) ; } } return vector1 ;
public class Stripe { /** * Retrieve an existing { @ link Source } from the Stripe API . Note that this is a * synchronous method , and cannot be called on the main thread . Doing so will cause your app * to crash . * @ param sourceId the { @ link Source # mId } field of the desired Source object * @ param clientSecret the { @ link Source # mClientSecret } field of the desired Source object * @ param publishableKey a publishable API key to use * @ return a { @ link Source } if one could be found based on the input params , or { @ code null } if * no such Source could be found . * @ throws AuthenticationException failure to properly authenticate yourself ( check your key ) * @ throws InvalidRequestException your request has invalid parameters * @ throws APIConnectionException failure to connect to Stripe ' s API * @ throws APIException any other type of problem ( for instance , a temporary issue with * Stripe ' s servers ) */ public Source retrieveSourceSynchronous ( @ NonNull @ Size ( min = 1 ) String sourceId , @ NonNull @ Size ( min = 1 ) String clientSecret , @ Nullable String publishableKey ) throws AuthenticationException , InvalidRequestException , APIConnectionException , APIException { } }
String apiKey = publishableKey == null ? mDefaultPublishableKey : publishableKey ; if ( apiKey == null ) { return null ; } return mApiHandler . retrieveSource ( sourceId , clientSecret , apiKey , mStripeAccount ) ;
public class JSONArray { /** * Get the boolean value associated with an index . < p > * The string values " true " and " false " are converted to boolean . < p > * @ param index the index must be between 0 and length ( ) - 1 * @ return the truth * @ throws JSONException if there is no value for the index or if the value is not convertable to boolean */ public boolean getBoolean ( int index ) throws JSONException { } }
Object o = get ( index ) ; if ( o . equals ( Boolean . FALSE ) || ( ( o instanceof String ) && ( ( String ) o ) . equalsIgnoreCase ( "false" ) ) ) { return false ; } else if ( o . equals ( Boolean . TRUE ) || ( ( o instanceof String ) && ( ( String ) o ) . equalsIgnoreCase ( "true" ) ) ) { return true ; } throw new JSONException ( "JSONArray[" + index + "] is not a Boolean." ) ;
public class StreamNanoHTTPD {

    /**
     * Add session handler function.
     *
     * @param path the path
     * @param value the value
     * @return the handler previously registered for {@code path}, or {@code null} if there was none
     */
    public Function<IHTTPSession, Response> addSessionHandler(final String path, final Function<IHTTPSession, Response> value) {
        // Map.put returns the previous mapping for the path (or null).
        return customHandlers.put(path, value);
    }
}
public class JCasUtil2 { /** * Returns the JCas of this annotation . * The method converts the potentially thrown { @ link CASException } to an * unchecked { @ link IllegalArgumentException } . * @ param annotation the annotation * @ return the extracted JCas */ public static JCas getJCas ( final Annotation annotation ) { } }
JCas result = null ; try { result = annotation . getCAS ( ) . getJCas ( ) ; } catch ( final CASException e ) { throw new IllegalArgumentException ( e ) ; } return result ;
public class AuthorizationDao {

    /**
     * Loads all the permissions granted to the anonymous user for the specified organization.
     *
     * @param dbSession the open database session
     * @param organizationUuid the UUID of the organization to query
     * @return the set of permission keys granted to anonymous
     */
    public Set<String> selectOrganizationPermissionsOfAnonymous(DbSession dbSession, String organizationUuid) {
        // Pure delegation to the MyBatis mapper bound to this session.
        return mapper(dbSession).selectOrganizationPermissionsOfAnonymous(organizationUuid);
    }
}
public class UniversalDateAndTimeStampImpl {

    /**
     * Sets the second component, firing an EMF SET notification when listeners require it.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newSecond the new value of the second attribute
     * @generated
     */
    public void setSecond(Integer newSecond) {
        Integer oldSecond = second;
        second = newSecond;
        // Notify adapters of the attribute change (standard EMF generated pattern).
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.UNIVERSAL_DATE_AND_TIME_STAMP__SECOND, oldSecond, second));
    }
}
public class JSLocalConsumerPoint {

    /**
     * (non-Javadoc)
     * Registers a stoppable asynchronous consumer. If the maxSequentialFailures value is
     * less than 1 then the consumer is deemed non-stoppable (and hidden-message delay is
     * disabled). When the destination is ordered or has no exception destination, any
     * positive threshold is forced down to 1 so the consumer stops on the first message
     * that hits its redelivery limit.
     *
     * @see com.ibm.ws.sib.processor.impl.interfaces.LocalConsumerPoint#registerStoppableAsynchConsumer(com.ibm.wsspi.sib.core.StoppableAsynchConsumerCallback, int, long, int,
     *      com.ibm.websphere.sib.Reliability, boolean, com.ibm.ws.sib.processor.impl.OrderingContextImpl, com.ibm.ws.sib.processor.impl.interfaces.ExternalConsumerLock, int)
     */
    @Override
    public void registerStoppableAsynchConsumer(StoppableAsynchConsumerCallback callback,
                                                int maxActiveMessages,
                                                long messageLockExpiry,
                                                int maxBatchSize,
                                                Reliability unrecoverableReliability,
                                                boolean inLine,
                                                OrderingContextImpl orderingGroup,
                                                ExternalConsumerLock optionalCallbackBusyLock,
                                                int maxSequentialFailures,
                                                long hiddenMessageDelay)
                    throws SISessionUnavailableException, SISessionDroppedException, SIErrorException, SIIncorrectCallException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "registerStoppableAsynchConsumer",
                        new Object[] { this, callback, Integer.valueOf(maxActiveMessages), Long.valueOf(messageLockExpiry), Integer.valueOf(maxBatchSize), unrecoverableReliability,
                                       Boolean.valueOf(inLine), orderingGroup, optionalCallbackBusyLock, maxSequentialFailures, hiddenMessageDelay });
        try {
            // Guard the threshold/stoppable state updates with the consumer point lock.
            this.lock();
            {
                _maxSequentialFailuresThreshold = maxSequentialFailures;
                // If message ordering is required or if no exception destination has been specified, then any
                // positive value for the threshold should be set to one, so that a consumer will be stopped
                // as soon as the first message hits its redelivery limit
                if (_destinationAttachedTo.isOrdered()
                    || (_destinationAttachedTo.getExceptionDestination() == null)
                    || (_destinationAttachedTo.getExceptionDestination().equals(""))) {
                    if (_maxSequentialFailuresThreshold > 0) {
                        // NOTE(review): the debug text says "to 0" but the code sets the
                        // threshold to 1 - the message text looks stale; confirm upstream.
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "Setting maxSequentialMessageThreshold to 0");
                        _maxSequentialFailuresThreshold = 1;
                    }
                }
                if (_maxSequentialFailuresThreshold <= 0) {
                    // if the max sequential failure threshold is zero or less then the consumer
                    // will never be stopped so we don't want to hide messages
                    _consumerStoppable = false;
                    _hiddenMessageDelay = 0;
                } else {
                    _consumerStoppable = true;
                    _hiddenMessageDelay = hiddenMessageDelay;
                }
            }
        } finally {
            this.unlock();
        }
        // Delegate the actual registration to the non-stoppable variant.
        registerAsynchConsumer(callback,
                               maxActiveMessages,
                               messageLockExpiry,
                               maxBatchSize,
                               unrecoverableReliability,
                               inLine,
                               orderingGroup,
                               optionalCallbackBusyLock);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "registerStoppableAsynchConsumer");
    }
}
public class NodeTraversal {

    /**
     * Traverse a function out-of-band of normal traversal.
     *
     * @param node The function node.
     * @param scope The scope the function is contained in. Does not fire enter/exit
     *     callback events for this scope.
     */
    public void traverseFunctionOutOfBand(Node node, AbstractScope<?, ?> scope) {
        // Precondition checks: a non-null scope with a root node, and a function node.
        checkNotNull(scope);
        checkState(node.isFunction(), node);
        checkNotNull(scope.getRootNode());
        initTraversal(node);
        curNode = node.getParent();
        // Enter the supplied scope quietly: no enter/exit callbacks are fired for it.
        pushScope(scope, true /* quietly */);
        traverseBranch(node, curNode);
        popScope(true /* quietly */);
    }
}
public class BytesMessageImpl {

    /**
     * (non-Javadoc)
     * Writes a boolean to the message body stream, translating I/O failures
     * into the FFMQ JMS exception type.
     *
     * @see javax.jms.BytesMessage#writeBoolean(boolean)
     */
    @Override
    public void writeBoolean(boolean value) throws JMSException {
        try {
            getOutput().writeBoolean(value);
        } catch (IOException e) {
            // Wrap low-level I/O failures in a JMSException subtype, preserving the cause.
            throw new FFMQException("Cannot write message body", "IO_ERROR", e);
        }
    }
}
public class BeanDesc {

    /**
     * Creates a property descriptor for the given field.<br>
     * When looking up the getter and setter methods:
     * <pre>
     * 1. Field and method name case is ignored
     * 2. Getters are matched as getXXX, isXXX, getIsXXX
     * 3. Setters are matched as setXXX, setIsXXX
     * 4. Setters whose parameter type does not match the field type are ignored,
     *    so with overloads the first matching method wins
     * </pre>
     *
     * @param field the field to describe
     * @return the {@link PropDesc}
     * @since 4.0.2
     */
    private PropDesc createProp(Field field) {
        final String fieldName = field.getName();
        final Class<?> fieldType = field.getType();
        final boolean isBooeanField = BooleanUtil.isBoolean(fieldType);
        Method getter = null;
        Method setter = null;
        String methodName;
        Class<?>[] parameterTypes;
        for (Method method : ReflectUtil.getMethods(this.beanClass)) {
            parameterTypes = method.getParameterTypes();
            if (parameterTypes.length > 1) {
                // More than one parameter: can be neither a getter nor a setter
                continue;
            }
            methodName = method.getName();
            if (parameterTypes.length == 0) {
                // No parameters: candidate getter method
                if (isMatchGetter(methodName, fieldName, isBooeanField)) {
                    // Method name matches the field name, treat it as the getter
                    getter = method;
                }
            } else if (isMatchSetter(methodName, fieldName, isBooeanField)) {
                // Exactly one parameter with a matching name: treat it as the setter
                setter = method;
            }
            if (null != getter && null != setter) {
                // Both getter and setter found; stop searching
                break;
            }
        }
        return new PropDesc(field, getter, setter);
    }
}
public class CredentialListMapping {

    /**
     * Create a CredentialListMappingCreator to execute create.
     *
     * @param pathAccountSid The unique sid that identifies this account
     * @param pathDomainSid A string that identifies the SIP Domain for which the
     *            CredentialList resource will be mapped
     * @param credentialListSid A string that identifies the CredentialList
     *            resource to map to the SIP domain
     * @return CredentialListMappingCreator capable of executing the create
     */
    public static CredentialListMappingCreator creator(final String pathAccountSid, final String pathDomainSid, final String credentialListSid) {
        // Simple factory: defers all work to the creator's execute step.
        return new CredentialListMappingCreator(pathAccountSid, pathDomainSid, credentialListSid);
    }
}
public class RandomAccessStream {

    /**
     * Writes data to the file.
     *
     * Block-address based (sendfile-style) transfer is not supported by this
     * stream type; subclasses that support it must override this method.
     *
     * @throws UnsupportedOperationException always, naming the concrete class
     */
    boolean writeToStream(SendfileOutputStream os, long offset, long length, long[] blockAddresses, long blockLength) throws IOException {
        throw new UnsupportedOperationException(getClass().getName());
    }
}
public class Types {

    /**
     * Determines whether the given type is an array type.
     *
     * @param type the given type
     * @return {@code true} if the type is a {@link GenericArrayType} or an
     *         array {@link Class}
     */
    public static boolean isArray(Type type) {
        if (type instanceof GenericArrayType) {
            return true;
        }
        return (type instanceof Class<?>) && ((Class<?>) type).isArray();
    }
}
public class AFactoryAppBeans { /** * < p > Get HolderRapiGetters in lazy mode . < / p > * @ return HolderRapiGetters - HolderRapiGetters * @ throws Exception - an exception */ public final HolderRapiGetters lazyGetHolderRapiGetters ( ) throws Exception { } }
String beanName = getHolderRapiGettersName ( ) ; HolderRapiGetters holderRapiGetters = ( HolderRapiGetters ) this . beansMap . get ( beanName ) ; if ( holderRapiGetters == null ) { holderRapiGetters = new HolderRapiGetters ( ) ; holderRapiGetters . setUtlReflection ( lazyGetUtlReflection ( ) ) ; this . beansMap . put ( beanName , holderRapiGetters ) ; lazyGetLogger ( ) . info ( null , AFactoryAppBeans . class , beanName + " has been created." ) ; } return holderRapiGetters ;
public class ChartComputator {

    /**
     * Computes the current scrollable surface size, in pixels. For example, if the entire chart area is visible, this
     * is simply the current size of {@link #contentRectMinusAllMargins}. If the chart is zoomed in 200% in both
     * directions, the returned size will be twice as large horizontally and vertically.
     *
     * @param out receives the computed width/height
     */
    public void computeScrollSurfaceSize(Point out) {
        // Each axis scales the content rect by the zoom factor maxViewport / currentViewport.
        out.set((int) (maxViewport.width() * contentRectMinusAllMargins.width() / currentViewport.width()),
                (int) (maxViewport.height() * contentRectMinusAllMargins.height() / currentViewport.height()));
    }
}
public class ReflectionUtils {

    /**
     * Calls the method with the specified name on the given object, casting the method's return value
     * to the desired class type. This method assumes the "method" to invoke is an instance (object) member method.
     *
     * @param <T> the desired return type in which the method's return value will be cast; should be compatible with
     *            the method's return type.
     * @param obj the Object on which the method to invoke is defined.
     * @param methodName a String indicating the name of the method to invoke.
     * @param returnType the desired Class type in which to cast the method's return value.
     * @return the specified method's return value cast to the desired return type.
     * @throws IllegalArgumentException if the method with the specified name is not declared and defined
     *             on the given object's class type.
     * @throws MethodInvocationException if the method invocation (call) fails to be executed successfully.
     * @see #invoke(Object, String, Class[], Object[], Class)
     */
    public static <T> T invoke(Object obj, String methodName, Class<T> returnType) {
        // Convenience overload: no argument types and no arguments.
        return invoke(obj, methodName, null, null, returnType);
    }
}
public class FLV {

    /**
     * {@inheritDoc}
     *
     * Creates a writer that appends tags to the existing FLV file.
     */
    @Override
    public ITagWriter getAppendWriter() throws IOException {
        log.info("getAppendWriter: {}", file);
        // The 'true' flag opens the FLV writer in append mode.
        return new FLVWriter(file.toPath(), true);
    }
}
public class RemoveWatersheds {

    /**
     * Removes watersheds from the segmented image. The input image must be the entire original
     * segmented image and assumes the outside border is filled with values &lt; 0. To access
     * this image call {@link boofcv.alg.segmentation.watershed.WatershedVincentSoille1991#getOutputBorder()}.
     * Each watershed is assigned the value of an arbitrary neighbor. 4-connect rule is used for neighbors.
     * Doesn't matter if initial segmentation was done using another connectivity rule. The value of each
     * region is reduced by one at the very end.
     *
     * @param segmented Entire segmented image (including border of -1 values) with watersheds
     */
    public void remove(GrayS32 segmented) {
        // very quick sanity check: the corner must be part of the -1 border
        if (segmented.get(0, 0) >= 0)
            throw new IllegalArgumentException("The segmented image must contain a border of -1 valued pixels. See"
                + " JavaDoc for important details you didn't bother to read about.");

        open.reset();
        // 4-connect neighbor offsets in the flattened pixel array: left, right, down, up
        connect[0] = -1;
        connect[1] = 1;
        connect[2] = segmented.stride;
        connect[3] = -segmented.stride;

        // step through the inner pixels and find watershed pixels (value 0)
        for (int y = 1; y < segmented.height - 1; y++) {
            int index = y * segmented.stride + 1;
            for (int x = 1; x < segmented.width - 1; x++, index++) {
                if (segmented.data[index] == 0) {
                    open.add(index);
                }
            }
        }

        // assign region values to watersheds until they are all assigned
        while (open.size != 0) {
            open2.reset();
            for (int i = 0; i < open.size; i++) {
                int index = open.get(i);
                // assign it to the first valid region it finds
                for (int j = 0; j < 4; j++) {
                    // the outside border in the enlarged segmented image will have -1 and watersheds are 0
                    int r = segmented.data[index + connect[j]];
                    if (r > 0) {
                        segmented.data[index] = r;
                        break;
                    }
                }
                // see if it was not assigned a region; retry it on the next pass
                if (segmented.data[index] == 0) {
                    open2.add(index);
                }
            }
            // swap open and open2 so unresolved pixels are processed in the next iteration
            GrowQueue_I32 tmp = open;
            open = open2;
            open2 = tmp;
        }

        // watershed pixels have a value of 0 and have been removed. So change the region ID numbers by 1
        for (int y = 1; y < segmented.height - 1; y++) {
            int index = y * segmented.stride + 1;
            for (int x = 1; x < segmented.width - 1; x++, index++) {
                segmented.data[index]--;
            }
        }
    }
}
public class EvaluateRetrievalPerformance { /** * Test whether two relation agree . * @ param ref Reference object * @ param test Test object * @ return { @ code true } if the objects match */ protected static boolean match ( Object ref , Object test ) { } }
if ( ref == null ) { return false ; } // Cheap and fast , may hold for class labels ! if ( ref == test ) { return true ; } if ( ref instanceof LabelList && test instanceof LabelList ) { final LabelList lref = ( LabelList ) ref ; final LabelList ltest = ( LabelList ) test ; final int s1 = lref . size ( ) , s2 = ltest . size ( ) ; if ( s1 == 0 || s2 == 0 ) { return false ; } for ( int i = 0 ; i < s1 ; i ++ ) { String l1 = lref . get ( i ) ; if ( l1 == null ) { continue ; } for ( int j = 0 ; j < s2 ; j ++ ) { if ( l1 . equals ( ltest . get ( j ) ) ) { return true ; } } } } // Fallback to equality , e . g . on class labels return ref . equals ( test ) ;
public class AccountFiltersInner { /** * Get an Account Filter . * Get the details of an Account Filter in the Media Services account . * @ param resourceGroupName The name of the resource group within the Azure subscription . * @ param accountName The Media Services account name . * @ param filterName The Account Filter name * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the AccountFilterInner object */ public Observable < AccountFilterInner > getAsync ( String resourceGroupName , String accountName , String filterName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , accountName , filterName ) . map ( new Func1 < ServiceResponse < AccountFilterInner > , AccountFilterInner > ( ) { @ Override public AccountFilterInner call ( ServiceResponse < AccountFilterInner > response ) { return response . body ( ) ; } } ) ;
public class HttpQuery { /** * Send a file ( with zero - copy ) to the client with a 200 OK status . * This method doesn ' t provide any security guarantee . The caller is * responsible for the argument they pass in . * @ param path The path to the file to send to the client . * @ param max _ age The expiration time of this entity , in seconds . This is * not a timestamp , it ' s how old the resource is allowed to be in the client * cache . See RFC 2616 section 14.9 for more information . Use 0 to disable * caching . */ public void sendFile ( final String path , final int max_age ) throws IOException { } }
// Delegate to the status-aware overload, fixing the status at 200 OK.
sendFile(HttpResponseStatus.OK, path, max_age);
public class AbstractConfiguration { /** * Reads the configuration property as an optional value , so it is not required to have a value for the key / propertyName , and * returns the < code > defaultValue < / code > when the value isn ' t defined . * @ param < T > the property type * @ param propertyName The configuration propertyName . * @ param propertyType The type into which the resolve property value should be converted * @ return the resolved property value as an value of the requested type . ( defaultValue when not found ) */ protected < T > T getOptionalValue ( String propertyName , T defaultValue , Class < T > propertyType ) { } }
// Resolve the optional property; substitute the caller's default when unset.
final T resolved = ConfigOptionalValue.getValue(propertyName, propertyType);
return resolved != null ? resolved : defaultValue;
public class ServerSocket { /** * Enable / disable SO _ TIMEOUT with the specified timeout , in * milliseconds . With this option set to a non - zero timeout , * a call to accept ( ) for this ServerSocket * will block for only this amount of time . If the timeout expires , * a < B > java . net . SocketTimeoutException < / B > is raised , though the * ServerSocket is still valid . The option < B > must < / B > be enabled * prior to entering the blocking operation to have effect . The * timeout must be > 0. * A timeout of zero is interpreted as an infinite timeout . * @ param timeout the specified timeout , in milliseconds * @ exception SocketException if there is an error in * the underlying protocol , such as a TCP error . * @ since JDK1.1 * @ see # getSoTimeout ( ) */ public synchronized void setSoTimeout ( int timeout ) throws SocketException { } }
// Fail fast on a closed socket, per the Socket API contract.
if (isClosed())
    throw new SocketException("Socket is closed");
// Integer.valueOf avoids the deprecated Integer(int) constructor and can
// reuse cached boxed values; the option value itself is unchanged.
getImpl().setOption(SocketOptions.SO_TIMEOUT, Integer.valueOf(timeout));
public class Datatype_Builder { /** * Replaces the value to be returned by { @ link Datatype # getPartialType ( ) } by applying { @ code * mapper } to it and using the result . * @ return this { @ code Builder } object * @ throws NullPointerException if { @ code mapper } is null or returns null * @ throws IllegalStateException if the field has not been set */ public Datatype . Builder mapPartialType ( UnaryOperator < TypeClass > mapper ) { } }
Objects . requireNonNull ( mapper ) ; return setPartialType ( mapper . apply ( getPartialType ( ) ) ) ;
public class Util { /** * Returns a List of all of the values in the Map whose key matches an entry in the nameMapping array . * @ param map * the map * @ param nameMapping * the keys of the Map values to add to the List * @ return a List of all of the values in the Map whose key matches an entry in the nameMapping array * @ throws NullPointerException * if map or nameMapping is null */ public static List < Object > filterMapToList ( final Map < String , ? > map , final String [ ] nameMapping ) { } }
// Explicit NPEs with descriptive messages, per this class's convention.
if (map == null) {
    throw new NullPointerException("map should not be null");
} else if (nameMapping == null) {
    throw new NullPointerException("nameMapping should not be null");
}
// Collect the value for each mapped key; absent keys contribute null,
// keeping the result the same length as nameMapping.
final List<Object> filtered = new ArrayList<Object>(nameMapping.length);
for (int i = 0; i < nameMapping.length; i++) {
    filtered.add(map.get(nameMapping[i]));
}
return filtered;
public class StrKit { /** * Returns true when the string is null , or when every character in it is one of the four characters ' ' '\t' '\n' '\r' */ public static boolean isBlank ( String str ) { } }
if ( str == null ) { return true ; } int len = str . length ( ) ; if ( len == 0 ) { return true ; } for ( int i = 0 ; i < len ; i ++ ) { switch ( str . charAt ( i ) ) { case ' ' : case '\t' : case '\n' : case '\r' : // case ' \ b ' : // case ' \ f ' : break ; default : return false ; } } return true ;
public class HttpParameter { /** * / * package */ static boolean containsFile ( List < HttpParameter > params ) { } }
// Short-circuit as soon as one file parameter is found.
for (HttpParameter param : params) {
    if (param.isFile()) {
        return true;
    }
}
return false;
public class RoleAssignmentsInner { /** * Gets role assignments for a resource group . * @ param resourceGroupName The name of the resource group . * @ param filter The filter to apply on the operation . Use $ filter = atScope ( ) to return all role assignments at or above the scope . Use $ filter = principalId eq { id } to return all role assignments at , above or below the scope for the specified principal . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; RoleAssignmentInner & gt ; object */ public Observable < Page < RoleAssignmentInner > > listByResourceGroupAsync ( final String resourceGroupName , final String filter ) { } }
return listByResourceGroupWithServiceResponseAsync ( resourceGroupName , filter ) . map ( new Func1 < ServiceResponse < Page < RoleAssignmentInner > > , Page < RoleAssignmentInner > > ( ) { @ Override public Page < RoleAssignmentInner > call ( ServiceResponse < Page < RoleAssignmentInner > > response ) { return response . body ( ) ; } } ) ;
public class QueryUtil { /** * Get a range query object for the given distance function for radius - based * neighbor search . ( Range queries in ELKI refers to radius - based ranges , not * rectangular query windows . ) * An index is used when possible , but it may fall back to a linear scan . * Hints include : * < ul > * < li > Range : maximum range requested < / li > * < li > { @ link de . lmu . ifi . dbs . elki . database . query . DatabaseQuery # HINT _ BULK } bulk * query needed < / li > * < / ul > * @ param < O > Object type * @ param database Database * @ param distanceFunction Distance function * @ param hints Optimizer hints * @ return KNN Query object */ public static < O > RangeQuery < O > getRangeQuery ( Database database , DistanceFunction < ? super O > distanceFunction , Object ... hints ) { } }
// Resolve the relation matching the distance function's input type, then
// build the range query on top of its distance query (index-backed when
// the relation supports it, linear scan otherwise).
final Relation<O> relation = database.getRelation(distanceFunction.getInputTypeRestriction(), hints);
return relation.getRangeQuery(relation.getDistanceQuery(distanceFunction, hints), hints);
public class JsJmsMessageImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . mfp . JsJmsMessage # setJmsDeliveryTime ( long ) */ @ Override public void setJmsDeliveryTime ( long value ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setJmsDeliveryTime" , Long . valueOf ( value ) ) ; getHdr2 ( ) . setField ( JsHdr2Access . JMSDELIVERYTIME_DATA , Long . valueOf ( value ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setJmsDeliveryTime" ) ;
public class HbaseSyncService { /** * 根据对应的类型进行转换 * @ param columnItem 列项配置 * @ param hbaseMapping hbase映射配置 * @ param value 值 * @ return 复合字段rowKey */ private static byte [ ] typeConvert ( MappingConfig . ColumnItem columnItem , MappingConfig . HbaseMapping hbaseMapping , Object value ) { } }
if ( value == null ) { return null ; } byte [ ] bytes = null ; if ( columnItem == null || columnItem . getType ( ) == null || "" . equals ( columnItem . getType ( ) ) ) { if ( MappingConfig . Mode . STRING == hbaseMapping . getMode ( ) ) { bytes = Bytes . toBytes ( value . toString ( ) ) ; } else if ( MappingConfig . Mode . NATIVE == hbaseMapping . getMode ( ) ) { bytes = TypeUtil . toBytes ( value ) ; } else if ( MappingConfig . Mode . PHOENIX == hbaseMapping . getMode ( ) ) { PhType phType = PhType . getType ( value . getClass ( ) ) ; bytes = PhTypeUtil . toBytes ( value , phType ) ; } } else { if ( hbaseMapping . getMode ( ) == MappingConfig . Mode . STRING ) { bytes = Bytes . toBytes ( value . toString ( ) ) ; } else if ( hbaseMapping . getMode ( ) == MappingConfig . Mode . NATIVE ) { Type type = Type . getType ( columnItem . getType ( ) ) ; bytes = TypeUtil . toBytes ( value , type ) ; } else if ( hbaseMapping . getMode ( ) == MappingConfig . Mode . PHOENIX ) { PhType phType = PhType . getType ( columnItem . getType ( ) ) ; bytes = PhTypeUtil . toBytes ( value , phType ) ; } } return bytes ;
public class BackupShortTermRetentionPoliciesInner { /** * Gets a database ' s short term retention policy . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the BackupShortTermRetentionPolicyInner object */ public Observable < BackupShortTermRetentionPolicyInner > getAsync ( String resourceGroupName , String serverName , String databaseName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , serverName , databaseName ) . map ( new Func1 < ServiceResponse < BackupShortTermRetentionPolicyInner > , BackupShortTermRetentionPolicyInner > ( ) { @ Override public BackupShortTermRetentionPolicyInner call ( ServiceResponse < BackupShortTermRetentionPolicyInner > response ) { return response . body ( ) ; } } ) ;
public class WishlistItemUrl { /** * Get Resource Url for GetWishlistItemsByWishlistName * @ param customerAccountId The unique identifier of the customer account for which to retrieve wish lists . * @ param filter A set of filter expressions representing the search parameters for a query . This parameter is optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for a list of supported filters . * @ param pageSize When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with this parameter set to 25 , to get the 51st through the 75th items , set startIndex to 50. * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param sortBy The element to sort the results by and the channel in which the results appear . Either ascending ( a - z ) or descending ( z - a ) channel . Optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for more information . * @ param startIndex When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with pageSize set to 25 , to get the 51st through the 75th items , set this parameter to 50. * @ param wishlistName The name of the wish list to retrieve . * @ return String Resource Url */ public static MozuUrl getWishlistItemsByWishlistNameUrl ( Integer customerAccountId , String filter , Integer pageSize , String responseFields , String sortBy , Integer startIndex , String wishlistName ) { } }
// Template for the wish-list items resource, including paging/sorting query parameters.
UrlFormatter urlFormatter = new UrlFormatter("/api/commerce/wishlists/customers/{customerAccountId}/{wishlistName}/items?startIndex={startIndex}&pageSize={pageSize}&sortBy={sortBy}&filter={filter}&responseFields={responseFields}");
// Fill the placeholders in the order they appear in the template.
urlFormatter.formatUrl("customerAccountId", customerAccountId);
urlFormatter.formatUrl("wishlistName", wishlistName);
urlFormatter.formatUrl("startIndex", startIndex);
urlFormatter.formatUrl("pageSize", pageSize);
urlFormatter.formatUrl("sortBy", sortBy);
urlFormatter.formatUrl("filter", filter);
urlFormatter.formatUrl("responseFields", responseFields);
return new MozuUrl(urlFormatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
public class Graphics { /** * Set clipping that controls which areas of the world will be drawn to . * Note that world clip is different from standard screen clip in that it ' s * defined in the space of the current world coordinate - i . e . it ' s affected * by translate , rotate , scale etc . * @ param x * The x coordinate of the top left corner of the allowed area * @ param y * The y coordinate of the top left corner of the allowed area * @ param width * The width of the allowed area * @ param height * The height of the allowed area */ public void setWorldClip ( float x , float y , float width , float height ) { } }
predraw();
// Remember the clip rectangle so it can be queried/restored later.
worldClipRecord = new Rectangle(x, y, width, height);
// Per the OpenGL glClipPlane spec, each plane keeps the half-space where
// a*px + b*py + c*pz + d >= 0, so the four planes below bound the rectangle
// in world (post-transform) coordinates.
// NOTE(review): the same float buffer is reused for all four planes; the
// put(...).flip() sequence assumes glClipPlane copies the coefficients
// immediately - confirm against the GL binding in use.
GL.glEnable(SGL.GL_CLIP_PLANE0);
// Left edge: keep px >= x.
worldClip.put(1).put(0).put(0).put(-x).flip();
GL.glClipPlane(SGL.GL_CLIP_PLANE0, worldClip);
GL.glEnable(SGL.GL_CLIP_PLANE1);
// Right edge: keep px <= x + width.
worldClip.put(-1).put(0).put(0).put(x + width).flip();
GL.glClipPlane(SGL.GL_CLIP_PLANE1, worldClip);
GL.glEnable(SGL.GL_CLIP_PLANE2);
// Top edge: keep py >= y.
worldClip.put(0).put(1).put(0).put(-y).flip();
GL.glClipPlane(SGL.GL_CLIP_PLANE2, worldClip);
GL.glEnable(SGL.GL_CLIP_PLANE3);
// Bottom edge: keep py <= y + height.
worldClip.put(0).put(-1).put(0).put(y + height).flip();
GL.glClipPlane(SGL.GL_CLIP_PLANE3, worldClip);
postdraw();
public class SessionContextRegistryImpl { /** * Get the SessionContext for the specified webmodule config * Calls initialize the first time . * Will create if it doesn ' t already exist */ public IHttpSessionContext getSessionContext ( DeployedModule webModuleConfig , WebApp ctx , String vhostName , ArrayList sessionRelatedListeners [ ] ) throws Throwable { } }
if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
    LoggingUtil.SESSION_LOGGER_CORE.entering(methodClassName, methodNames[GET_SESSION_CONTEXT]);
}
// Lazy one-time initialization of the registry.
if (!initialized)
    initialize();
boolean sessionSharing = getSharing(ctx);
// gets the appropriate appKey to use as a key for the sessionContexts
// This takes into consideration sessionSharing
String appKey = getAppKey(vhostName, webModuleConfig, sessionSharing, false); // sessionSharing ) ;
String appSessionAppKey = getAppKey(vhostName, webModuleConfig, sessionSharing, true); // true is to tell us to use either the applevel or BLA level
SessionManagerConfig smc = getSMC(webModuleConfig);
// cmd LIDB2842 - start block
// Use global sessions only if session management config has
// not been overridden at the EAR or WAR level. If the session
// management config HAS been overridden, issue info message and
// give the app its own context so its sessions will not be global.
// This behavior was necessary because the admin application requires
// its own config to ensure it always runs session-in-memory.
if ((_globalSessionContext) && (smc.isUsingWebContainerSM())) {
    appKey = "GLOBAL_HTTP_SESSION_CONTEXT";
    sessionSharing = true;
} else if (_globalSessionContext) {
    String parm[] = { appKey };
    LoggingUtil.SESSION_LOGGER_CORE.logp(Level.INFO, methodClassName, methodNames[GET_SESSION_CONTEXT], "SessionContextRegistry.SessionNotGlobalForWebApp", parm);
}
// cloning to support programmatic session cookie configuration
// always clone!!!
smc = smc.clone();
// don't set here as we're still using the base config - just changing the cookie/url properties
// smc.setUsingWebContainerSM(false);
if (smc.isUseContextRootForSessionCookiePath()) {
    // setting the cookie path to the context path
    if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
        LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName, methodNames[GET_SESSION_CONTEXT], "Setting the cookie path to \"" + ctx.getContextPath() + "\" for application - " + appKey);
    }
    smc.setSessionCookiePath(ctx.getContextPath());
}
// Derive the default tracking modes (COOKIE/URL/SSL) from the session
// manager config unless the module set its own tracking modes.
if (!ctx.getConfiguration().isModuleSessionTrackingModeSet()) {
    EnumSet<SessionTrackingMode> trackingModes = EnumSet.noneOf(SessionTrackingMode.class);
    if (smc.getEnableCookies()) {
        trackingModes.add(SessionTrackingMode.COOKIE);
    }
    if (smc.getEnableUrlRewriting()) {
        trackingModes.add(SessionTrackingMode.URL);
    }
    if (smc.useSSLId()) {
        trackingModes.add(SessionTrackingMode.SSL);
    }
    ctx.getConfiguration().setDefaultSessionTrackingMode(trackingModes);
}
// Reuse an existing context for this appKey when present; otherwise create one.
IHttpSessionContext iSctx = (IHttpSessionContext) scrSessionContexts.get(appKey);
if (iSctx != null) {
    LoggingUtil.SESSION_LOGGER_CORE.logp(Level.INFO, methodClassName, methodNames[GET_SESSION_CONTEXT], "SessionContextRegistry.existingContext", appKey);
    SessionContext wsCtx = (SessionContext) iSctx;
    // Shared contexts are reference-counted per using module.
    if (sessionSharing)
        wsCtx.incrementRefCount();
} else {
    LoggingUtil.SESSION_LOGGER_CORE.logp(Level.INFO, methodClassName, methodNames[GET_SESSION_CONTEXT], "SessionContextRegistry.newContext", appKey);
    ClassLoader sessionClassLoader = getSessionClassLoader(webModuleConfig);
    iSctx = createSessionContext(appKey, isDistributable(webModuleConfig), isAllowDispatchRemoteInclude(webModuleConfig), ctx, sessionClassLoader, smc, getJ2EEName(webModuleConfig), sessionSharing, appSessionAppKey);
}
// make sure they are the same object
ctx.getConfiguration().setSessionCookieConfig(iSctx.getWASSessionConfig().getSessionCookieConfig());
// clone in SessionContext does not update this smc object but the _smc object in SessionContext
// ctx.getConfiguration().setSessionManagerConfigBase(smc);
ctx.getConfiguration().setSessionManagerConfig(iSctx.getWASSessionConfig());
ArrayList sessionListeners = sessionRelatedListeners[0];
ArrayList sessionAttrListeners = sessionRelatedListeners[1];
ArrayList sessionIdListeners = sessionRelatedListeners[2]; // Servlet 3.1
String j2eeName = null;
if ((_globalSessionContext && sessionSharing) || ((SessionContext) iSctx)._sap.getHasApplicationSession()) {
    // for global session, pass j2eename with listeners so we can stop listeners for each app
    j2eeName = getJ2EEName(webModuleConfig);
}
// add listeners to session context
SessionContext sessCtx = (SessionContext) iSctx;
sessCtx.addHttpSessionListener(sessionListeners, j2eeName);
// Servlet 3.1
if (!sessionIdListeners.isEmpty()) {
    addHttpSessionIdListeners(sessionIdListeners, j2eeName, sessCtx);
}
sessCtx.addHttpSessionAttributeListener(sessionAttrListeners, j2eeName);
if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
    LoggingUtil.SESSION_LOGGER_CORE.exiting(methodClassName, methodNames[GET_SESSION_CONTEXT], iSctx);
}
return iSctx;
public class CassandraCellExtractor { /** * { @ inheritDoc } */ @ SuppressWarnings ( "unchecked" ) @ Override public Cells transformElement ( Pair < Map < String , ByteBuffer > , Map < String , ByteBuffer > > elem , CassandraDeepJobConfig < Cells > config ) { } }
// Build the Cells container in the configured namespace (keyspace).
Cells cells = new Cells(config.getNameSpace());
Map<String, Cell> columnDefinitions = config.columnDefinitions();
// Key columns (left side of the pair).
for (Map.Entry<String, ByteBuffer> entry : elem.left.entrySet()) {
    Cell cd = columnDefinitions.get(entry.getKey());
    // Skip columns without a definition, consistent with the value-column
    // loop below; previously a missing key-column definition reached
    // createFromByteBuffer with a null Cell.
    if (cd == null) {
        continue;
    }
    cells.add(CassandraUtils.createFromByteBuffer(cd, entry.getValue()));
}
// Regular value columns (right side of the pair).
for (Map.Entry<String, ByteBuffer> entry : elem.right.entrySet()) {
    Cell cd = columnDefinitions.get(entry.getKey());
    if (cd == null) {
        continue;
    }
    cells.add(CassandraUtils.createFromByteBuffer(cd, entry.getValue()));
}
return cells;
public class Pool { public void put ( PondLife pl ) throws InterruptedException { } }
// Identifier of this pond-life instance within the pool's arrays.
int id = pl.getID();
synchronized (this) {
    if (_running == 0)
        // Pool is no longer running: retire the entry instead of recycling it.
        stopPondLife(id);
    else if (_pondLife[id] != null) {
        // Push the id back onto the free-index stack and wake one thread
        // blocked on this monitor - presumably a waiter in the matching
        // take/get operation; confirm against the rest of the class.
        _index[_available++] = id;
        notify();
    }
}
public class MethodUtils { /** * Returns a public Method object that reflects the specified public member * method of the class or interface represented by given class . < br > * Returns null if not find . * @ param clazz * the method belong to . * @ param methodName * the name of the method which you want to get . * @ param parameterTypes * the parameter types of the method which you want to get . * @ return a public Method object that reflects the specified public member * method of the class or interface represented by given class . */ public static Method findPublicMethod ( Class < ? > clazz , String methodName , Class < ? > [ ] parameterTypes ) { } }
// Walk up the class hierarchy, retrying the lookup at each level exactly as
// the original recursive form did (Class.getMethod already resolves inherited
// public methods, but the walk also masks per-level SecurityExceptions).
// Iterating instead of recursing also fixes an NPE for a null clazz: the
// recursive version caught the NPE from clazz.getMethod(...) but then threw
// a fresh one from clazz.getSuperclass(); now null simply yields null.
for (Class<?> current = clazz; current != null; current = current.getSuperclass()) {
    try {
        return current.getMethod(methodName, parameterTypes);
    } catch (Exception ignored) {
        // Not found (or not accessible) at this level - try the superclass.
    }
}
return null;
public class InjectableRegisterableItemsFactory { /** * Allows us to create instance of this class dynamically via < code > BeanManager < / code > . This is useful in case multiple * independent instances are required on runtime and that need cannot be satisfied with regular CDI practices . * @ param beanManager - bean manager instance of the container * @ param auditlogger - < code > AbstractAuditLogger < / code > logger instance to be used , might be null * @ param kieContainer - < code > KieContainer < / code > that the factory is built for * @ param ksessionName - name of the ksession defined in kmodule to be used , * if not given default ksession from kmodule will be used . * @ return */ public static RegisterableItemsFactory getFactory ( BeanManager beanManager , AbstractAuditLogger auditlogger , KieContainer kieContainer , String ksessionName ) { } }
// Resolve a CDI-managed instance via the bean manager, then configure it
// for the given container before handing it out.
InjectableRegisterableItemsFactory factory =
        getInstanceByType(beanManager, InjectableRegisterableItemsFactory.class, new Annotation[] {});
factory.setAuditlogger(auditlogger);
factory.setKieContainer(kieContainer);
factory.setKsessionName(ksessionName);
return factory;
public class MessageItem { /** * Sets the previous hop Bus name * @ param busName */ private void setOriginatingBus ( String busName ) { } }
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
    SibTr.entry(this, tc, "setOriginatingBus", busName);
}
// Record the bus name of the previous hop.
_busName = busName;
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
    SibTr.exit(this, tc, "setOriginatingBus");
}
public class Streams { /** * A stream that reads byte data from a fixed sequence of underlying * streams . * This is a multi - stream analogue of { @ link ReadStream # andThen ( ReadStream ) } * or { @ link ReadStream # butFirst ( ReadStream ) } methods . Each stream is closed * after it has been exhausted , with the last stream being closed on an end * of stream condition . * All unclosed streams are closed when { @ link CloseableStream # close ( ) } is * called . * @ param streams * streams from which byte data is to be read * @ return a stream that concatenates the byte data of multiple streams . */ public static ReadStream concatReadStreams ( ReadStream ... streams ) { } }
// Validate the array and every element before constructing anything,
// keeping this class's IllegalArgumentException convention.
if (streams == null) throw new IllegalArgumentException("null streams");
for (int i = 0; i < streams.length; i++) {
    if (streams[i] == null) throw new IllegalArgumentException("null stream");
}
// SeqReadStream closes each underlying stream as it is exhausted.
return new SeqReadStream(StreamCloser.closeStream(), streams);
public class AttributeValue { /** * Returns the value in the given environment . If no value is found for a specific environment the code will fallback to a less complex environment ( reduced form of the environment ) * unless a value is found or the environment is not further reduceable . * @ param in environment * @ return the value in the given environment */ public Value get ( Environment in ) { } }
// Look up the value for the environment, repeatedly reducing the environment
// to a less specific form until a value is found or no reduction remains.
for (;;) {
    if (log.isDebugEnabled())
        log.debug("looking up in " + in + '(' + in.expandedStringForm() + ')');
    final Value match = values.get(in.expandedStringForm());
    if (match != null)
        return match;
    if (!in.isReduceable())
        return null;
    in = in.reduce();
}
public class RolloverFileOutputStream { private void removeOldFiles ( ) { } }
// Nothing to do unless a positive retention period is configured.
if (_retainDays <= 0) {
    return;
}
// Border date = today minus the retention period; files dated on or before
// it are deleted.
Calendar retainDate = Calendar.getInstance();
retainDate.add(Calendar.DATE, -_retainDays);
int borderYear = retainDate.get(java.util.Calendar.YEAR);
int borderMonth = retainDate.get(java.util.Calendar.MONTH) + 1;
int borderDay = retainDate.get(java.util.Calendar.DAY_OF_MONTH);
File file = new File(_filename);
File dir = new File(file.getParent());
String fn = file.getName();
// Only filenames containing the yyyy_mm_dd placeholder participate in rollover.
int s = fn.toLowerCase().indexOf(YYYY_MM_DD);
if (s < 0)
    return;
String prefix = fn.substring(0, s);
String suffix = fn.substring(s + YYYY_MM_DD.length());
String[] logList = dir.list();
// File.list() returns null when the path is not a readable directory;
// previously this dereferenced null and threw a NullPointerException.
if (logList == null)
    return;
for (int i = 0; i < logList.length; i++) {
    fn = logList[i];
    if (fn.startsWith(prefix) && fn.indexOf(suffix, prefix.length()) >= 0) {
        try {
            // Parse the yyyy_mm_dd segment out of the rotated file name.
            StringTokenizer st = new StringTokenizer(fn.substring(prefix.length(), prefix.length() + YYYY_MM_DD.length()), "_.");
            int nYear = Integer.parseInt(st.nextToken());
            int nMonth = Integer.parseInt(st.nextToken());
            int nDay = Integer.parseInt(st.nextToken());
            if (nYear < borderYear || (nYear == borderYear && nMonth < borderMonth)
                    || (nYear == borderYear && nMonth == borderMonth && nDay <= borderDay)) {
                log.info("Log age " + fn);
                new File(dir, fn).delete();
            }
        } catch (Exception e) {
            // Malformed date segment: skip this file, preserving the original
            // best-effort behavior of only reporting when debug is enabled.
            if (log.isDebugEnabled())
                e.printStackTrace();
        }
    }
}
public class AbcGrammar { /** * ifield - transcription : : = % 5B . % 5A . % 3A * WSP tex - text - ifield % 5D < p > * < tt > [ Z : . . ] < / tt > */ Rule IfieldTranscription ( ) { } }
// Grammar rule: "[Z:" WSP* tex-text "]" - the transcription information field.
// The optional whitespace after "[Z:" is suppressed from the parse tree.
return Sequence(String("[Z:"), ZeroOrMore(WSP()).suppressNode(), TexTextIfield(), String("]")).label(IfieldTranscription);
public class ByteUtil { /** * Converts a long value into a byte array . * @ param val * - long value to convert * @ return decimal value with leading byte that are zeroes striped */ public static byte [ ] longToBytesNoLeadZeroes ( long val ) { } }
// todo : improve performance by while strip numbers until ( long > > 8 = = 0) if ( val == 0 ) return EMPTY_BYTE_ARRAY ; byte [ ] data = ByteBuffer . allocate ( 8 ) . putLong ( val ) . array ( ) ; return stripLeadingZeroes ( data ) ;
public class EndpointService { /** * Works only before { @ link # start ( ) } or after { @ link # stop ( ) } . * @ param listener to remove */ public void removeInventoryListener ( InventoryListener listener ) { } }
// Serialize listener-list mutation against concurrent readers via the write lock.
this.inventoryListenerSupport.inventoryListenerRWLock.writeLock().lock();
try {
    // Listener changes are only legal before start() or after stop()
    // (see method javadoc); this asserts that state.
    status.assertInitialOrStopped(getClass(), "removeInventoryListener()");
    // NOTE(review): logs "Removed" even when the listener was not registered;
    // remove() returning false is ignored.
    this.inventoryListenerSupport.inventoryListeners.remove(listener);
    LOG.debugf("Removed inventory listener [%s] for endpoint [%s]", listener, getMonitoredEndpoint());
} finally {
    this.inventoryListenerSupport.inventoryListenerRWLock.writeLock().unlock();
}
public class Utils { private static void initIdGenerator ( ) { } }
// Seed the snowflake-style worker id from configuration, defaulting to 1
// when the configured value is missing or unparsable.
String workerID = Config.WORKER_ID;
workerId = NumberUtils.toLong(workerID, 1);
// Share one RNG for both fallback draws instead of allocating a fresh
// java.util.Random per out-of-range field.
Random random = new Random();
if (workerId > MAX_WORKER_ID || workerId < 0) {
    // Out-of-range configuration: fall back to a random id in [0, MAX_WORKER_ID].
    workerId = random.nextInt((int) MAX_WORKER_ID + 1);
}
if (dataCenterId > MAX_DATACENTER_ID || dataCenterId < 0) {
    // NOTE(review): dataCenterId is read before being assigned in this method -
    // presumably initialized elsewhere in the class; confirm.
    dataCenterId = random.nextInt((int) MAX_DATACENTER_ID + 1);
}