signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FnObject { /** * Determines whether the target object is between min and max
* in value , this is , whether
* < tt > target . compareTo ( min ) & gt ; = 0 & amp ; & amp ; target . compareTo ( max ) & lt ; = 0 < / tt > .
* The target and the specified min and max have to implement { @ link Comparable } .
* @ param min the minimum value of the target
* @ param max the maximum value of the target
* @ return true if the target is between min and max ( or it ' s equal to any of them ) */
public static final Function < Object , Boolean > between ( final Object min , final Object max ) { } } | return new Between ( min , max ) ; |
public class FactoryPointTracker {
    /**
     * Creates a tracker which detects Shi-Tomasi corner features and describes them with NCC.
     *
     * @see ShiTomasiCornerIntensity
     * @see DescribePointPixelRegionNCC
     * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
     *
     * @param configExtract Configuration for extracting features
     * @param describeRadius Radius of the region being described. Try 2.
     * @param imageType Type of image being processed.
     * @param derivType Type of image used to store the image derivative. null == use default
     */
    public static <I extends ImageGray<I>, D extends ImageGray<D>>
    PointTracker<I> dda_ST_NCC(ConfigGeneralDetector configExtract, int describeRadius,
                               Class<I> imageType, @Nullable Class<D> derivType) {
        if (derivType == null)
            derivType = GImageDerivativeOps.getDerivativeType(imageType);

        // Described region is a square of width 2*radius+1 centered on the feature.
        int w = 2 * describeRadius + 1;

        DescribePointPixelRegionNCC<I> alg = FactoryDescribePointAlgs.pixelRegionNCC(w, w, imageType);

        // Shi-Tomasi corner detector wrapped for easy use with derivative images.
        GeneralFeatureDetector<I, D> corner = createShiTomasi(configExtract, derivType);
        EasyGeneralFeatureDetector<I, D> easy = new EasyGeneralFeatureDetector<>(corner, imageType, derivType);

        // Greedy association on NCC scores; Double.MAX_VALUE == no score threshold.
        ScoreAssociateNccFeature score = new ScoreAssociateNccFeature();
        AssociateDescription2D<NccFeature> association =
                new AssociateDescTo2D<>(FactoryAssociation.greedy(score, Double.MAX_VALUE, true));

        DdaManagerGeneralPoint<I, D, NccFeature> manager =
                new DdaManagerGeneralPoint<>(easy, new WrapDescribePixelRegionNCC<>(alg, imageType), 1.0);

        return new DetectDescribeAssociate<>(manager, association, new ConfigTrackerDda());
    }
}
public class ExpiringReference { /** * Gets the value from an expiring reference but returns null if the supplied reference
* reference is null . */
public static < T > T get ( ExpiringReference < T > value ) { } } | return ( value == null ) ? null : value . getValue ( ) ; |
public class Response { /** * Returns a successful response containing the parsed result . */
public static < T > Response < T > success ( T result , Cache . Entry cacheEntry ) { } } | return new Response < T > ( result , cacheEntry ) ; |
public class HalConfiguration { /** * Returns which render mode to use to render a single link for the given { @ link LinkRelation } .
* @ param relation must not be { @ literal null } .
* @ return */
RenderSingleLinks getSingleLinkRenderModeFor ( LinkRelation relation ) { } } | return singleLinksPerPattern . entrySet ( ) . stream ( ) . filter ( entry -> MATCHER . match ( entry . getKey ( ) , relation . value ( ) ) ) . map ( Entry :: getValue ) . findFirst ( ) . orElse ( renderSingleLinks ) ; |
public class UCharacter { /** * Determines if the specified code point is a letter or digit .
* < strong > [ icu ] Note : < / strong > This method , unlike java . lang . Character does not regard the ascii
* characters ' A ' - ' Z ' and ' a ' - ' z ' as digits .
* @ param ch code point to determine if it is a letter or a digit
* @ return true if code point is a letter or a digit */
public static boolean isLetterOrDigit ( int ch ) { } } | return ( ( 1 << getType ( ch ) ) & ( ( 1 << UCharacterCategory . UPPERCASE_LETTER ) | ( 1 << UCharacterCategory . LOWERCASE_LETTER ) | ( 1 << UCharacterCategory . TITLECASE_LETTER ) | ( 1 << UCharacterCategory . MODIFIER_LETTER ) | ( 1 << UCharacterCategory . OTHER_LETTER ) | ( 1 << UCharacterCategory . DECIMAL_DIGIT_NUMBER ) ) ) != 0 ; |
public class ControllerInterceptorAdapter { /** * 返回true , 表示控制器或其方法标注了 “ 拒绝 ” 注解
* @ param controllerClazz
* @ param actionMethod
* @ return */
protected final boolean checkDenyAnnotations ( Class < ? > controllerClazz , Method actionMethod ) { } } | List < Class < ? extends Annotation > > denyAnnotations = getDenyAnnotationClasses ( ) ; if ( denyAnnotations == null || denyAnnotations . size ( ) == 0 ) { return false ; } for ( Class < ? extends Annotation > denyAnnotation : denyAnnotations ) { if ( denyAnnotation == null ) { continue ; } BitSet scopeSet = getAnnotationScope ( denyAnnotation ) ; if ( scopeSet . get ( AnnotationScope . METHOD . ordinal ( ) ) ) { if ( actionMethod . isAnnotationPresent ( denyAnnotation ) ) { return checkAnnotation ( actionMethod . getAnnotation ( denyAnnotation ) ) ; } } if ( scopeSet . get ( AnnotationScope . CLASS . ordinal ( ) ) ) { if ( controllerClazz . isAnnotationPresent ( denyAnnotation ) ) { return checkAnnotation ( actionMethod . getAnnotation ( denyAnnotation ) ) ; } } if ( scopeSet . get ( AnnotationScope . ANNOTATION . ordinal ( ) ) ) { for ( Annotation annotation : actionMethod . getAnnotations ( ) ) { if ( annotation . annotationType ( ) . isAnnotationPresent ( denyAnnotation ) ) { return checkAnnotation ( actionMethod . getAnnotation ( denyAnnotation ) ) ; } } for ( Annotation annotation : controllerClazz . getAnnotations ( ) ) { if ( annotation . annotationType ( ) . isAnnotationPresent ( denyAnnotation ) ) { return checkAnnotation ( actionMethod . getAnnotation ( denyAnnotation ) ) ; } } } } return false ; |
public class KeyVaultClientBaseImpl {
    /**
     * Lists deleted storage accounts for the specified vault.
     * The Get Deleted Storage Accounts operation returns the storage accounts that have been
     * deleted for a vault enabled for soft-delete. This operation requires the storage/list
     * permission.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;DeletedStorageAccountItem&gt; object
     */
    public Observable<Page<DeletedStorageAccountItem>> getDeletedStorageAccountsNextAsync(final String nextPageLink) {
        return getDeletedStorageAccountsNextWithServiceResponseAsync(nextPageLink)
                .map(new Func1<ServiceResponse<Page<DeletedStorageAccountItem>>, Page<DeletedStorageAccountItem>>() {
                    @Override
                    public Page<DeletedStorageAccountItem> call(ServiceResponse<Page<DeletedStorageAccountItem>> response) {
                        // Unwrap the service response; callers only need the page body.
                        return response.body();
                    }
                });
    }
}
public class Futures { /** * Creates a CompletableFuture that will do nothing and complete after a specified delay , without using a thread during
* the delay .
* @ param delay The duration of the delay ( how much to wait until completing the Future ) .
* @ param executorService An ExecutorService that will be used to complete the Future on .
* @ return A CompletableFuture that will complete after the specified delay . */
public static CompletableFuture < Void > delayedFuture ( Duration delay , ScheduledExecutorService executorService ) { } } | CompletableFuture < Void > result = new CompletableFuture < > ( ) ; if ( delay . toMillis ( ) == 0 ) { // Zero delay ; no need to bother with scheduling a task in the future .
result . complete ( null ) ; } else { ScheduledFuture < Boolean > sf = executorService . schedule ( ( ) -> result . complete ( null ) , delay . toMillis ( ) , TimeUnit . MILLISECONDS ) ; result . whenComplete ( ( r , ex ) -> sf . cancel ( true ) ) ; } return result ; |
public class ArrayParameters { /** * Update the indexes of each query parameter */
static Map < String , List < Integer > > updateParameterNamesToIndexes ( Map < String , List < Integer > > parametersNameToIndex , List < ArrayParameter > arrayParametersSortedAsc ) { } } | for ( Map . Entry < String , List < Integer > > parameterNameToIndexes : parametersNameToIndex . entrySet ( ) ) { List < Integer > newParameterIndex = new ArrayList < > ( parameterNameToIndexes . getValue ( ) . size ( ) ) ; for ( Integer parameterIndex : parameterNameToIndexes . getValue ( ) ) { newParameterIndex . add ( computeNewIndex ( parameterIndex , arrayParametersSortedAsc ) ) ; } parameterNameToIndexes . setValue ( newParameterIndex ) ; } return parametersNameToIndex ; |
public class BatchGetJobsRequest { /** * A list of job names , which may be the names returned from the < code > ListJobs < / code > operation .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setJobNames ( java . util . Collection ) } or { @ link # withJobNames ( java . util . Collection ) } if you want to override
* the existing values .
* @ param jobNames
* A list of job names , which may be the names returned from the < code > ListJobs < / code > operation .
* @ return Returns a reference to this object so that method calls can be chained together . */
public BatchGetJobsRequest withJobNames ( String ... jobNames ) { } } | if ( this . jobNames == null ) { setJobNames ( new java . util . ArrayList < String > ( jobNames . length ) ) ; } for ( String ele : jobNames ) { this . jobNames . add ( ele ) ; } return this ; |
public class ListEmailIdentitiesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param listEmailIdentitiesRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives the request's fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListEmailIdentitiesRequest listEmailIdentitiesRequest, ProtocolMarshaller protocolMarshaller) {
        if (listEmailIdentitiesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listEmailIdentitiesRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listEmailIdentitiesRequest.getPageSize(), PAGESIZE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SSLUtils { /** * This method is called for tracing in various places . It returns a string that
* represents the buffer including hashcode , position , limit , and capacity .
* @ param buffer buffer to get debug info on
* @ return string representing the buffer */
public static String getBufferTraceInfo ( WsByteBuffer buffer ) { } } | if ( null == buffer ) { return "null" ; } StringBuilder sb = new StringBuilder ( 64 ) ; sb . append ( "hc=" ) . append ( buffer . hashCode ( ) ) ; sb . append ( " pos=" ) . append ( buffer . position ( ) ) ; sb . append ( " lim=" ) . append ( buffer . limit ( ) ) ; sb . append ( " cap=" ) . append ( buffer . capacity ( ) ) ; return sb . toString ( ) ; |
public class VirtualFileChannel { /** * Returns always a lock . Locking virtual file makes no sense . Dummy
* implementation is provided so that existing applications don ' t throw
* exception .
* @ param position
* @ param size
* @ param shared
* @ return
* @ throws IOException */
@ Override public FileLock tryLock ( long position , long size , boolean shared ) throws IOException { } } | return new FileLockImpl ( this , position , size , shared ) ; |
public class ContainerGroupsInner {
    /**
     * Restarts all containers in a container group.
     * Restarts all containers in a container group in place. If the container image has
     * updates, the new image will be downloaded.
     *
     * @param resourceGroupName The name of the resource group.
     * @param containerGroupName The name of the container group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> restartAsync(String resourceGroupName, String containerGroupName) {
        return restartWithServiceResponseAsync(resourceGroupName, containerGroupName)
                .map(new Func1<ServiceResponse<Void>, Void>() {
                    @Override
                    public Void call(ServiceResponse<Void> response) {
                        // Discard the service-response wrapper; callers only observe completion.
                        return response.body();
                    }
                });
    }
}
public class DatabaseTableConfigUtil { /** * Convert the name of the @ DatabaseField fields into a number for easy processing later . */
private static int configFieldNameToNum ( String configName ) { } } | if ( configName . equals ( "columnName" ) ) { return COLUMN_NAME ; } else if ( configName . equals ( "dataType" ) ) { return DATA_TYPE ; } else if ( configName . equals ( "defaultValue" ) ) { return DEFAULT_VALUE ; } else if ( configName . equals ( "width" ) ) { return WIDTH ; } else if ( configName . equals ( "canBeNull" ) ) { return CAN_BE_NULL ; } else if ( configName . equals ( "id" ) ) { return ID ; } else if ( configName . equals ( "generatedId" ) ) { return GENERATED_ID ; } else if ( configName . equals ( "generatedIdSequence" ) ) { return GENERATED_ID_SEQUENCE ; } else if ( configName . equals ( "foreign" ) ) { return FOREIGN ; } else if ( configName . equals ( "useGetSet" ) ) { return USE_GET_SET ; } else if ( configName . equals ( "unknownEnumName" ) ) { return UNKNOWN_ENUM_NAME ; } else if ( configName . equals ( "throwIfNull" ) ) { return THROW_IF_NULL ; } else if ( configName . equals ( "persisted" ) ) { return PERSISTED ; } else if ( configName . equals ( "format" ) ) { return FORMAT ; } else if ( configName . equals ( "unique" ) ) { return UNIQUE ; } else if ( configName . equals ( "uniqueCombo" ) ) { return UNIQUE_COMBO ; } else if ( configName . equals ( "index" ) ) { return INDEX ; } else if ( configName . equals ( "uniqueIndex" ) ) { return UNIQUE_INDEX ; } else if ( configName . equals ( "indexName" ) ) { return INDEX_NAME ; } else if ( configName . equals ( "uniqueIndexName" ) ) { return UNIQUE_INDEX_NAME ; } else if ( configName . equals ( "foreignAutoRefresh" ) ) { return FOREIGN_AUTO_REFRESH ; } else if ( configName . equals ( "maxForeignAutoRefreshLevel" ) ) { return MAX_FOREIGN_AUTO_REFRESH_LEVEL ; } else if ( configName . equals ( "persisterClass" ) ) { return PERSISTER_CLASS ; } else if ( configName . equals ( "allowGeneratedIdInsert" ) ) { return ALLOW_GENERATED_ID_INSERT ; } else if ( configName . 
equals ( "columnDefinition" ) ) { return COLUMN_DEFINITON ; } else if ( configName . equals ( "fullColumnDefinition" ) ) { return FULL_COLUMN_DEFINITON ; } else if ( configName . equals ( "foreignAutoCreate" ) ) { return FOREIGN_AUTO_CREATE ; } else if ( configName . equals ( "version" ) ) { return VERSION ; } else if ( configName . equals ( "foreignColumnName" ) ) { return FOREIGN_COLUMN_NAME ; } else if ( configName . equals ( "readOnly" ) ) { return READ_ONLY ; } else { throw new IllegalStateException ( "Could not find support for DatabaseField " + configName ) ; } |
public class ServerSetup { /** * Create a deep copy .
* @ param bindAddress overwrites bind address when creating deep copy .
* @ return a copy of the server setup configuration . */
public ServerSetup createCopy ( String bindAddress ) { } } | ServerSetup setup = new ServerSetup ( getPort ( ) , bindAddress , getProtocol ( ) ) ; setup . setServerStartupTimeout ( getServerStartupTimeout ( ) ) ; setup . setConnectionTimeout ( getConnectionTimeout ( ) ) ; setup . setReadTimeout ( getReadTimeout ( ) ) ; setup . setWriteTimeout ( getWriteTimeout ( ) ) ; setup . setVerbose ( isVerbose ( ) ) ; return setup ; |
public class MergeRequestApi { /** * Get all merge requests for the specified project .
* < pre > < code > GitLab Endpoint : GET / projects / : id / merge _ requests < / code > < / pre >
* @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance
* @ return all merge requests for the specified project
* @ throws GitLabApiException if any exception occurs */
public List < MergeRequest > getMergeRequests ( Object projectIdOrPath ) throws GitLabApiException { } } | return ( getMergeRequests ( projectIdOrPath , getDefaultPerPage ( ) ) . all ( ) ) ; |
public class DocumentFactory { /** * Creates a document with the given id and content written in the given language having the given set of attributes .
* @ param id the id
* @ param content the content
* @ param language the language
* @ param attributeMap the attribute map
* @ return the document */
public Document create ( @ NonNull String id , @ NonNull String content , @ NonNull Language language , @ NonNull Map < AttributeType , ? > attributeMap ) { } } | Document document = new Document ( id , normalizer . normalize ( content , language ) , language ) ; document . putAll ( attributeMap ) ; document . setLanguage ( language ) ; return document ; |
public class GridCell { /** * Tests whether any atom in this cell has a contact with the specified query atom
* @ param iAtoms the first set of atoms to which the iIndices correspond
* @ param jAtoms the second set of atoms to which the jIndices correspond , or null
* @ param query test point
* @ param cutoff
* @ return */
public boolean hasContactToAtom ( Point3d [ ] iAtoms , Point3d [ ] jAtoms , Point3d query , double cutoff ) { } } | for ( int i : iIndices ) { double distance = iAtoms [ i ] . distance ( query ) ; if ( distance < cutoff ) return true ; } if ( jAtoms != null ) { for ( int i : jIndices ) { double distance = jAtoms [ i ] . distance ( query ) ; if ( distance < cutoff ) return true ; } } return false ; |
public class VisualizationTree {
    /**
     * Process new result combinations of an object type1 (in first hierarchy) and
     * any child of type2 (in second hierarchy).
     * This is a bit painful, because we have two hierarchies with different
     * types: results, and visualizations.
     *
     * @param context Context
     * @param start Starting point
     * @param type1 First type, in first hierarchy
     * @param type2 Second type, in second hierarchy
     * @param handler Handler
     */
    public static <A extends Result, B extends VisualizationItem> void findNewSiblings(
            VisualizerContext context, Object start, Class<? super A> type1, Class<? super B> type2,
            BiConsumer<A, B> handler) {
        // Search start in first hierarchy:
        final ResultHierarchy hier = context.getHierarchy();
        final Hierarchy<Object> vistree = context.getVisHierarchy();
        if (start instanceof Result) {
            // New result: pair each new result with every existing visualization.
            for (It<A> it1 = hier.iterDescendantsSelf((Result) start).filter(type1); it1.valid(); it1.advance()) {
                final A result = it1.get();
                // Existing visualization:
                for (It<B> it2 = vistree.iterDescendantsSelf(context.getBaseResult()).filter(type2); it2.valid(); it2.advance()) {
                    handler.accept(result, it2.get());
                }
            }
        }
        // New visualization: pair each new visualization with every result in the hierarchy.
        for (It<B> it2 = vistree.iterDescendantsSelf(start).filter(type2); it2.valid(); it2.advance()) {
            final B vis = it2.get();
            // Existing result:
            for (It<A> it1 = hier.iterAll().filter(type1); it1.valid(); it1.advance()) {
                handler.accept(it1.get(), vis);
            }
        }
    }
}
public class PhoneNumberUtil {
    /**
     * Format phone number in common format with cursor position handling.
     *
     * @param pphoneNumber phone number as String to format, with cursor position
     * @return formatted phone number as String with new cursor position
     */
    public final ValueWithPos<String> formatCommon(final ValueWithPos<String> pphoneNumber) {
        // Parse, then format with cursor tracking against the default country data;
        // valueWithPosDefaults presumably falls back to the input value/position when
        // formatting produces nothing — TODO confirm against its implementation.
        return valueWithPosDefaults(
                this.formatCommonWithPos(this.parsePhoneNumber(pphoneNumber), defaultCountryData),
                pphoneNumber);
    }
}
public class ConsumerSessionImpl {
    /**
     * (non-Javadoc)
     * @see com.ibm.wsspi.sib.core.ConsumerSession#deregisterAsynchConsumerCallback()
     */
    @Override
    public void deregisterStoppableAsynchConsumerCallback()
            throws SISessionUnavailableException, SISessionDroppedException, SIErrorException, SIIncorrectCallException {
        if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
            SibTr.entry(CoreSPIConsumerSession.tc, "deregisterStoppableAsynchConsumerCallback", this);
        // Deregister the AsynchConsumer by registering null with the LCP
        // the other params don't matter
        registerStoppableAsynchConsumerCallback(null, 1, 0, 1, null, 0, 0);
        if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
            SibTr.exit(CoreSPIConsumerSession.tc, "deregisterStoppableAsynchConsumerCallback");
    }
}
public class FolderLayout { /** * Checks if the source folder location has been explicitly set by the " xcode . sourceDirectory " . If
* not the default " src / xcode " is returned . */
static File getSourceFolder ( MavenProject project ) { } } | final Properties projectProperties = project . getProperties ( ) ; if ( projectProperties . containsKey ( XCodeDefaultConfigurationMojo . XCODE_SOURCE_DIRECTORY ) ) { return new File ( project . getBasedir ( ) , projectProperties . getProperty ( XCodeDefaultConfigurationMojo . XCODE_SOURCE_DIRECTORY ) ) ; } return new File ( project . getBasedir ( ) , XCodeDefaultConfigurationMojo . DEFAULT_XCODE_SOURCE_DIRECTORY ) ; |
public class AvroGenericRecordAccessor {
    /**
     * Recurse down record types to set the right value.
     * The field name is a dotted path ("a.b.c"); intermediate segments may traverse
     * nested records, list indexes (numeric segment), or map keys. The final segment
     * must land on a record field. The new value is validated against the field's
     * schema and rolled back on mismatch.
     *
     * @param fieldName dotted path to the field to set
     * @param value the value to store at that path
     */
    private void set(String fieldName, Object value) {
        try {
            String subField;
            Iterator<String> levels = Splitter.on(".").split(fieldName).iterator();
            GenericRecord toInsert = record;
            subField = levels.next();
            Object subRecord = toInsert;
            // Walk all but the last path segment down through records/lists/maps.
            while (levels.hasNext()) {
                if (subRecord instanceof GenericRecord) {
                    subRecord = ((GenericRecord) subRecord).get(subField);
                } else if (subRecord instanceof List) {
                    // Numeric segment indexes into a list.
                    subRecord = ((List) subRecord).get(Integer.parseInt(subField));
                } else if (subRecord instanceof Map) {
                    subRecord = ((Map) subRecord).get(subField);
                }
                if (subRecord == null) {
                    throw new FieldDoesNotExistException("Field " + subField + " not found when trying to set " + fieldName);
                }
                subField = levels.next();
            }
            // The parent of the final segment must itself be a record.
            if (!(subRecord instanceof GenericRecord)) {
                throw new IllegalArgumentException("Field " + fieldName + " does not refer to a record type.");
            }
            toInsert = (GenericRecord) subRecord;
            Object oldValue = toInsert.get(subField);
            toInsert.put(subField, value);
            Schema.Field changedField = toInsert.getSchema().getField(subField);
            GenericData genericData = GenericData.get();
            boolean valid = genericData.validate(changedField.schema(),
                    genericData.getField(toInsert, changedField.name(), changedField.pos()));
            if (!valid) {
                // Roll back the write so the record stays schema-consistent.
                toInsert.put(subField, oldValue);
                throw new IncorrectTypeException("Incorrect type - can't insert a " + value.getClass().getCanonicalName()
                        + " into an Avro record of type " + changedField.schema().getType().toString());
            }
        } catch (AvroRuntimeException e) {
            throw new FieldDoesNotExistException("Field not found setting name " + fieldName, e);
        }
    }
}
public class BuildContext {
    /**
     * Associate the given type with the given name within this ObjectContext.
     * This will throw an EvaluationException if the type is already defined.
     *
     * @param name name to associate to the type
     * @param fullType data type to use for the definition
     * @param template template where the type is defined (used for error handling)
     * @param sourceRange location in the template where the type is defined (used for
     *        error handling)
     * @throws EvaluationException if there is already a type associated with the given name
     */
    public void setFullType(String name, FullType fullType, Template template, SourceRange sourceRange)
            throws EvaluationException {
        // Duplicate detection is delegated to the types table, which raises
        // EvaluationException on redefinition.
        types.put(name, fullType, template, sourceRange);
    }
}
public class CloudStorageOptions { /** * < code > . google . privacy . dlp . v2 . CloudStorageOptions . SampleMethod sample _ method = 6 ; < / code > */
public com . google . privacy . dlp . v2 . CloudStorageOptions . SampleMethod getSampleMethod ( ) { } } | @ SuppressWarnings ( "deprecation" ) com . google . privacy . dlp . v2 . CloudStorageOptions . SampleMethod result = com . google . privacy . dlp . v2 . CloudStorageOptions . SampleMethod . valueOf ( sampleMethod_ ) ; return result == null ? com . google . privacy . dlp . v2 . CloudStorageOptions . SampleMethod . UNRECOGNIZED : result ; |
public class GSCOLImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.GSCOL__COL:
                // The COL feature counts as "set" iff it differs from its
                // (possibly null) default value.
                return COL_EDEFAULT == null ? col != null : !COL_EDEFAULT.equals(col);
        }
        return super.eIsSet(featureID);
    }
}
public class Href { /** * Get URI without params .
* @ return Bare URI
* @ since 0.14 */
public String bare ( ) { } } | final StringBuilder text = new StringBuilder ( this . uri . toString ( ) ) ; if ( this . uri . getPath ( ) . isEmpty ( ) ) { text . append ( '/' ) ; } return text . toString ( ) ; |
public class QueryController { /** * Returns whether query is archived
* @ param query query
* @ return whether query is archived */
public boolean isQueryArchived ( Query query ) { } } | if ( query . getArchived ( ) ) { return true ; } return resourceController . isFolderArchived ( query . getParentFolder ( ) ) ; |
public class TimeStampUtils { /** * Gets the current time stamp based on the DEFAULT format .
* The DEFAULT format is defined in Messages . properties .
* @ return current formatted time stamp as { @ code String } */
public static String currentDefaultFormattedTimeStamp ( ) { } } | try { SimpleDateFormat sdf = new SimpleDateFormat ( DEFAULT_TIME_STAMP_FORMAT ) ; return sdf . format ( new Date ( ) ) ; } catch ( IllegalArgumentException e ) { SimpleDateFormat sdf = new SimpleDateFormat ( SAFE_TIME_STAMP_FORMAT ) ; return sdf . format ( new Date ( ) ) ; } |
public class ResourceGroupsInner {
    /**
     * Updates a resource group.
     * Resource groups can be updated through a simple PATCH operation to a group address.
     * The format of the request is the same as that for creating a resource group. If a
     * field is unspecified, the current value is retained.
     *
     * @param resourceGroupName The name of the resource group to update. The name is case insensitive.
     * @param parameters Parameters supplied to update a resource group.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ResourceGroupInner> patchAsync(String resourceGroupName, ResourceGroupInner parameters,
            final ServiceCallback<ResourceGroupInner> serviceCallback) {
        // Bridge the observable pipeline onto the callback-style ServiceFuture API.
        return ServiceFuture.fromResponse(patchWithServiceResponseAsync(resourceGroupName, parameters), serviceCallback);
    }
}
public class BigDecimalMath { /** * Returns whether the specified { @ link BigDecimal } value can be represented as < code > double < / code > .
* < p > If this returns < code > true < / code > you can call { @ link BigDecimal # doubleValue ( ) }
* without fear of getting { @ link Double # POSITIVE _ INFINITY } or { @ link Double # NEGATIVE _ INFINITY } as result . < / p >
* < p > Example : < code > BigDecimalMath . isDoubleValue ( new BigDecimal ( " 1E309 " ) ) < / code > returns < code > false < / code > ,
* because < code > new BigDecimal ( " 1E309 " ) . doubleValue ( ) < / code > returns < code > Infinity < / code > . < / p >
* < p > Note : This method does < strong > not < / strong > check for possible loss of precision . < / p >
* < p > For example < code > BigDecimalMath . isDoubleValue ( new BigDecimal ( " 1.234000001 " ) ) < / code > will return < code > true < / code > ,
* because < code > new BigDecimal ( " 1.234000001 " ) . doubleValue ( ) < / code > returns a valid double value ,
* although it loses precision and returns < code > 1.234 < / code > . < / p >
* < p > < code > BigDecimalMath . isDoubleValue ( new BigDecimal ( " 1E - 325 " ) ) < / code > will return < code > true < / code >
* although this value is smaller than { @ link Double # MIN _ VALUE } ( and therefore outside the range of values that can be represented as < code > double < / code > )
* because < code > new BigDecimal ( " 1E - 325 " ) . doubleValue ( ) < / code > returns < code > 0 < / code > which is a legal value with loss of precision . < / p >
* @ param value the { @ link BigDecimal } to check
* @ return < code > true < / code > if the value can be represented as < code > double < / code > value */
public static boolean isDoubleValue ( BigDecimal value ) { } } | if ( value . compareTo ( DOUBLE_MAX_VALUE ) > 0 ) { return false ; } if ( value . compareTo ( DOUBLE_MAX_VALUE . negate ( ) ) < 0 ) { return false ; } return true ; |
public class NfsStatus { /** * Convenience function to get the instance from the int status value .
* @ param value
* The int status value .
* @ return The instance . */
public static NfsStatus fromValue ( int value ) { } } | NfsStatus status = VALUES . get ( value ) ; if ( status == null ) { status = new NfsStatus ( value ) ; VALUES . put ( value , status ) ; } return status ; |
public class HELM2NotationUtils { /** * method to check if any of the rna polymers have a modified nucleotide
* @ param polymers list of { @ link PolymerNotation }
* @ return true if at least one rna polymer has a modified nucleotide
* @ throws NotationException if notation is not valid */
public static boolean hasNucleotideModification ( List < PolymerNotation > polymers ) throws NotationException { } } | for ( PolymerNotation polymer : getRNAPolymers ( polymers ) ) { if ( RNAUtils . hasNucleotideModification ( polymer ) ) { return true ; } } return false ; |
public class JsonAssert { /** * Compares part of the JSON and fails if they are equal .
* Path has this format " root . array [ 0 ] . value " . */
public static void assertJsonPartNotEquals ( Object expected , Object fullJson , String path , Configuration configuration ) { } } | Diff diff = create ( expected , fullJson , FULL_JSON , path , configuration ) ; if ( diff . similar ( ) ) { if ( ROOT . equals ( path ) ) { doFail ( "Expected different values but the values were equal." ) ; } else { doFail ( String . format ( "Expected different values in node \"%s\" but the values were equal." , path ) ) ; } } |
public class SOM { /** * Cluster a new instance to the nearest neuron . For clustering purpose ,
* one should build a sufficient large map to capture the structure of
* data space . Then the neurons of map can be clustered into a small number
* of clusters . Finally the sample should be assign to the cluster of
* its nearest neurons .
* @ param x a new instance .
* @ return the cluster label . If the method { @ link # partition ( int ) } is
* called before , this is the cluster label of the nearest neuron .
* Otherwise , this is the index of neuron ( i * width + j ) . */
@ Override public int predict ( double [ ] x ) { } } | double best = Double . MAX_VALUE ; int ii = - 1 , jj = - 1 ; for ( int i = 0 ; i < height ; i ++ ) { for ( int j = 0 ; j < width ; j ++ ) { double dist = Math . squaredDistance ( neurons [ i ] [ j ] , x ) ; if ( dist < best ) { best = dist ; ii = i ; jj = j ; } } } if ( y == null ) { return ii * width + jj ; } else { return y [ ii * width + jj ] ; } |
public class XMLConfiguration {
    /**
     * Parses a &lt;security-constraint&gt; element from web.xml and registers the
     * resulting {@link SecurityConstraint}s (one clone per web-resource-collection)
     * with the web application context.
     *
     * @param node the &lt;security-constraint&gt; XML node
     */
    protected void initSecurityConstraint(XmlParser.Node node) {
        try {
            SecurityConstraint scBase = new SecurityConstraint();
            XmlParser.Node auths = node.get("auth-constraint");
            if (auths != null) {
                scBase.setAuthenticate(true);
                // auth - constraint
                Iterator iter = auths.iterator("role-name");
                while (iter.hasNext()) {
                    String role = ((XmlParser.Node) iter.next()).toString(false, true);
                    scBase.addRole(role);
                }
            }
            XmlParser.Node data = node.get("user-data-constraint");
            if (data != null) {
                // NOTE(review): assumes <transport-guarantee> is always present inside
                // <user-data-constraint>; a missing child would NPE below — confirm
                // whether upstream validation guarantees this.
                data = data.get("transport-guarantee");
                String guarantee = data.toString(false, true).toUpperCase();
                if (guarantee == null || guarantee.length() == 0 || "NONE".equals(guarantee))
                    scBase.setDataConstraint(SecurityConstraint.DC_NONE);
                else if ("INTEGRAL".equals(guarantee))
                    scBase.setDataConstraint(SecurityConstraint.DC_INTEGRAL);
                else if ("CONFIDENTIAL".equals(guarantee))
                    scBase.setDataConstraint(SecurityConstraint.DC_CONFIDENTIAL);
                else {
                    // Unknown value: warn and fail safe to CONFIDENTIAL.
                    log.warn("Unknown user-data-constraint:" + guarantee);
                    scBase.setDataConstraint(SecurityConstraint.DC_CONFIDENTIAL);
                }
            }
            Iterator iter = node.iterator("web-resource-collection");
            while (iter.hasNext()) {
                XmlParser.Node collection = (XmlParser.Node) iter.next();
                String name = collection.getString("web-resource-name", false, true);
                // Each collection gets its own clone of the base constraint.
                SecurityConstraint sc = (SecurityConstraint) scBase.clone();
                sc.setName(name);
                Iterator iter2 = collection.iterator("http-method");
                while (iter2.hasNext())
                    sc.addMethod(((XmlParser.Node) iter2.next()).toString(false, true));
                iter2 = collection.iterator("url-pattern");
                while (iter2.hasNext()) {
                    String url = ((XmlParser.Node) iter2.next()).toString(false, true);
                    getWebApplicationContext().addSecurityConstraint(url, sc);
                }
            }
        } catch (CloneNotSupportedException e) {
            log.fatal(e);
        }
    }
}
public class JaxbUtils { /** * Obtains the { @ link JAXBContext } from the given type , by using the cache if possible .
* The original code in { @ link JAXB } class claimed that { @ code volatile } on the { @ code
* WeakReference } variable that they stored the cache in was enough to provide thread safety , but
* I don ' t think so as the reference itself , inside the { @ code WeakReference } wrapper isn ' t
* volatile .
* My improvement */
private synchronized static < T > JAXBContext getContext ( Class < T > type ) throws JAXBException { } } | final SoftReference < Cache > existingCacheRef = CACHE ; if ( existingCacheRef != null ) { Cache existingCache = existingCacheRef . get ( ) ; if ( existingCache != null && existingCache . type == type ) { return existingCache . context ; } } // overwrite the cache
Cache newCache = new Cache ( type ) ; CACHE = new SoftReference < Cache > ( newCache ) ; return newCache . context ; |
public class HashtableOnDisk { /** * Removes the mapping for this key and deletes the object from disk .
* @ param key The key of the object being removed .
* @ return true if successful , false otherwise
* @ exception FileManagerException The underlying file manager has a problem .
* @ exception ClassNotFoundException Some key in the hash bucket cannot be
* deserialized while searching to see if the object already exists . The
* underlying file is likely corrupted .
* @ exception IOException The underlying file has a problem and is likely
* corrupt .
* @ exception EOFxception We were asked to seek beyond the end of the file .
* The file is likely corrupt .
* @ exception HashtableOnDiskException The hashtable header is readable but invalid .
* One or more of the following is true : the magic string is invalid , the header
* pointers are null , the header pointers do not point to a recognizable hashtable . */
public synchronized boolean remove ( Object key ) throws IOException , EOFException , FileManagerException , ClassNotFoundException , HashtableOnDiskException { } } | if ( filemgr == null ) { throw new HashtableOnDiskException ( "No Filemanager" ) ; } if ( key == null ) return false ; // no null keys allowed
HashtableEntry e = findEntry ( key , RETRIEVE_KEY , ! CHECK_EXPIRED ) ; if ( e == null ) return false ; // not found
boolean answer = remove ( e ) ; htoddc . returnToHashtableEntryPool ( e ) ; return answer ; |
public class WorkUnitState { /** * Get the actual high { @ link Watermark } . If the { @ code WorkUnitState } does not contain the actual high watermark
* ( which may be caused by task failures ) , the low watermark in the corresponding { @ link WorkUnit } will be returned .
* @ param watermarkClass the watermark class for this { @ code WorkUnitState } .
* @ param gson a { @ link Gson } object used to deserialize the watermark .
* @ return the actual high watermark in this { @ code WorkUnitState } . null is returned if this { @ code WorkUnitState }
* does not contain an actual high watermark , and the corresponding { @ code WorkUnit } does not contain a low
* watermark . */
public < T extends Watermark > T getActualHighWatermark ( Class < T > watermarkClass , Gson gson ) { } } | JsonElement json = getActualHighWatermark ( ) ; if ( json == null ) { json = this . workUnit . getLowWatermark ( ) ; if ( json == null ) { return null ; } } return gson . fromJson ( json , watermarkClass ) ; |
public class JmsDestinationImpl { /** * This method informs us whether we can carry out type checking on
* the producer connect call . */
protected boolean isProducerTypeCheck ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "isProducerTypeCheck" ) ; boolean checking = true ; // We can carry out checking if there is no FRP , or the FRP has 0 size .
StringArrayWrapper frp = ( StringArrayWrapper ) properties . get ( FORWARD_ROUTING_PATH ) ; if ( frp != null ) { List totalPath = frp . getMsgForwardRoutingPath ( ) ; if ( ( totalPath != null ) && ( totalPath . size ( ) > 0 ) ) checking = false ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "isProducerTypeCheck" , checking ) ; return checking ; |
public class MarkdownParser { /** * Generate intermediate markup to delimit custom nodes representing entities for further processing by the
* Markdown parser . */
private String enrichMarkdown ( String message , JsonNode entitiesNode , JsonNode mediaNode ) throws InvalidInputException { } } | TreeMap < Integer , JsonNode > entities = new TreeMap < > ( ) ; TreeMap < Integer , JsonNode > media = new TreeMap < > ( ) ; if ( entitiesNode != null ) { validateEntities ( entitiesNode ) ; for ( JsonNode node : entitiesNode . findParents ( INDEX_START ) ) { entities . put ( node . get ( INDEX_START ) . intValue ( ) , node ) ; } } if ( mediaNode != null ) { validateMedia ( mediaNode ) ; for ( JsonNode node : mediaNode . findParents ( INDEX ) ) { media . put ( node . get ( INDEX ) . intValue ( ) , node . get ( TEXT ) ) ; } } // If entity indices are outside the message , pad the message to the necessary length
int lastIndex = Math . max ( ( ! entities . isEmpty ( ) ) ? entities . lastKey ( ) : 0 , ( ! media . isEmpty ( ) ) ? media . lastKey ( ) : 0 ) ; if ( message . length ( ) <= lastIndex ) { message = StringUtils . rightPad ( message , lastIndex + 1 ) ; } StringBuilder output = new StringBuilder ( ) ; for ( int i = 0 ; i < message . length ( ) ; i ++ ) { char c = message . charAt ( i ) ; if ( entities . containsKey ( i ) ) { JsonNode entity = entities . get ( i ) ; String entityType = entity . get ( TYPE ) . asText ( ) . toUpperCase ( ) ; String id = entity . get ( ID ) . asText ( ) ; output . append ( ENTITY_DELIMITER ) ; output . append ( entityType ) . append ( FIELD_DELIMITER ) ; output . append ( id ) ; output . append ( ENTITY_DELIMITER ) ; // We explicitly check the entity indices above , but make double sure that we don ' t get into an infinite loop here
int endIndex = entity . get ( INDEX_END ) . intValue ( ) - 1 ; i = Math . max ( endIndex , i ) ; } else if ( media . containsKey ( i ) ) { JsonNode table = media . get ( i ) ; output . append ( ENTITY_DELIMITER ) ; output . append ( TABLE ) . append ( FIELD_DELIMITER ) ; for ( JsonNode row : table ) { for ( JsonNode cell : row ) { String text = cell . asText ( ) ; output . append ( text ) . append ( RECORD_DELIMITER ) ; } output . append ( GROUP_DELIMITER ) ; } output . append ( ENTITY_DELIMITER ) ; output . append ( c ) ; } else { output . append ( c ) ; } } return output . toString ( ) ; |
public class FSImageCompression { /** * Create a compression instance using the codec specified by
* < code > codecClassName < / code > */
private static FSImageCompression createCompression ( Configuration conf , String codecClassName ) throws IOException { } } | CompressionCodecFactory factory = new CompressionCodecFactory ( conf ) ; CompressionCodec codec = factory . getCodecByClassName ( codecClassName ) ; if ( codec == null ) { throw new IOException ( "Not a supported codec: " + codecClassName ) ; } return new FSImageCompression ( codec ) ; |
public class Matrix4f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4fc # rotateTowardsXY ( float , float , org . joml . Matrix4f ) */
public Matrix4f rotateTowardsXY ( float dirX , float dirY , Matrix4f dest ) { } } | if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . rotationTowardsXY ( dirX , dirY ) ; float rm00 = dirY ; float rm01 = dirX ; float rm10 = - dirX ; float rm11 = dirY ; float nm00 = m00 * rm00 + m10 * rm01 ; float nm01 = m01 * rm00 + m11 * rm01 ; float nm02 = m02 * rm00 + m12 * rm01 ; float nm03 = m03 * rm00 + m13 * rm01 ; dest . _m10 ( m00 * rm10 + m10 * rm11 ) ; dest . _m11 ( m01 * rm10 + m11 * rm11 ) ; dest . _m12 ( m02 * rm10 + m12 * rm11 ) ; dest . _m13 ( m03 * rm10 + m13 * rm11 ) ; dest . _m00 ( nm00 ) ; dest . _m01 ( nm01 ) ; dest . _m02 ( nm02 ) ; dest . _m03 ( nm03 ) ; dest . _m20 ( m20 ) ; dest . _m21 ( m21 ) ; dest . _m22 ( m22 ) ; dest . _m23 ( m23 ) ; dest . _m30 ( m30 ) ; dest . _m31 ( m31 ) ; dest . _m32 ( m32 ) ; dest . _m33 ( m33 ) ; dest . _properties ( properties & ~ ( PROPERTY_PERSPECTIVE | PROPERTY_IDENTITY | PROPERTY_TRANSLATION ) ) ; return dest ; |
public class DictionaryTrie { /** * 获取字符对应的根节点
* 如果不存在 , 则返回NULL
* @ param character 字符
* @ return 字符对应的根节点 */
private TrieNode getRootNode ( char character ) { } } | // 计算节点的存储索引
int index = character % INDEX_LENGTH ; TrieNode trieNode = ROOT_NODES_INDEX [ index ] ; while ( trieNode != null && character != trieNode . getCharacter ( ) ) { // 如果节点和其他节点冲突 , 则需要链式查找
trieNode = trieNode . getSibling ( ) ; } return trieNode ; |
public class AbstractIterativeTask { /** * Creates a new solution set update output collector .
* < p > This collector is used by { @ link IterationIntermediateTask } or { @ link IterationTailTask } to update the
* solution set of workset iterations . Depending on the task configuration , either a fast ( non - probing )
* { @ link org . apache . flink . runtime . iterative . io . SolutionSetFastUpdateOutputCollector } or normal ( re - probing )
* { @ link SolutionSetUpdateOutputCollector } is created .
* < p > If a non - null delegate is given , the new { @ link Collector } will write back to the solution set and also call
* collect ( T ) of the delegate .
* @ param delegate null - OR - a delegate collector to be called by the newly created collector
* @ return a new { @ link org . apache . flink . runtime . iterative . io . SolutionSetFastUpdateOutputCollector } or
* { @ link SolutionSetUpdateOutputCollector } */
protected Collector < OT > createSolutionSetUpdateOutputCollector ( Collector < OT > delegate ) { } } | Broker < Object > solutionSetBroker = SolutionSetBroker . instance ( ) ; Object ss = solutionSetBroker . get ( brokerKey ( ) ) ; if ( ss instanceof CompactingHashTable ) { @ SuppressWarnings ( "unchecked" ) CompactingHashTable < OT > solutionSet = ( CompactingHashTable < OT > ) ss ; return new SolutionSetUpdateOutputCollector < OT > ( solutionSet , delegate ) ; } else if ( ss instanceof JoinHashMap ) { @ SuppressWarnings ( "unchecked" ) JoinHashMap < OT > map = ( JoinHashMap < OT > ) ss ; return new SolutionSetObjectsUpdateOutputCollector < OT > ( map , delegate ) ; } else { throw new RuntimeException ( "Unrecognized solution set handle: " + ss ) ; } |
public class SourceSnippets { /** * Creates a snippet that evaluates to an invocation of the named method on
* the given package fragment .
* < p > Used when generating an intermediate invoker method ; see
* { @ link MethodCallUtil # createMethodCallWithInjection } . */
public static SourceSnippet callMethod ( final String methodName , final String fragmentPackageName , final Iterable < String > parameters ) { } } | return new SourceSnippet ( ) { public String getSource ( InjectorWriteContext writeContext ) { return writeContext . callMethod ( methodName , fragmentPackageName , parameters ) ; } } ; |
public class GerritQueryHandler {

    /**
     * Runs the query and returns the result as a list of JSON formatted strings.
     * Equivalent to calling {@code queryJson(queryString, true, true, false, false)}.
     *
     * <p>NOTE(review): the meaning of the four boolean defaults is not visible here —
     * confirm against the five-argument overload's documentation.
     *
     * @param queryString the query
     * @return a list of JSON formatted strings
     * @throws SshException if there is an error in the SSH connection
     * @throws IOException for some other IO problem
     */
    public List<String> queryJson(String queryString) throws SshException, IOException {
        return queryJson(queryString, true, true, false, false);
    }
}
public class ClassPathResolver {

    /**
     * Scans a jar to collect the URLs of all entries matching the given pattern.
     *
     * @param rootDirResource the jar URL denoting the root directory of the scan
     * @param subPattern Ant-style pattern matched against each entry path relative to the root entry
     * @return the URLs of all matching entries; an empty set if the jar cannot be opened or parsed
     */
    private static Collection<? extends URL> doFindPathMatchingJarResources(
            final URL rootDirResource, final String subPattern) {
        final Set<URL> result = new LinkedHashSet<URL>();
        JarFile jarFile = null;
        String jarFileUrl;
        String rootEntryPath = null;
        URLConnection con;
        // true when we opened the JarFile ourselves and must therefore close it.
        boolean newJarFile = false;
        try {
            con = rootDirResource.openConnection();
            if (con instanceof JarURLConnection) {
                // Easy case: the connection exposes the JarFile and root entry directly.
                final JarURLConnection jarCon = (JarURLConnection) con;
                jarCon.setUseCaches(false);
                jarFile = jarCon.getJarFile();
                jarFileUrl = jarCon.getJarFileURL().toExternalForm();
                final JarEntry jarEntry = jarCon.getJarEntry();
                rootEntryPath = jarEntry != null ? jarEntry.getName() : "";
            } else {
                // No JarURLConnection -> need to resort to URL file parsing.
                // We'll assume URLs of the format "jar:path!/entry", with the protocol
                // being arbitrary as long as following the entry format.
                // We'll also handle paths with and without leading "file:" prefix.
                final String urlFile = rootDirResource.getFile();
                final int separatorIndex = urlFile.indexOf(JAR_URL_SEPARATOR);
                if (separatorIndex != -1) {
                    jarFileUrl = urlFile.substring(0, separatorIndex);
                    rootEntryPath = urlFile.substring(separatorIndex + JAR_URL_SEPARATOR.length());
                    jarFile = getJarFile(jarFileUrl);
                } else {
                    jarFile = new JarFile(urlFile);
                    jarFileUrl = urlFile;
                    rootEntryPath = "";
                }
                newJarFile = true;
            }
        } catch (final IOException e) {
            LOGGER.log(Level.ERROR, "reslove jar File error", e);
            return result;
        }
        try {
            if (!"".equals(rootEntryPath) && !rootEntryPath.endsWith("/")) {
                // Root entry path must end with slash to allow for proper matching.
                // The Sun JRE does not return a slash here, but BEA JRockit does.
                rootEntryPath = rootEntryPath + "/";
            }
            for (final Enumeration<JarEntry> entries = jarFile.entries(); entries.hasMoreElements();) {
                final JarEntry entry = (JarEntry) entries.nextElement();
                final String entryPath = entry.getName();
                String relativePath = null;
                if (entryPath.startsWith(rootEntryPath)) {
                    relativePath = entryPath.substring(rootEntryPath.length());
                    if (AntPathMatcher.match(subPattern, relativePath)) {
                        // Normalize away a leading slash before resolving against the root URL.
                        if (relativePath.startsWith("/")) {
                            relativePath = relativePath.substring(1);
                        }
                        result.add(new URL(rootDirResource, relativePath));
                    }
                }
            }
            return result;
        } catch (final IOException e) {
            LOGGER.log(Level.ERROR, "parse the JarFile error", e);
        } finally {
            // Close jar file, but only if freshly obtained -
            // not from JarURLConnection, which might cache the file reference.
            if (newJarFile) {
                try {
                    jarFile.close();
                } catch (final IOException e) {
                    LOGGER.log(Level.WARN, " occur error when closing jarFile", e);
                }
            }
        }
        return result;
    }
}
public class authorizationaction { /** * Use this API to fetch all the authorizationaction resources that are configured on netscaler . */
public static authorizationaction [ ] get ( nitro_service service ) throws Exception { } } | authorizationaction obj = new authorizationaction ( ) ; authorizationaction [ ] response = ( authorizationaction [ ] ) obj . get_resources ( service ) ; return response ; |
public class BPMNLayout {

    /**
     * Returns whether the given <em>mxCell</em> should be ignored as a vertex.
     * In addition to the superclass criteria, swimlanes, relatively-positioned
     * cells and cells without any connections are ignored.
     *
     * <p>NOTE(review): {@code getGeometry(vertex)} is dereferenced without a null
     * check — confirm every vertex reaching this point has geometry, otherwise a
     * {@code NullPointerException} is possible.
     *
     * @param vertex object that represents the vertex to be tested
     * @return {@code true} if the vertex should be ignored
     */
    public boolean isVertexIgnored(Object vertex) {
        return super.isVertexIgnored(vertex) || graph.isSwimlane(vertex)
                || graph.getModel().getGeometry(vertex).isRelative()
                || graph.getConnections(vertex).length == 0;
    }
}
public class CameraPlaneProjection { /** * Given a point on the plane find the normalized image coordinate
* @ param pointX ( input ) Point on the plane , x - axis
* @ param pointY ( input ) Point on the plane , y - axis
* @ param normalized ( output ) Normalized image coordinate of pixel
* @ return true if the point is in front of the camera . False if not . */
public boolean planeToNormalized ( double pointX , double pointY , Point2D_F64 normalized ) { } } | // convert it into a 3D coordinate and transform into camera reference frame
plain3D . set ( - pointY , 0 , pointX ) ; SePointOps_F64 . transform ( planeToCamera , plain3D , camera3D ) ; // if it ' s behind the camera it can ' t be seen
if ( camera3D . z <= 0 ) return false ; // normalized image coordinates and convert into pixels
normalized . x = camera3D . x / camera3D . z ; normalized . y = camera3D . y / camera3D . z ; return true ; |
public class NetworkService { /** * { @ inheritDoc } */
@ Override public boolean isDisconnected ( ) { } } | NetworkInfo networkInfo = isAvailable ( ) ; return ( networkInfo == null ) ? false : ( networkInfo . getState ( ) . equals ( State . DISCONNECTED ) ) ? true : false ; |
public class Space { /** * Returns a subspace around the given point , with just one more
* neighbor left and right on each dimension .
* @ param centerthe center of the new " universe " ; - )
* @ returnthe new space */
public Space subspace ( Point < Integer > center ) { } } | Space result ; SpaceDimension [ ] dimensions ; int i ; dimensions = new SpaceDimension [ dimensions ( ) ] ; for ( i = 0 ; i < dimensions . length ; i ++ ) dimensions [ i ] = getDimension ( i ) . subdimension ( center . getValue ( i ) - 1 , center . getValue ( i ) + 1 ) ; result = new Space ( dimensions ) ; return result ; |
public class AssociationValue { /** * Sets the specified string attribute to the specified value .
* @ param name name of the attribute
* @ param value value of the attribute
* @ since 1.9.0 */
public void setStringAttribute ( String name , String value ) { } } | ensureAttributes ( ) ; Attribute attribute = new StringAttribute ( value ) ; attribute . setEditable ( isEditable ( name ) ) ; getAllAttributes ( ) . put ( name , attribute ) ; |
public class Job { /** * Checks if this job has completed its execution , either failing or succeeding . If the job does
* not exist this method returns { @ code true } .
* < p > Example of waiting for a job until it reports that it is done .
* < pre > { @ code
* while ( ! job . isDone ( ) ) {
* Thread . sleep ( 1000L ) ;
* } < / pre >
* @ return { @ code true } if this job is in { @ link JobStatus . State # DONE } state or if it does not
* exist , { @ code false } if the state is not { @ link JobStatus . State # DONE }
* @ throws BigQueryException upon failure */
public boolean isDone ( ) { } } | checkNotDryRun ( "isDone" ) ; Job job = bigquery . getJob ( getJobId ( ) , JobOption . fields ( BigQuery . JobField . STATUS ) ) ; return job == null || JobStatus . State . DONE . equals ( job . getStatus ( ) . getState ( ) ) ; |
public class PdfLine { /** * Returns the left indentation of the line taking the alignment of the line into account .
* @ returna value */
float indentLeft ( ) { } } | if ( isRTL ) { switch ( alignment ) { case Element . ALIGN_LEFT : return left + width ; case Element . ALIGN_CENTER : return left + ( width / 2f ) ; default : return left ; } } else if ( this . getSeparatorCount ( ) == 0 ) { switch ( alignment ) { case Element . ALIGN_RIGHT : return left + width ; case Element . ALIGN_CENTER : return left + ( width / 2f ) ; } } return left ; |
public class FastAdapter { /** * Util function which recursively iterates over all items of a ` IExpandable ` parent if and only if it is ` expanded ` and has ` subItems `
* This is usually only used in
* @ param lastParentAdapter the last ` IAdapter ` managing the last ( visible ) parent item ( that might also be a parent of a parent , . . )
* @ param lastParentPosition the global position of the last ( visible ) parent item , holding this sub item ( that might also be a parent of a parent , . . )
* @ param parent the ` IExpandableParent ` to start from
* @ param predicate the predicate to run on every item , to check for a match or do some changes ( e . g . select )
* @ param stopOnMatch defines if we should stop iterating after the first match
* @ param < Item > the type of the ` Item `
* @ return Triple & lt ; Boolean , IItem , Integer & gt ; The first value is true ( it is always not null ) , the second contains the item and the third the position ( if the item is visible ) if we had a match , ( always false and null and null in case of stopOnMatch = = false ) */
@ SuppressWarnings ( "unchecked" ) public static < Item extends IItem > Triple < Boolean , Item , Integer > recursiveSub ( IAdapter < Item > lastParentAdapter , int lastParentPosition , IExpandable parent , AdapterPredicate < Item > predicate , boolean stopOnMatch ) { } } | // in case it ' s expanded it can be selected via the normal way
if ( ! parent . isExpanded ( ) && parent . getSubItems ( ) != null ) { for ( int ii = 0 ; ii < parent . getSubItems ( ) . size ( ) ; ii ++ ) { Item sub = ( Item ) parent . getSubItems ( ) . get ( ii ) ; if ( predicate . apply ( lastParentAdapter , lastParentPosition , sub , - 1 ) && stopOnMatch ) { return new Triple < > ( true , sub , null ) ; } if ( sub instanceof IExpandable ) { Triple < Boolean , Item , Integer > res = FastAdapter . recursiveSub ( lastParentAdapter , lastParentPosition , ( IExpandable ) sub , predicate , stopOnMatch ) ; if ( res . first ) { return res ; } } } } return new Triple < > ( false , null , null ) ; |
public class UdpServer {

    /**
     * Runs I/O loops on a supplied {@link EventLoopGroup} from the
     * {@link LoopResources} container.
     *
     * <p>NOTE(review): the hard-coded {@code false} argument is presumed to mean
     * "do not prefer the native transport" — confirm against {@code UdpServerRunOn}.
     *
     * @param channelResources a {@link LoopResources} accepting native runtime
     *        expectation and returning an eventLoopGroup
     * @param family a specific {@link InternetProtocolFamily} to run with
     * @return a new {@link UdpServer}
     */
    public final UdpServer runOn(LoopResources channelResources, InternetProtocolFamily family) {
        return new UdpServerRunOn(this, channelResources, false, family);
    }
}
public class CmsMessageBundleEditorModel { /** * Unlock all files opened for writing . */
public void unlock ( ) { } } | for ( Locale l : m_lockedBundleFiles . keySet ( ) ) { LockedFile f = m_lockedBundleFiles . get ( l ) ; f . tryUnlock ( ) ; } if ( null != m_descFile ) { m_descFile . tryUnlock ( ) ; } |
public class ExamplesUtil { /** * Retrieves example payloads for body parameter either from examples or from vendor extensions .
* @ param parameter parameter to get the examples for
* @ return examples if found otherwise null */
private static Object getExamplesFromBodyParameter ( Parameter parameter ) { } } | Object examples = ( ( BodyParameter ) parameter ) . getExamples ( ) ; if ( examples == null ) { examples = parameter . getVendorExtensions ( ) . get ( "x-examples" ) ; } return examples ; |
public class SearchIndex { /** * Sets the class name for the { @ link ExcerptProvider } that should be used
* for the rep : excerpt pseudo property in a query .
* @ param className
* the name of a class that implements { @ link ExcerptProvider } . */
@ SuppressWarnings ( "unchecked" ) public void setExcerptProviderClass ( String className ) { } } | try { Class < ? > clazz = ClassLoading . forName ( className , this ) ; if ( ExcerptProvider . class . isAssignableFrom ( clazz ) ) { excerptProviderClass = ( Class < ? extends ExcerptProvider > ) clazz ; } else { log . warn ( "Invalid value for excerptProviderClass, {} does " + "not implement ExcerptProvider interface." , className ) ; } } catch ( ClassNotFoundException e ) { log . warn ( "Invalid value for excerptProviderClass, class {} not found." , className ) ; } |
public class CDKAtomTypeMatcher {

    /**
     * {@inheritDoc}
     *
     * <p>NOTE(review): delegates to the four-argument overload with two {@code null}
     * arguments; their semantics are not visible here — confirm against that
     * overload's documentation.
     */
    @Override
    public IAtomType findMatchingAtomType(IAtomContainer atomContainer, IAtom atom) throws CDKException {
        return findMatchingAtomType(atomContainer, atom, null, null);
    }
}
public class Stream {

    /**
     * Returns a {@code Stream} with distinct elements (as determined by
     * {@code hashCode} and {@code equals}).
     *
     * <p>This is a stateful intermediate operation.
     *
     * <p>Example:
     * <pre>
     * stream: [1, 4, 2, 3, 3, 4, 1]
     * result: [1, 4, 2, 3]
     * </pre>
     *
     * @return the new stream
     */
    @NotNull
    public Stream<T> distinct() {
        // Wrap the current iterator in a distinct-filtering iterator, keeping this
        // stream's params.
        return new Stream<T>(params, new ObjDistinct<T>(iterator));
    }
}
public class Task { /** * / * package */
final RESULT executeInner ( ) { } } | try { mResult = execute ( ) ; } catch ( Throwable t ) { Log . e ( "Task" , getClass ( ) . getName ( ) + " crashed" , t ) ; } mCountDownLatch . countDown ( ) ; return mResult ; |
public class ZipUtil { /** * Copies an existing ZIP file and transforms the given entries in it .
* @ param is
* a ZIP input stream .
* @ param entries
* ZIP entry transformers .
* @ param os
* a ZIP output stream .
* @ return < code > true < / code > if at least one entry was replaced . */
public static boolean transformEntries ( InputStream is , ZipEntryTransformerEntry [ ] entries , OutputStream os ) { } } | if ( log . isDebugEnabled ( ) ) log . debug ( "Copying '" + is + "' to '" + os + "' and transforming entries " + Arrays . asList ( entries ) + "." ) ; try { ZipOutputStream out = new ZipOutputStream ( os ) ; TransformerZipEntryCallback action = new TransformerZipEntryCallback ( Arrays . asList ( entries ) , out ) ; iterate ( is , action ) ; // Finishes writing the contents of the ZIP output stream without closing
// the underlying stream .
out . finish ( ) ; return action . found ( ) ; } catch ( IOException e ) { throw ZipExceptionUtil . rethrow ( e ) ; } |
public class ColumnOrSuperColumn { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */
public boolean isSet ( _Fields field ) { } } | if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case COLUMN : return isSetColumn ( ) ; case SUPER_COLUMN : return isSetSuper_column ( ) ; case COUNTER_COLUMN : return isSetCounter_column ( ) ; case COUNTER_SUPER_COLUMN : return isSetCounter_super_column ( ) ; } throw new IllegalStateException ( ) ; |
public class Configuration { /** * Returns the value associated with the given config option as a string .
* @ param configOption The configuration option
* @ return the ( default ) value associated with the given config option */
@ PublicEvolving public String getValue ( ConfigOption < ? > configOption ) { } } | Object o = getValueOrDefaultFromOption ( configOption ) ; return o == null ? null : o . toString ( ) ; |
public class Types {

    /**
     * Returns type information for a Java {@link java.util.List}. A list must not be
     * null; null elements are not supported.
     *
     * <p>By default, lists are untyped and treated as a generic type in Flink;
     * therefore, it is useful to pass type information whenever a list is used.
     *
     * <p><strong>Note:</strong> Flink does not preserve the concrete {@link List}
     * type; it converts a list into {@link ArrayList} when copying or deserializing.
     *
     * @param elementType type information for the list's elements
     */
    public static <E> TypeInformation<List<E>> LIST(TypeInformation<E> elementType) {
        return new ListTypeInfo<>(elementType);
    }
}
public class CmsUserTable { /** * Fills the container item for a user . < p >
* @ param item the item
* @ param user the user */
protected void fillItem ( Item item , CmsUser user ) { } } | item . getItemProperty ( TableProperty . Name ) . setValue ( user . getSimpleName ( ) ) ; item . getItemProperty ( TableProperty . FullName ) . setValue ( user . getFullName ( ) ) ; item . getItemProperty ( TableProperty . SystemName ) . setValue ( user . getName ( ) ) ; boolean disabled = ! user . isEnabled ( ) ; item . getItemProperty ( TableProperty . DISABLED ) . setValue ( new Boolean ( disabled ) ) ; boolean newUser = user . getLastlogin ( ) == 0L ; item . getItemProperty ( TableProperty . NEWUSER ) . setValue ( new Boolean ( newUser ) ) ; try { item . getItemProperty ( TableProperty . OU ) . setValue ( OpenCms . getOrgUnitManager ( ) . readOrganizationalUnit ( m_cms , user . getOuFqn ( ) ) . getDisplayName ( A_CmsUI . get ( ) . getLocale ( ) ) ) ; } catch ( CmsException e ) { LOG . error ( "Can't read OU" , e ) ; } item . getItemProperty ( TableProperty . LastLogin ) . setValue ( new Long ( user . getLastlogin ( ) ) ) ; item . getItemProperty ( TableProperty . Created ) . setValue ( new Long ( user . getDateCreated ( ) ) ) ; item . getItemProperty ( TableProperty . INDIRECT ) . setValue ( new Boolean ( m_indirects . contains ( user ) ) ) ; item . getItemProperty ( TableProperty . FROMOTHEROU ) . setValue ( new Boolean ( ! user . getOuFqn ( ) . equals ( m_ou ) ) ) ; item . getItemProperty ( TableProperty . STATUS ) . setValue ( getStatusInt ( disabled , newUser ) ) ; |
public class Intersectionf {

    /**
     * Determines the signed distance of the point {@code (pointX, pointY, pointZ)}
     * to the plane given by its general equation <i>a*x + b*y + c*z + d = 0</i>.
     *
     * @param pointX the x coordinate of the point
     * @param pointY the y coordinate of the point
     * @param pointZ the z coordinate of the point
     * @param a the x factor in the plane equation
     * @param b the y factor in the plane equation
     * @param c the z factor in the plane equation
     * @param d the constant in the plane equation
     * @return the signed distance between the point and the plane
     */
    public static float distancePointPlane(float pointX, float pointY, float pointZ,
                                           float a, float b, float c, float d) {
        // Evaluate the plane equation at the point, then normalize by the length of
        // the plane normal (a, b, c).
        float planeEquationValue = a * pointX + b * pointY + c * pointZ + d;
        float normalLength = (float) Math.sqrt(a * a + b * b + c * c);
        return planeEquationValue / normalLength;
    }
}
public class IOUtils {

    /**
     * Gets the contents of an InputStream as a byte[]. This method buffers the
     * input internally, so there is no need to use a BufferedInputStream.
     *
     * @param stream the InputStream to read from
     * @return the requested byte array
     * @throws NullPointerException if the input is null
     * @throws IOException if an I/O error occurs
     */
    public static byte[] toByteArray(InputStream stream) throws IOException {
        // Thin adapter over Commons IO, which handles the internal buffering.
        return org.apache.commons.io.IOUtils.toByteArray(stream);
    }
}
public class Matrix3d {

    /**
     * Sets this matrix to a rotation matrix which rotates the given radians about
     * the given axis. With a right-handed coordinate system the rotation is
     * counter-clockwise around the axis when viewing along the negative axis
     * direction towards the origin; with a left-handed system it is clockwise.
     *
     * <p>To post-multiply a rotation onto an existing matrix, use
     * {@link #rotate(double, Vector3dc)} instead.
     *
     * @param angle the angle in radians
     * @param axis the axis to rotate about (needs to be
     *        {@link Vector3d#normalize() normalized})
     * @return this
     * @see #rotate(double, Vector3dc)
     */
    public Matrix3d rotation(double angle, Vector3dc axis) {
        // Delegate to the component-wise overload.
        return rotation(angle, axis.x(), axis.y(), axis.z());
    }
}
public class InvocationSender {

    /**
     * Requests that the specified invocation notification be packaged up and sent
     * to the supplied target client.
     */
    public static void sendNotification(ClientObject target, String receiverCode, int methodId, Object[] args) {
        // Convenience overload that uses the default transport.
        sendNotification(target, receiverCode, methodId, args, Transport.DEFAULT);
    }
}
public class CreateSymbols { /** * Create sig files for ct . sym reading the classes description from the directory that contains
* { @ code ctDescriptionFile } , using the file as a recipe to create the sigfiles . */
@ SuppressWarnings ( "unchecked" ) public void createSymbols ( String ctDescriptionFile , String ctSymLocation , CtSymKind ctSymKind ) throws IOException { } } | ClassList classes = load ( Paths . get ( ctDescriptionFile ) ) ; splitHeaders ( classes ) ; for ( ClassDescription classDescription : classes ) { for ( ClassHeaderDescription header : classDescription . header ) { switch ( ctSymKind ) { case JOINED_VERSIONS : Set < String > jointVersions = new HashSet < > ( ) ; jointVersions . add ( header . versions ) ; limitJointVersion ( jointVersions , classDescription . fields ) ; limitJointVersion ( jointVersions , classDescription . methods ) ; writeClassesForVersions ( ctSymLocation , classDescription , header , jointVersions ) ; break ; case SEPARATE : Set < String > versions = new HashSet < > ( ) ; for ( char v : header . versions . toCharArray ( ) ) { versions . add ( "" + v ) ; } writeClassesForVersions ( ctSymLocation , classDescription , header , versions ) ; break ; } } } |
public class CompilerOptions { /** * Deserializes compiler options from a stream . */
@ GwtIncompatible ( "ObjectInputStream" ) public static CompilerOptions deserialize ( InputStream objectInputStream ) throws IOException , ClassNotFoundException { } } | return ( CompilerOptions ) new java . io . ObjectInputStream ( objectInputStream ) . readObject ( ) ; |
public class State {
    /** {@inheritDoc} */
    @Override
    public <B> State<S, B> discardL(Applicative<B, State<S, ?>> appB) {
        // Reuse the default Monad implementation of discardL, then coerce the
        // widened functional-hierarchy result back to the concrete State type.
        return Monad.super.discardL(appB).coerce();
    }
}
public class IFileInputStream { /** * Close the input stream . Note that we need to read to the end of the
* stream to validate the checksum . */
@ Override public void close ( ) throws IOException { } } | if ( currentOffset < dataLength ) { byte [ ] t = new byte [ Math . min ( ( int ) ( Integer . MAX_VALUE & ( dataLength - currentOffset ) ) , 32 * 1024 ) ] ; while ( currentOffset < dataLength ) { int n = read ( t , 0 , t . length ) ; if ( 0 == n ) { throw new EOFException ( "Could not validate checksum" ) ; } } } in . close ( ) ; |
public class AbstractParser {
    /**
     * Fetch the value at the given index of the JSON array and return it as a
     * string (via {@code toString()}).
     *
     * <p>NOTE(review): the previous summary claimed "" was returned when the
     * value is missing, but the code below returns {@code null} in that case;
     * the description has been corrected to match the implementation.
     *
     * @param index index of the field to fetch from the json array
     * @param jsonArray array from which to fetch the value
     * @return string value corresponding to the index, or null if the array is
     *         null, the index is out of range, the element is null, or the
     *         lookup throws
     */
    protected String getString(final int index, final JSONArray jsonArray) {
        String value = null;
        // Guard against null arrays and out-of-range indices before reading.
        if (jsonArray != null && index > -1 && jsonArray.length() > index) {
            try {
                Object o = jsonArray.get(index);
                if (o != null) {
                    value = o.toString();
                }
            } catch (JSONException e) {
                // Lookup failure is logged and reported to the caller as null.
                LOGGER.error("Could not get String from JSONObject for index: " + index, e);
            }
        }
        return value;
    }
}
public class JoynrRuntimeImpl { /** * Registers a provider in the joynr framework
* @ param domain
* The domain the provider should be registered for . Has to be identical at the client to be able to find
* the provider .
* @ param provider
* Instance of the provider implementation ( has to extend a generated . . . AbstractProvider ) .
* @ param providerQos
* the provider ' s quality of service settings
* @ return Returns a Future which can be used to check the registration status . */
@ Override public Future < Void > registerProvider ( String domain , Object provider , ProviderQos providerQos ) { } } | final boolean awaitGlobalRegistration = false ; return registerProvider ( domain , provider , providerQos , awaitGlobalRegistration ) ; |
public class DatabaseAccountsInner {
    /**
     * Lists the read-only access keys for the specified Azure Cosmos DB database account.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<DatabaseAccountListReadOnlyKeysResultInner> getReadOnlyKeysAsync(String resourceGroupName, String accountName, final ServiceCallback<DatabaseAccountListReadOnlyKeysResultInner> serviceCallback) {
        // Bridge the observable-based implementation to the callback-based API.
        return ServiceFuture.fromResponse(getReadOnlyKeysWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
}
public class Matrix4d { /** * Set this matrix to a rotation transformation about the Y axis .
* When used with a right - handed coordinate system , the produced rotation will rotate a vector
* counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin .
* When used with a left - handed coordinate system , the rotation is clockwise .
* Reference : < a href = " http : / / en . wikipedia . org / wiki / Rotation _ matrix # Basic _ rotations " > http : / / en . wikipedia . org < / a >
* @ param ang
* the angle in radians
* @ return this */
public Matrix4d rotationY ( double ang ) { } } | double sin , cos ; sin = Math . sin ( ang ) ; cos = Math . cosFromSin ( sin , ang ) ; if ( ( properties & PROPERTY_IDENTITY ) == 0 ) this . _identity ( ) ; m00 = cos ; m02 = - sin ; m20 = sin ; m22 = cos ; properties = PROPERTY_AFFINE | PROPERTY_ORTHONORMAL ; return this ; |
public class Waiter { /** * Asserts that the { @ code expected } values equals the { @ code actual } value
* @ throws AssertionError when the assertion fails */
public void assertEquals ( Object expected , Object actual ) { } } | if ( expected == null && actual == null ) return ; if ( expected != null && expected . equals ( actual ) ) return ; fail ( format ( expected , actual ) ) ; |
public class Messager { /** * Print warning message , increment warning count .
* @ param key selects message from resource */
public void warning ( SourcePosition pos , String key , Object ... args ) { } } | printWarning ( pos , getText ( key , args ) ) ; |
public class CmsPreferences {
    /**
     * Builds the html for the upload variant select box.<p>
     *
     * NOTE(review): the previous summary said "workplace start site select box",
     * which does not match this code — it offers the available upload variants.
     *
     * @param htmlAttributes optional html attributes for the &lt;select&gt; tag
     * @return the html for the upload variant select box
     */
    public String buildSelectUpload(String htmlAttributes) {
        List<String> options = new ArrayList<String>();
        List<String> values = new ArrayList<String>();
        int selectedIndex = 0;
        int pos = 0;
        UploadVariant currentVariant = getParamTabWpUploadVariant();
        for (UploadVariant variant : UploadVariant.values()) {
            values.add(variant.toString());
            options.add(getUploadVariantMessage(variant));
            // Pre-select the variant currently stored in the user's settings.
            if (variant.equals(currentVariant)) {
                selectedIndex = pos;
            }
            pos++;
        }
        return buildSelect(htmlAttributes, options, values, selectedIndex);
    }
}
public class SelfCalls { /** * Scan a method for self call sites .
* @ param node
* the CallGraphNode for the method to be scanned */
private void scan ( CallGraphNode node ) throws CFGBuilderException { } } | Method method = node . getMethod ( ) ; CFG cfg = classContext . getCFG ( method ) ; if ( method . isSynchronized ( ) ) { hasSynchronization = true ; } Iterator < BasicBlock > i = cfg . blockIterator ( ) ; while ( i . hasNext ( ) ) { BasicBlock block = i . next ( ) ; Iterator < InstructionHandle > j = block . instructionIterator ( ) ; while ( j . hasNext ( ) ) { InstructionHandle handle = j . next ( ) ; Instruction ins = handle . getInstruction ( ) ; if ( ins instanceof InvokeInstruction ) { InvokeInstruction inv = ( InvokeInstruction ) ins ; Method called = isSelfCall ( inv ) ; if ( called != null ) { // Add edge to call graph
CallSite callSite = new CallSite ( method , block , handle ) ; callGraph . createEdge ( node , callGraph . getNodeForMethod ( called ) , callSite ) ; // Add to called method set
calledMethodSet . add ( called ) ; } } else if ( ins instanceof MONITORENTER || ins instanceof MONITOREXIT ) { hasSynchronization = true ; } } } |
public class CharSequences {
    /**
     * Are we on a character boundary?
     *
     * <p>Positions at or beyond either end of the sequence are treated as
     * boundaries; the only non-boundary position is one strictly inside the
     * sequence that splits a surrogate pair.
     *
     * @param s the sequence to inspect
     * @param i the index to test
     * @return true unless {@code i} falls between a high and a low surrogate
     * @deprecated This API is ICU internal only.
     * @hide draft / provisional / internal are hidden on Android
     */
    @Deprecated
    public static boolean onCharacterBoundary(CharSequence s, int i) {
        // De Morgan rewrite of the original OR-chain: the position is NOT a
        // boundary exactly when it sits between a high and a low surrogate.
        boolean insideSurrogatePair = i > 0
                && i < s.length()
                && Character.isHighSurrogate(s.charAt(i - 1))
                && Character.isLowSurrogate(s.charAt(i));
        return !insideSurrogatePair;
    }
}
public class StopTrainingEntityRecognizerRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( StopTrainingEntityRecognizerRequest stopTrainingEntityRecognizerRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( stopTrainingEntityRecognizerRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( stopTrainingEntityRecognizerRequest . getEntityRecognizerArn ( ) , ENTITYRECOGNIZERARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DeleteConfigurationSetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteConfigurationSetRequest deleteConfigurationSetRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteConfigurationSetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteConfigurationSetRequest . getConfigurationSetName ( ) , CONFIGURATIONSETNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class HttpConnection { /** * Set the String of request body data to be sent to the server .
* @ param input String of request body data to be sent to the server
* @ return an { @ link HttpConnection } for method chaining */
public HttpConnection setRequestBody ( final String input ) { } } | try { final byte [ ] inputBytes = input . getBytes ( "UTF-8" ) ; return setRequestBody ( inputBytes ) ; } catch ( UnsupportedEncodingException e ) { // This should never happen as every implementation of the java platform is required
// to support UTF - 8.
throw new RuntimeException ( e ) ; } |
public class XTypeLiteralImpl {
    /**
     * Reports whether the given structural feature currently holds a non-default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // The type literal is "set" once a type reference has been assigned.
            case XbasePackage.XTYPE_LITERAL__TYPE:
                return type != null;
            // Array dimensions are "set" when the list exists and is non-empty.
            case XbasePackage.XTYPE_LITERAL__ARRAY_DIMENSIONS:
                return arrayDimensions != null && !arrayDimensions.isEmpty();
        }
        // All other features are handled by the superclass.
        return super.eIsSet(featureID);
    }
}
public class Messenger {
    /**
     * Leave group.
     *
     * @param gid group's id
     * @return Promise of Void
     */
    @NotNull
    @ObjectiveCName("leaveGroupWithGid:")
    public Promise<Void> leaveGroup(final int gid) {
        // Delegate to the groups module, which performs the actual leave request.
        return modules.getGroupsModule().leaveGroup(gid);
    }
}
public class DescribeInstanceHealthResult {
    /**
     * Information about the health of the instances.
     *
     * @return Information about the health of the instances; never null — an
     *         empty list is created lazily on first access.
     */
    public java.util.List<InstanceState> getInstanceStates() {
        // Lazily initialize so callers never observe a null list.
        if (instanceStates == null) {
            instanceStates = new com.amazonaws.internal.SdkInternalList<InstanceState>();
        }
        return instanceStates;
    }
}
public class PlannerWriter { /** * This method writes calendar data to a Planner file .
* @ throws JAXBException on xml creation errors */
private void writeCalendars ( ) throws JAXBException { } } | // Create the new Planner calendar list
Calendars calendars = m_factory . createCalendars ( ) ; m_plannerProject . setCalendars ( calendars ) ; writeDayTypes ( calendars ) ; List < net . sf . mpxj . planner . schema . Calendar > calendar = calendars . getCalendar ( ) ; // Process each calendar in turn
for ( ProjectCalendar mpxjCalendar : m_projectFile . getCalendars ( ) ) { net . sf . mpxj . planner . schema . Calendar plannerCalendar = m_factory . createCalendar ( ) ; calendar . add ( plannerCalendar ) ; writeCalendar ( mpxjCalendar , plannerCalendar ) ; } |
public class AbstractRadixAddressableHeap {
    /**
     * {@inheritDoc}
     *
     * Removes and returns the minimum element. The cost of this operation is
     * amortized O(logC) assuming the heap contains keys in the range [0, C]
     * or equivalently [a, a+C].
     */
    @Override
    @LogarithmicTime(amortized = true)
    public Handle<K, V> deleteMin() {
        if (size == 0) {
            throw new NoSuchElementException();
        }
        // Remember the deleted key; it becomes the new lower bound that
        // bucket indices are computed against.
        Node result = currentMin;
        lastDeletedKey = currentMin.key;
        if (currentMin.bucket == 0) {
            // Fast path: the minimum lives in bucket 0, so no redistribution
            // is needed — just unlink it from the doubly-linked bucket list.
            Node head = buckets[currentMin.bucket];
            if (currentMin.next != null) {
                currentMin.next.prev = currentMin.prev;
            }
            if (currentMin.prev != null) {
                currentMin.prev.next = currentMin.next;
            }
            if (head == currentMin) {
                // The minimum was the list head; advance the bucket head.
                currentMin.prev = null;
                buckets[currentMin.bucket] = currentMin.next;
            }
            currentMin.next = null;
            currentMin.prev = null;
            currentMin.bucket = EMPTY;
            // Refresh the cached minimum starting from bucket 0.
            currentMin = buckets[0];
            if (--size > 0) {
                findAndCacheMinimum(0);
            }
        } else {
            // Slow path: empty the minimum's bucket and redistribute every
            // element into lower buckets relative to the new lastDeletedKey.
            Node newMin = null;
            int currentMinBucket = currentMin.bucket;
            Node val = buckets[currentMinBucket];
            while (val != null) {
                // Detach the current head of the bucket list.
                buckets[currentMinBucket] = val.next;
                if (buckets[currentMinBucket] != null) {
                    buckets[currentMinBucket].prev = null;
                }
                val.next = null;
                val.prev = null;
                val.bucket = EMPTY;
                // Re-insert everything except the node being deleted; each
                // survivor must land in a strictly lower bucket.
                if (val != currentMin) {
                    int b = computeBucket(val.key, lastDeletedKey);
                    assert b < currentMinBucket;
                    val.next = buckets[b];
                    if (buckets[b] != null) {
                        buckets[b].prev = val;
                    }
                    buckets[b] = val;
                    val.bucket = b;
                    // Track the smallest redistributed key as the new minimum.
                    if (newMin == null || compare(val.key, newMin.key) < 0) {
                        newMin = val;
                    }
                }
                val = buckets[currentMinBucket];
            }
            // Update the minimum cache; scan higher buckets if needed.
            currentMin = newMin;
            if (--size > 0) {
                findAndCacheMinimum(currentMinBucket + 1);
            }
        }
        return result;
    }
}
public class SystemPropertyContext { /** * Resolves the base directory . If the system property is set that value will be used . Otherwise the path is
* resolved from the home directory .
* @ param name the system property name
* @ param dirName the directory name relative to the base directory
* @ return the resolved base directory */
Path resolveBaseDir ( final String name , final String dirName ) { } } | final String currentDir = SecurityActions . getPropertyPrivileged ( name ) ; if ( currentDir == null ) { return jbossHomeDir . resolve ( dirName ) ; } return Paths . get ( currentDir ) ; |
public class AbstractExtraLanguageGenerator {
    /**
     * Generate the given object.
     *
     * @param object the object.
     * @param appendable the target for the generated content.
     * @param context the context.
     */
    protected void generate(EObject object, ExtraLanguageAppendable appendable, IExtraLanguageGeneratorContext context) {
        // Polymorphic dispatch: routes to the generator method that matches
        // the runtime type of the object.
        this.generateDispatcher2.invoke(object, appendable, context);
    }
}
public class AtomCountDescriptor { /** * this could be useful for other descriptors like polar surface area . . . */
@ Override public DescriptorValue calculate ( IAtomContainer container ) { } } | int atomCount = 0 ; if ( container == null ) { return new DescriptorValue ( getSpecification ( ) , getParameterNames ( ) , getParameters ( ) , new IntegerResult ( ( int ) Double . NaN ) , getDescriptorNames ( ) , new CDKException ( "The supplied AtomContainer was NULL" ) ) ; } if ( container . getAtomCount ( ) == 0 ) { return new DescriptorValue ( getSpecification ( ) , getParameterNames ( ) , getParameters ( ) , new IntegerResult ( ( int ) Double . NaN ) , getDescriptorNames ( ) , new CDKException ( "The supplied AtomContainer did not have any atoms" ) ) ; } if ( elementName . equals ( "*" ) ) { for ( int i = 0 ; i < container . getAtomCount ( ) ; i ++ ) { // we assume that UNSET is equivalent to 0 implicit H ' s
Integer hcount = container . getAtom ( i ) . getImplicitHydrogenCount ( ) ; if ( hcount != CDKConstants . UNSET ) atomCount += hcount ; } atomCount += container . getAtomCount ( ) ; } else if ( elementName . equals ( "H" ) ) { for ( int i = 0 ; i < container . getAtomCount ( ) ; i ++ ) { if ( container . getAtom ( i ) . getSymbol ( ) . equals ( elementName ) ) { atomCount += 1 ; } else { // we assume that UNSET is equivalent to 0 implicit H ' s
Integer hcount = container . getAtom ( i ) . getImplicitHydrogenCount ( ) ; if ( hcount != CDKConstants . UNSET ) atomCount += hcount ; } } } else { for ( int i = 0 ; i < container . getAtomCount ( ) ; i ++ ) { if ( container . getAtom ( i ) . getSymbol ( ) . equals ( elementName ) ) { atomCount += 1 ; } } } return new DescriptorValue ( getSpecification ( ) , getParameterNames ( ) , getParameters ( ) , new IntegerResult ( atomCount ) , getDescriptorNames ( ) ) ; |
public class CassandraArchiveRepository { /** * Get all of the rows in in the table . Attempts to reduce the load on cassandra by splitting up the query into smaller sub - queries
* @ param columns which columns to select
* @ return result rows */
protected Iterable < Row < String , String > > getRows ( EnumSet < ? > columns ) throws Exception { } } | int shardCount = config . getShardCount ( ) ; List < Future < Rows < String , String > > > futures = new ArrayList < Future < Rows < String , String > > > ( ) ; for ( int i = 0 ; i < shardCount ; i ++ ) { futures . add ( cassandra . selectAsync ( generateSelectByShardCql ( columns , i ) ) ) ; } List < Row < String , String > > rows = new LinkedList < Row < String , String > > ( ) ; for ( Future < Rows < String , String > > f : futures ) { Rows < String , String > shardRows = f . get ( ) ; Iterables . addAll ( rows , shardRows ) ; } return rows ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.