signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class MetadataReceivedAlert { /** * This method construct the torrent lazily . If the metadata
* is very big it can be a problem in memory constrained devices .
* Internally it uses a lock synchronization to make it thread - safe .
* @ param extra this controls if extra data , like trackers and web seeds
* are included
* @ return the torrent info bencoded data */
public byte [ ] torrentData ( boolean extra ) { } } | if ( invalid ) { return null ; } if ( data != null ) { return data ; } sync . lock ( ) ; try { if ( invalid ) { return null ; } if ( data != null ) { return data ; } torrent_handle th = alert . getHandle ( ) ; if ( th == null || ! th . is_valid ( ) ) { invalid = true ; return null ; } torrent_info ti = th . torrent_file_ptr ( ) ; if ( ti == null || ! ti . is_valid ( ) ) { invalid = true ; return null ; } size = ti . metadata_size ( ) ; data = createTorrent ( th , ti , extra ) ; } catch ( Throwable e ) { invalid = true ; } finally { sync . unlock ( ) ; } return data ; |
public class Relation { /** * Returns the value at the given row and column indexes
* @ param r the row index , 0 based
* @ param c the column index , 0 based */
public Object get ( int r , int c ) { } } | Column < ? > column = column ( c ) ; return column . get ( r ) ; |
public class PdfLayerMembership { /** * Adds a new member to the layer .
* @ param layer the new member to the layer */
public void addMember ( PdfLayer layer ) { } } | if ( ! layers . contains ( layer ) ) { members . add ( layer . getRef ( ) ) ; layers . add ( layer ) ; } |
public class BroxWarpingSpacial { /** * Compute Psi - data using equation 6 and approximation in equation 5 */
protected void computePsiDataPsiGradient ( GrayF32 image1 , GrayF32 image2 , GrayF32 deriv1x , GrayF32 deriv1y , GrayF32 deriv2x , GrayF32 deriv2y , GrayF32 deriv2xx , GrayF32 deriv2yy , GrayF32 deriv2xy , GrayF32 du , GrayF32 dv , GrayF32 psiData , GrayF32 psiGradient ) { } } | int N = image1 . width * image1 . height ; for ( int i = 0 ; i < N ; i ++ ) { float du_ = du . data [ i ] ; float dv_ = dv . data [ i ] ; // compute Psi - data
float taylor2 = image2 . data [ i ] + deriv2x . data [ i ] * du_ + deriv2y . data [ i ] * dv_ ; float v = taylor2 - image1 . data [ i ] ; psiData . data [ i ] = ( float ) ( 1.0 / ( 2.0 * Math . sqrt ( v * v + EPSILON * EPSILON ) ) ) ; // compute Psi - gradient
float dIx = deriv2x . data [ i ] + deriv2xx . data [ i ] * du_ + deriv2xy . data [ i ] * dv_ - deriv1x . data [ i ] ; float dIy = deriv2y . data [ i ] + deriv2xy . data [ i ] * du_ + deriv2yy . data [ i ] * dv_ - deriv1y . data [ i ] ; float dI2 = dIx * dIx + dIy * dIy ; psiGradient . data [ i ] = ( float ) ( 1.0 / ( 2.0 * Math . sqrt ( dI2 + EPSILON * EPSILON ) ) ) ; } |
public class HttpService { /** * - - - - - private methods - - - - - */
private List < ContextHandler > collectResourceHandlers ( ) throws ClassNotFoundException , InstantiationException , IllegalAccessException { } } | final List < ContextHandler > resourceHandlers = new LinkedList < > ( ) ; final String resourceHandlerList = Settings . ResourceHandlers . getValue ( ) ; if ( resourceHandlerList != null ) { for ( String resourceHandlerName : resourceHandlerList . split ( "[ \\t]+" ) ) { if ( StringUtils . isNotBlank ( resourceHandlerName ) ) { final String contextPath = Settings . getOrCreateStringSetting ( resourceHandlerName , "contextPath" ) . getValue ( ) ; if ( contextPath != null ) { final String resourceBase = Settings . getOrCreateStringSetting ( resourceHandlerName , "resourceBase" ) . getValue ( ) ; if ( resourceBase != null ) { final ResourceHandler resourceHandler = new ResourceHandler ( ) ; resourceHandler . setDirectoriesListed ( Settings . getBooleanSetting ( resourceHandlerName , "directoriesListed" ) . getValue ( ) ) ; final String welcomeFiles = Settings . getOrCreateStringSetting ( resourceHandlerName , "welcomeFiles" ) . getValue ( ) ; if ( welcomeFiles != null ) { resourceHandler . setWelcomeFiles ( StringUtils . split ( welcomeFiles ) ) ; } resourceHandler . setResourceBase ( resourceBase ) ; resourceHandler . setCacheControl ( "max-age=0" ) ; // resourceHandler . setEtags ( true ) ;
final ContextHandler staticResourceHandler = new ContextHandler ( ) ; staticResourceHandler . setContextPath ( contextPath ) ; staticResourceHandler . setHandler ( resourceHandler ) ; resourceHandlers . add ( staticResourceHandler ) ; } else { logger . warn ( "Unable to register resource handler {}, missing {}.resourceBase" , resourceHandlerName , resourceHandlerName ) ; } } else { logger . warn ( "Unable to register resource handler {}, missing {}.contextPath" , resourceHandlerName , resourceHandlerName ) ; } } } } else { logger . warn ( "No resource handlers configured for HttpService." ) ; } return resourceHandlers ; |
public class Database { public void delete_class_property ( String name , String [ ] propnames ) throws DevFailed { } } | databaseDAO . delete_class_property ( this , name , propnames ) ; |
public class PnPInfinitesimalPlanePoseEstimation { /** * R = R _ v * [ R22 , sgn * c ; sgn * b ^ T , a ] */
static void constructR ( DMatrixRMaj R , DMatrixRMaj R_v , DMatrix2x2 R22 , double b1 , double b2 , Vector3D_F64 ca , double sign , DMatrixRMaj tmp ) { } } | tmp . data [ 0 ] = R22 . a11 ; tmp . data [ 1 ] = R22 . a12 ; tmp . data [ 2 ] = sign * ca . x ; tmp . data [ 3 ] = R22 . a21 ; tmp . data [ 4 ] = R22 . a22 ; tmp . data [ 5 ] = sign * ca . y ; tmp . data [ 6 ] = sign * b1 ; tmp . data [ 7 ] = sign * b2 ; tmp . data [ 8 ] = ca . z ; CommonOps_DDRM . mult ( R_v , tmp , R ) ; |
public class MockingApi { /** * Creates a stub with the specified type . If enclosed in a variable assignment , the variable name will be
* used as the stub ' s name .
* Example :
* < pre >
* def person = Stub ( Person ) / / type is Person . class , name is " person "
* < / pre >
* @ param type the interface or class type of the stub
* @ param < T > the interface or class type of the stub
* @ return a stub with the specified type */
@ Override @ Beta public < T > T Stub ( Class < T > type ) { } } | invalidMockCreation ( ) ; return null ; |
public class AbstractCobolSourceCleaner { /** * Examine characters before an assumed level . If these characters are not
* terminated by a COBOL delimiter then the level is actually an argument to
* a previous keyword , not an actual level .
* @ param fragment a fragment of code preceding an assumed level
* @ return true if the assumed level is an argument */
protected boolean isArgument ( final String fragment ) { } } | String s = fragment . trim ( ) ; if ( s . length ( ) > 0 ) { return s . charAt ( s . length ( ) - 1 ) != COBOL_DELIMITER ; } return false ; |
public class TransferServlet { /** * Returns a { @ link AbstractUploadRequestProcessor } for { @ link DefaultManagedArtifact }
* @ param transferContext
* Instance of { @ link TransferContext }
* @ return Instance of { @ link UploadRequestProcessor } . */
private UploadRequestProcessor getUploadRequestProcessor ( TransferContext transferContext ) { } } | LOGGER . entering ( transferContext ) ; String contentType = transferContext . getHttpServletRequest ( ) . getContentType ( ) != null ? transferContext . getHttpServletRequest ( ) . getContentType ( ) . toLowerCase ( ) : "unknown" ; if ( contentType . contains ( AbstractUploadRequestProcessor . MULTIPART_CONTENT_TYPE ) ) { // Return a Multipart request processor
UploadRequestProcessor uploadRequestProcessor = new MultipartUploadRequestProcessor ( transferContext ) ; LOGGER . exiting ( uploadRequestProcessor ) ; return uploadRequestProcessor ; } if ( contentType . contains ( AbstractUploadRequestProcessor . APPLICATION_URLENCODED_CONTENT_TYPE ) ) { // Return normal Urlencoded request processor
UploadRequestProcessor uploadRequestProcessor = new ApplicationUploadRequestProcessor ( transferContext ) ; LOGGER . exiting ( uploadRequestProcessor ) ; return uploadRequestProcessor ; } throw new ArtifactUploadException ( "Content-Type should be either: " + AbstractUploadRequestProcessor . MULTIPART_CONTENT_TYPE + " or: " + AbstractUploadRequestProcessor . APPLICATION_URLENCODED_CONTENT_TYPE + " for file uploads" ) ; |
public class ParameterTool { /** * Returns the Long value for the given key .
* The method fails if the key does not exist . */
public long getLong ( String key ) { } } | addToDefaults ( key , null ) ; String value = getRequired ( key ) ; return Long . parseLong ( value ) ; |
public class DistanceCorrelationDependenceMeasure { /** * Compute the double - centered delta matrix .
* @ param adapter Data adapter
* @ param data Input data
* @ return Double - centered delta matrix . */
protected static < A > double [ ] computeDistances ( NumberArrayAdapter < ? , A > adapter , A data ) { } } | final int size = adapter . size ( data ) ; double [ ] dMatrix = new double [ ( size * ( size + 1 ) ) >> 1 ] ; for ( int i = 0 , c = 0 ; i < size ; i ++ ) { for ( int j = 0 ; j < i ; j ++ ) { double dx = adapter . getDouble ( data , i ) - adapter . getDouble ( data , j ) ; dMatrix [ c ++ ] = ( dx < 0 ) ? - dx : dx ; // Absolute difference .
} c ++ ; // Diagonal entry : zero
} doubleCenterMatrix ( dMatrix , size ) ; return dMatrix ; |
public class HashCodeCalculator { /** * Object hash code generation .
* @ param nPrevHashCode
* The previous hash code used as the basis for calculation
* @ param x
* Object to add . May be < code > null < / code > .
* @ return The updated hash code */
public static int append ( final int nPrevHashCode , @ Nullable final Object x ) { } } | return append ( nPrevHashCode , HashCodeImplementationRegistry . getHashCode ( x ) ) ; |
public class CLI { /** * Set a Properties object with the CLI parameters for evaluation .
* @ param model
* the model parameter
* @ param testset
* the reference set
* @ return the properties object */
private Properties setParsevalProperties ( final String language , final String model , final String testset ) { } } | final Properties parsevalProperties = new Properties ( ) ; parsevalProperties . setProperty ( "language" , language ) ; parsevalProperties . setProperty ( "model" , model ) ; parsevalProperties . setProperty ( "testset" , testset ) ; return parsevalProperties ; |
public class RedisStateMachine { /** * Safely sets { @ link CommandOutput # set ( ByteBuffer ) } . Completes a command exceptionally in case an exception occurs .
* @ param output
* @ param bytes
* @ param command */
protected void safeSet ( CommandOutput < ? , ? , ? > output , ByteBuffer bytes , RedisCommand < ? , ? , ? > command ) { } } | try { output . set ( bytes ) ; } catch ( Exception e ) { command . completeExceptionally ( e ) ; } |
public class Post { /** * Set the date as a date
* @ param date the date to set */
public void setDate ( Date date ) { } } | DateFormat df = new SimpleDateFormat ( "yyyy/MM/dd HH:mm:ss" ) ; df . setTimeZone ( TimeZone . getTimeZone ( "GMT" ) ) ; setDate ( df . format ( date ) ) ; |
public class JSTypeRegistry { /** * Restores the derived state .
* Note : This should be only used when deserializing the compiler state and needs to be done at
* the end , after deserializing CompilerState . */
@ SuppressWarnings ( "unchecked" ) @ GwtIncompatible ( "ObjectInputStream" ) public void restoreContents ( ObjectInputStream in ) throws IOException , ClassNotFoundException { } } | eachRefTypeIndexedByProperty = ( Map < String , Map < String , ObjectType > > ) in . readObject ( ) ; interfaceToImplementors = ( Multimap < String , FunctionType > ) in . readObject ( ) ; typesIndexedByProperty = ( Multimap < String , JSType > ) in . readObject ( ) ; |
public class ExceptionUtils { /** * < p > Creates a compact stack trace for the root cause of the supplied
* < code > Throwable < / code > . < / p >
* < p > The output of this method is consistent across JDK versions .
* It consists of the root exception followed by each of its wrapping
* exceptions separated by ' [ wrapped ] ' . Note that this is the opposite
* order to the JDK1.4 display . < / p >
* @ param throwable the throwable to examine , may be null
* @ return an array of stack trace frames , never null
* @ since 2.0 */
@ GwtIncompatible ( "incompatible method" ) public static String [ ] getRootCauseStackTrace ( final Throwable throwable ) { } } | if ( throwable == null ) { return ArrayUtils . EMPTY_STRING_ARRAY ; } final Throwable throwables [ ] = getThrowables ( throwable ) ; final int count = throwables . length ; final List < String > frames = new ArrayList < > ( ) ; List < String > nextTrace = getStackFrameList ( throwables [ count - 1 ] ) ; for ( int i = count ; -- i >= 0 ; ) { final List < String > trace = nextTrace ; if ( i != 0 ) { nextTrace = getStackFrameList ( throwables [ i - 1 ] ) ; removeCommonFrames ( trace , nextTrace ) ; } if ( i == count - 1 ) { frames . add ( throwables [ i ] . toString ( ) ) ; } else { frames . add ( WRAPPED_MARKER + throwables [ i ] . toString ( ) ) ; } frames . addAll ( trace ) ; } return frames . toArray ( new String [ frames . size ( ) ] ) ; |
public class SessionBeanTypeImpl { /** * If not already created , a new < code > post - activate < / code > element will be created and returned .
* Otherwise , the first existing < code > post - activate < / code > element will be returned .
* @ return the instance defined for the element < code > post - activate < / code > */
public LifecycleCallbackType < SessionBeanType < T > > getOrCreatePostActivate ( ) { } } | List < Node > nodeList = childNode . get ( "post-activate" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new LifecycleCallbackTypeImpl < SessionBeanType < T > > ( this , "post-activate" , childNode , nodeList . get ( 0 ) ) ; } return createPostActivate ( ) ; |
public class ApiOvhMe { /** * Alter this object properties
* REST : PUT / me / sshKey / { keyName }
* @ param body [ required ] New object properties
* @ param keyName [ required ] Name of this public SSH key */
public void sshKey_keyName_PUT ( String keyName , OvhSshKey body ) throws IOException { } } | String qPath = "/me/sshKey/{keyName}" ; StringBuilder sb = path ( qPath , keyName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ; |
public class WhileContextDef { /** * < pre >
* Name of the pivot tensor .
* < / pre >
* < code > optional string pivot _ name = 5 ; < / code > */
public java . lang . String getPivotName ( ) { } } | java . lang . Object ref = pivotName_ ; if ( ref instanceof java . lang . String ) { return ( java . lang . String ) ref ; } else { com . google . protobuf . ByteString bs = ( com . google . protobuf . ByteString ) ref ; java . lang . String s = bs . toStringUtf8 ( ) ; pivotName_ = s ; return s ; } |
public class JvmTypesBuilder { /** * / * @ Nullable */
protected < T extends EObject > T initializeSafely ( /* @ Nullable */
T targetElement , /* @ Nullable */
Procedure1 < ? super T > initializer ) { } } | if ( targetElement != null && initializer != null ) { try { initializer . apply ( targetElement ) ; } catch ( Exception e ) { LOG . error ( "Error initializing JvmElement" , e ) ; } } return targetElement ; |
public class AnalyticsHandler { /** * Clean up the query states after all rows have been consumed . */
private void cleanupQueryStates ( ) { } } | completeRequestSpan ( currentRequest ( ) ) ; finishedDecoding ( ) ; queryInfoObservable = null ; queryRowObservable = null ; queryErrorObservable = null ; queryStatusObservable = null ; querySignatureObservable = null ; queryParsingState = QUERY_STATE_INITIAL ; |
public class RevisionUtils { /** * in CBLDatabase + Insertion . m
* - ( NSString * ) generateRevID : ( CBL _ Revision * ) rev
* withJSON : ( NSData * ) json
* attachments : ( NSDictionary * ) attachments
* prevID : ( NSString * ) prevID
* @ exclude */
@ InterfaceAudience . Private public static String generateRevID ( byte [ ] json , boolean deleted , String prevID ) { } } | MessageDigest md5Digest ; // Revision IDs have a generation count , a hyphen , and a UUID .
int generation = 0 ; if ( prevID != null ) { generation = RevisionInternal . generationFromRevID ( prevID ) ; if ( generation == 0 ) { return null ; } } // Generate a getDigest for this revision based on the previous revision ID , document JSON ,
// and attachment digests . This doesn ' t need to be secure ; we just need to ensure that this
// code consistently generates the same ID given equivalent revisions .
try { md5Digest = MessageDigest . getInstance ( "MD5" ) ; } catch ( NoSuchAlgorithmException e ) { throw new RuntimeException ( e ) ; } // single byte - length of previous revision id
// previous revision id
int length = 0 ; byte [ ] prevIDUTF8 = null ; if ( prevID != null ) { prevIDUTF8 = prevID . getBytes ( Charset . forName ( "UTF-8" ) ) ; length = prevIDUTF8 . length ; } if ( length > 0xFF ) { return null ; } byte lengthByte = ( byte ) ( length & 0xFF ) ; byte [ ] lengthBytes = new byte [ ] { lengthByte } ; md5Digest . update ( lengthBytes ) ; // prefix with length byte
if ( length > 0 && prevIDUTF8 != null ) { md5Digest . update ( prevIDUTF8 ) ; } // single byte - deletion flag
int isDeleted = ( ( deleted != false ) ? 1 : 0 ) ; byte [ ] deletedByte = new byte [ ] { ( byte ) isDeleted } ; md5Digest . update ( deletedByte ) ; // json
if ( json != null ) { md5Digest . update ( json ) ; } byte [ ] md5DigestResult = md5Digest . digest ( ) ; String digestAsHex = Utils . bytesToHex ( md5DigestResult ) ; int generationIncremented = generation + 1 ; return String . format ( Locale . ENGLISH , "%d-%s" , generationIncremented , digestAsHex ) . toLowerCase ( ) ; |
public class DirectoryResourceFileSource { /** * / * ( non - Javadoc )
* @ see org . archive . wayback . resourcestore . resourcefile . ResourceFileSource # getSources ( ) */
public List < ResourceFileSource > getSources ( ) { } } | List < ResourceFileSource > sources = new ArrayList < ResourceFileSource > ( ) ; sources . add ( this ) ; return sources ; |
public class CaptureToUrlSearchResultIterator { /** * / * ( non - Javadoc )
* @ see java . util . Iterator # next ( ) */
public UrlSearchResult next ( ) { } } | if ( cachedNext == null ) { throw new NoSuchElementException ( "use hasNext!" ) ; } UrlSearchResult tmp = cachedNext ; cachedNext = null ; return tmp ; |
public class TableSession { /** * Add add this data to the file .
* @ param data A vector object containing the raw data for the record .
* @ exception DBException File exception . */
public Object add ( Object data , int iOpenMode ) throws DBException , RemoteException { } } | Object bookmarkLast = null ; Record record = this . getMainRecord ( ) ; int iOldOpenMode = record . getOpenMode ( ) ; try { Utility . getLogger ( ) . info ( "EJB Add" ) ; synchronized ( this . getTask ( ) ) { record . setOpenMode ( ( iOldOpenMode & ~ DBConstants . LOCK_TYPE_MASK ) | ( iOpenMode & DBConstants . LOCK_TYPE_MASK ) ) ; // Use client ' s lock data
record . addNew ( ) ; Record recordBase = record . getTable ( ) . getCurrentTable ( ) . getRecord ( ) ; int iFieldTypes = this . getFieldTypes ( recordBase ) ; int iErrorCode = this . moveBufferToFields ( data , iFieldTypes , recordBase ) ; // Screen move . . . need to validate !
if ( iErrorCode != DBConstants . NORMAL_RETURN ) { throw new DatabaseException ( iErrorCode ) ; } if ( DBConstants . TRUE . equals ( record . getTable ( ) . getProperty ( DBParams . SUPRESSREMOTEDBMESSAGES ) ) ) record . setSupressRemoteMessages ( true ) ; record . add ( ) ; bookmarkLast = record . getLastModified ( DBConstants . BOOKMARK_HANDLE ) ; } } catch ( DBException ex ) { throw ex ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; throw new DBException ( ex . getMessage ( ) ) ; } finally { record . setSupressRemoteMessages ( false ) ; this . getMainRecord ( ) . setOpenMode ( iOldOpenMode ) ; } return bookmarkLast ; |
public class RedisInner { /** * Lists all Redis caches in a resource group .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; RedisResourceInner & gt ; object */
public Observable < Page < RedisResourceInner > > listByResourceGroupNextAsync ( final String nextPageLink ) { } } | return listByResourceGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < RedisResourceInner > > , Page < RedisResourceInner > > ( ) { @ Override public Page < RedisResourceInner > call ( ServiceResponse < Page < RedisResourceInner > > response ) { return response . body ( ) ; } } ) ; |
public class LofCalculator { /** * 指定したK値 、 データセットを基に学習データセットの初期化を行う 。 < br >
* データセット中の各対象点の以下の値を再計算する 。
* < ol >
* < li > K距離値 < / li >
* < li > K距離近傍データのIDリスト < / li >
* < li > 局所到達可能密度 < / li >
* < / ol >
* @ param kn K値
* @ param dataSet 学習データセット */
public static void initDataSet ( int kn , LofDataSet dataSet ) { } } | Collection < LofPoint > pointList = dataSet . getDataMap ( ) . values ( ) ; // K距離 、 K距離近傍を全て更新した後局所到達可能密度を更新する必要があるため 、 2ブロックに分けて行う 。
for ( LofPoint targetPoint : pointList ) { // 対象点のK距離 、 K距離近傍を更新する 。
updateKDistance ( kn , targetPoint , dataSet ) ; } for ( LofPoint targetPoint : pointList ) { // 対象点の局所到達可能密度を更新する 。
updateLrd ( targetPoint , dataSet ) ; } |
public class CacheEntry { static public CacheEntryPool createCacheEntryPool ( DCache cache , int size ) { } } | CacheEntryPool pool = new CacheEntryPool ( size , cache ) ; return pool ; |
public class FileSystemResourceReader { /** * ( non - Javadoc )
* @ see
* net . jawr . web . resource . handler . reader . TextResourceReader # getResource ( net .
* jawr . web . resource . bundle . JoinableResourceBundle , java . lang . String ,
* boolean ) */
@ Override public Reader getResource ( JoinableResourceBundle bundle , String resourceName , boolean processingBundle ) { } } | Reader rd = null ; FileInputStream fis = ( FileInputStream ) getResourceAsStream ( resourceName ) ; if ( fis != null ) { FileChannel inchannel = fis . getChannel ( ) ; rd = Channels . newReader ( inchannel , charset . newDecoder ( ) , - 1 ) ; } return rd ; |
public class ExportManager { /** * line that contains a non escaped $
* @ param key input
* @ return line with variables replaced with their value */
public String getValue ( String key ) { } } | if ( key . indexOf ( DOLLAR ) == - 1 ) { String value = getVariable ( key ) ; if ( value == null ) return null ; if ( value . indexOf ( DOLLAR ) == - 1 ) return value ; else return parseValue ( value ) ; } return parseValue ( key ) ; |
public class Configuration { /** * Returns the value associated with the given config option as a float .
* @ param configOption The configuration option
* @ return the ( default ) value associated with the given config option */
@ PublicEvolving public float getFloat ( ConfigOption < Float > configOption ) { } } | Object o = getValueOrDefaultFromOption ( configOption ) ; return convertToFloat ( o , configOption . defaultValue ( ) ) ; |
public class Parsers { /** * Checks the current token with the { @ code fromToken } object . If the
* { @ link TokenMap # map ( Token ) } method returns null , an unexpected token error occurs ;
* if the method returns a non - null value , the value is returned and the parser succeeds .
* @ param fromToken the { @ code FromToken } object .
* @ return the new Parser object . */
public static < T > Parser < T > token ( final TokenMap < ? extends T > fromToken ) { } } | return new Parser < T > ( ) { @ Override boolean apply ( final ParseContext ctxt ) { if ( ctxt . isEof ( ) ) { ctxt . missing ( fromToken ) ; return false ; } Token token = ctxt . getToken ( ) ; Object v = fromToken . map ( token ) ; if ( v == null ) { ctxt . missing ( fromToken ) ; return false ; } ctxt . result = v ; ctxt . next ( ) ; return true ; } @ Override public String toString ( ) { return fromToken . toString ( ) ; } } ; |
public class FieldDefinition { /** * Serialize this field definition into a { @ link UNode } tree and return the root node .
* @ return This field definition serialized into a UNode tree . */
public UNode toDoc ( ) { } } | // Root node is a MAP and its name is the field name . Set its tag name to " field "
// for XML .
UNode fieldNode = UNode . createMapNode ( m_name , "field" ) ; // Groups and non - groups are handled differently .
if ( m_type != FieldType . GROUP ) { // Non - group : Add a " type " attribute , marked as an XML attribute .
fieldNode . addValueNode ( "type" , m_type . toString ( ) , true ) ; // Add ' collection ' , marked as an XML attribute .
if ( m_type . isLinkType ( ) || m_bIsCollection ) { fieldNode . addValueNode ( "collection" , Boolean . toString ( m_bIsCollection ) , true ) ; } // Add ' analyzer ' attribute , if specified , marked as an XML attribute .
if ( m_analyzerName != null ) { fieldNode . addValueNode ( "analyzer" , m_analyzerName , true ) ; } // Add ' encoding ' if applicable , marked as an XML attribute .
if ( m_type == FieldType . BINARY ) { fieldNode . addValueNode ( "encoding" , m_encoding . toString ( ) , true ) ; } // If this field is a link , add the ' inverse ' and ' table ' attributes , each
// marked as XML attributes . Add the link field number only if it is being used .
if ( m_type . isLinkType ( ) ) { fieldNode . addValueNode ( "inverse" , m_linkInverse , true ) ; fieldNode . addValueNode ( "table" , m_linkExtent , true ) ; } // Add ' sharded ' for link if true .
if ( m_type == FieldType . LINK && m_bIsSharded ) { fieldNode . addValueNode ( "sharded" , Boolean . toString ( m_bIsSharded ) , true ) ; } // Add ' junction ' for xlink .
if ( m_type == FieldType . XLINK ) { fieldNode . addValueNode ( "junction" , m_junctionField , true ) ; } } else { // Create a MAP node called " fields " .
assert m_nestedFieldMap . size ( ) > 0 ; UNode fieldsNode = fieldNode . addMapNode ( "fields" ) ; // Recursing to nested fields , adding them as child nodes .
for ( FieldDefinition nestedFieldDef : getNestedFields ( ) ) { fieldsNode . addChildNode ( nestedFieldDef . toDoc ( ) ) ; } } return fieldNode ; |
public class ConsoleReporter { /** * Print only if the attribute is enabled
* @ param type Metric attribute
* @ param status Status to be logged */
private void printIfEnabled ( MetricAttribute type , String status ) { } } | if ( getDisabledMetricAttributes ( ) . contains ( type ) ) { return ; } output . println ( status ) ; |
public class MiscUtils { /** * Simple code to copy a file from one place to another . . .
* Java should have this built in . . . stupid java . . . */
public static void copyFile ( String fromPath , String toPath ) throws Exception { } } | File inputFile = new File ( fromPath ) ; File outputFile = new File ( toPath ) ; com . google_voltpatches . common . io . Files . copy ( inputFile , outputFile ) ; |
public class Graphs { /** * Find all strongly - connected components in a graph . When a new SCC is found , the { @ link
* SCCListener # foundSCC ( java . util . Collection ) } method is invoked . The listener object may hence not be null .
* Tarjan ' s algorithm is used for realizing the SCC search .
* @ param graph
* the graph
* @ param sccListener
* the SCC listener
* @ see TarjanSCCVisitor
* @ see SCCs */
public static < N , E > void findSCCs ( Graph < N , E > graph , SCCListener < N > sccListener ) { } } | SCCs . findSCCs ( graph , sccListener ) ; |
public class TextOnlyLayout { /** * Ends one line of content . */
@ Override public void endContentLine ( ChainWriter out , WebSiteRequest req , HttpServletResponse resp , int rowspan , boolean endsInternal ) { } } | out . print ( " </td>\n" + " </tr>\n" ) ; |
public class OAuth20Utils { /** * Is authorized grant type for service ?
* @ param context the context
* @ param registeredService the registered service
* @ return true / false */
public static boolean isAuthorizedGrantTypeForService ( final J2EContext context , final OAuthRegisteredService registeredService ) { } } | return isAuthorizedGrantTypeForService ( context . getRequestParameter ( OAuth20Constants . GRANT_TYPE ) , registeredService ) ; |
public class JavacParser { /** * SwitchBlockStatementGroups = { SwitchBlockStatementGroup }
* SwitchBlockStatementGroup = SwitchLabel BlockStatements
* SwitchLabel = CASE ConstantExpression " : " | DEFAULT " : " */
List < JCCase > switchBlockStatementGroups ( ) { } } | ListBuffer < JCCase > cases = new ListBuffer < JCCase > ( ) ; while ( true ) { int pos = token . pos ; switch ( token . kind ) { case CASE : case DEFAULT : cases . append ( switchBlockStatementGroup ( ) ) ; break ; case RBRACE : case EOF : return cases . toList ( ) ; default : nextToken ( ) ; // to ensure progress
syntaxError ( pos , "expected3" , CASE , DEFAULT , RBRACE ) ; } } |
public class PoolsImpl { /** * Updates the properties of the specified pool .
* This only replaces the pool properties specified in the request . For example , if the pool has a start task associated with it , and a request does not specify a start task element , then the pool keeps the existing start task .
* @ param poolId The ID of the pool to update .
* @ param poolPatchParameter The parameters for the request .
* @ param poolPatchOptions Additional parameters for the operation
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > patchAsync ( String poolId , PoolPatchParameter poolPatchParameter , PoolPatchOptions poolPatchOptions , final ServiceCallback < Void > serviceCallback ) { } } | return ServiceFuture . fromHeaderResponse ( patchWithServiceResponseAsync ( poolId , poolPatchParameter , poolPatchOptions ) , serviceCallback ) ; |
public class BaseRequest { /** * Download this resource asynchronously , with the given JSON object as the request body .
* The download progress will be monitored with a { @ link ProgressListener } .
* If the Content - Type header was not previously set , this method will set it to " application / json " .
* < b > Note : < / b > This method consumes the < code > InputStream < / code > from the response and closes it ,
* so the { @ link Response # getResponseByteStream ( ) } method will always return null for this request .
* @ param json The JSON object to put in the request body
* @ param progressListener The listener that monitors the download progress
* @ param responseListener The listener whose onSuccess or onFailure methods will be called when this request finishes */
protected void download ( JSONObject json , final ProgressListener progressListener , ResponseListener responseListener ) { } } | String contentType = headers . get ( CONTENT_TYPE ) ; if ( contentType == null ) { contentType = JSON_CONTENT_TYPE ; } RequestBody body = RequestBody . create ( MediaType . parse ( contentType ) , json . toString ( ) ) ; sendRequest ( progressListener , responseListener , body ) ; |
public class XPathParser { /** * Parses the the rule CastableExpr according to the following production
* rule :
* [ 18 ] CastableExpr : : = CastExpr ( < " castable " " as " > SingleType ) ? .
* @ throws TTXPathException */
private void parseCastableExpr ( ) throws TTXPathException { } } | parseCastExpr ( ) ; if ( is ( "castable" , true ) ) { consume ( "as" , true ) ; mPipeBuilder . addCastableExpr ( getTransaction ( ) , parseSingleType ( ) ) ; } |
public class KeyUsageExtension { /** * Set the attribute value . */
public void set ( String name , Object obj ) throws IOException { } } | if ( ! ( obj instanceof Boolean ) ) { throw new IOException ( "Attribute must be of type Boolean." ) ; } boolean val = ( ( Boolean ) obj ) . booleanValue ( ) ; if ( name . equalsIgnoreCase ( DIGITAL_SIGNATURE ) ) { set ( 0 , val ) ; } else if ( name . equalsIgnoreCase ( NON_REPUDIATION ) ) { set ( 1 , val ) ; } else if ( name . equalsIgnoreCase ( KEY_ENCIPHERMENT ) ) { set ( 2 , val ) ; } else if ( name . equalsIgnoreCase ( DATA_ENCIPHERMENT ) ) { set ( 3 , val ) ; } else if ( name . equalsIgnoreCase ( KEY_AGREEMENT ) ) { set ( 4 , val ) ; } else if ( name . equalsIgnoreCase ( KEY_CERTSIGN ) ) { set ( 5 , val ) ; } else if ( name . equalsIgnoreCase ( CRL_SIGN ) ) { set ( 6 , val ) ; } else if ( name . equalsIgnoreCase ( ENCIPHER_ONLY ) ) { set ( 7 , val ) ; } else if ( name . equalsIgnoreCase ( DECIPHER_ONLY ) ) { set ( 8 , val ) ; } else { throw new IOException ( "Attribute name not recognized by" + " CertAttrSet:KeyUsage." ) ; } encodeThis ( ) ; |
public class Paginator { /** * Returns total count of records based on provided criteria .
* @ return total count of records based on provided criteria */
public Long getCount ( ) { } } | if ( count == 0L || ! suppressCounts ) { if ( metaModel . cached ( ) ) { count = ( Long ) QueryCache . instance ( ) . getItem ( metaModel . getTableName ( ) , countQueryFull , params ) ; if ( count == null || count == 0 ) { count = doCount ( ) ; QueryCache . instance ( ) . addItem ( metaModel . getTableName ( ) , countQueryFull , params , count ) ; } else { LogFilter . logQuery ( LOGGER , countQueryFull , params , System . currentTimeMillis ( ) , true ) ; } } else { count = doCount ( ) ; } return count ; } else { return count ; } |
public class DatanodeInfo { /** * A formatted string for reporting the status of the DataNode . */
public String getDatanodeReport ( ) { } } | StringBuffer buffer = new StringBuffer ( ) ; long c = getCapacity ( ) ; long r = getRemaining ( ) ; long u = getDfsUsed ( ) ; long nonDFSUsed = getNonDfsUsed ( ) ; float usedPercent = getDfsUsedPercent ( ) ; float remainingPercent = getRemainingPercent ( ) ; buffer . append ( "Name: " + name + "\n" ) ; if ( ! NetworkTopology . DEFAULT_RACK . equals ( location ) ) { buffer . append ( "Rack: " + location + "\n" ) ; } buffer . append ( "Decommission Status : " ) ; if ( isDecommissioned ( ) ) { buffer . append ( "Decommissioned\n" ) ; } else if ( isDecommissionInProgress ( ) ) { buffer . append ( "Decommission in progress\n" ) ; } else { buffer . append ( "Normal\n" ) ; } buffer . append ( "Configured Capacity: " + c + " (" + StringUtils . byteDesc ( c ) + ")" + "\n" ) ; buffer . append ( "DFS Used: " + u + " (" + StringUtils . byteDesc ( u ) + ")" + "\n" ) ; buffer . append ( "Non DFS Used: " + nonDFSUsed + " (" + StringUtils . byteDesc ( nonDFSUsed ) + ")" + "\n" ) ; buffer . append ( "DFS Remaining: " + r + "(" + StringUtils . byteDesc ( r ) + ")" + "\n" ) ; buffer . append ( "DFS Used%: " + StringUtils . limitDecimalTo2 ( usedPercent ) + "%\n" ) ; buffer . append ( "DFS Remaining%: " + StringUtils . limitDecimalTo2 ( remainingPercent ) + "%\n" ) ; buffer . append ( "Last contact: " + new Date ( lastUpdate ) + "\n" ) ; return buffer . toString ( ) ; |
public class Intersectionf { /** * Compute the distance of the given point < code > ( pX , pY , pZ ) < / code > to the line defined by the two points < code > ( x0 , y0 , z0 ) < / code > and < code > ( x1 , y1 , z1 ) < / code > .
* Reference : < a href = " http : / / mathworld . wolfram . com / Point - LineDistance3 - Dimensional . html " > http : / / mathworld . wolfram . com < / a >
* @ param pX
* the x coordinate of the point
* @ param pY
* the y coordinate of the point
* @ param pZ
* the z coordinate of the point
* @ param x0
* the x coordinate of the first point on the line
* @ param y0
* the y coordinate of the first point on the line
* @ param z0
* the z coordinate of the first point on the line
* @ param x1
* the x coordinate of the second point on the line
* @ param y1
* the y coordinate of the second point on the line
* @ param z1
* the z coordinate of the second point on the line
* @ return the distance between the point and the line */
public static float distancePointLine ( float pX , float pY , float pZ , float x0 , float y0 , float z0 , float x1 , float y1 , float z1 ) { } } | float d21x = x1 - x0 , d21y = y1 - y0 , d21z = z1 - z0 ; float d10x = x0 - pX , d10y = y0 - pY , d10z = z0 - pZ ; float cx = d21y * d10z - d21z * d10y , cy = d21z * d10x - d21x * d10z , cz = d21x * d10y - d21y * d10x ; return ( float ) Math . sqrt ( ( cx * cx + cy * cy + cz * cz ) / ( d21x * d21x + d21y * d21y + d21z * d21z ) ) ; |
public class BaseDaoEnabled { /** * A call through to the { @ link Dao # objectToString ( Object ) } . */
public String objectToString ( ) { } } | try { checkForDao ( ) ; } catch ( SQLException e ) { throw new IllegalArgumentException ( e ) ; } @ SuppressWarnings ( "unchecked" ) T t = ( T ) this ; return dao . objectToString ( t ) ; |
public class Serializer { /** * Serialize document to the writer initialized by constructor . This method accept both XML and HTML documents and write
* prolog accordingly : HTML document type , respective XML declaration . After prolog write nodes tree recursively , invoking
* { @ link # write ( Node ) } with document root .
* When document nodes tree is complete flush the writer but does not close it .
* @ param doc document to serialize .
* @ throws IOException if write operation fails . */
public void serialize ( DocumentImpl doc ) throws IOException { } } | org . w3c . dom . Document w3cDoc = doc . getDocument ( ) ; if ( w3cDoc instanceof HTMLDocumentImpl ) { // if w3cDoc has no document type default to html5 : < ! DOCTYPE html >
writer . write ( "<!DOCTYPE html" ) ; DocumentType dt = w3cDoc . getDoctype ( ) ; if ( dt != null ) { if ( dt . getPublicId ( ) != null ) { writer . write ( " PUBLIC \"" ) ; writer . write ( dt . getPublicId ( ) ) ; writer . write ( "\"" ) ; } if ( dt . getSystemId ( ) != null ) { writer . write ( " \"" ) ; writer . write ( dt . getSystemId ( ) ) ; writer . write ( "\"" ) ; } } writer . write ( ">\r\n" ) ; } else { // if not html write xml declaration
writer . write ( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\r\n" ) ; } if ( doc . getRoot ( ) != null ) { write ( ( ( ElementImpl ) doc . getRoot ( ) ) . getNode ( ) ) ; } writer . flush ( ) ; |
public class StructuredObject { /** * Validate this object against the specific schema for this type from the official repo .
* @ param parser A parser to do the validation .
* @ return A context containing the detailed error report if any .
* @ throws SchemaValidationException If this object is not valid according to it ' s schema .
* @ throws NoSchemaException If there is no official schema for this type .
* @ throws InvalidSchemaException If the schema exists but is not itself valid or cannot be read . */
public IEntityJsonSchemaContext validate ( EntityJsonParser parser ) throws SchemaValidationException , NoSchemaException , InvalidSchemaException { } } | StringBuffer ubuf = new StringBuffer ( "https://symphonyosf.github.io/symphony-object/proposed" ) ; for ( String part : type_ . split ( "\\." ) ) { ubuf . append ( "/" ) ; ubuf . append ( part ) ; } ubuf . append ( "-v" ) ; ubuf . append ( majorVersion_ ) ; ubuf . append ( "_" ) ; ubuf . append ( minorVersion_ ) ; ubuf . append ( ".json" ) ; try { return parser . validate ( new URL ( ubuf . toString ( ) ) , instanceSource_ , jsonNode_ ) ; } catch ( MalformedURLException e ) { throw new InvalidSchemaException ( null , e ) ; } |
public class Slice { /** * Gets a 64 - bit double at the specified absolute { @ code index } in
* this buffer .
* @ throws IndexOutOfBoundsException if the specified { @ code index } is less than { @ code 0 } or
* { @ code index + 8 } is greater than { @ code this . length ( ) } */
public double getDouble ( int index ) { } } | checkIndexLength ( index , SizeOf . SIZE_OF_DOUBLE ) ; return unsafe . getDouble ( base , address + index ) ; |
public class LabelInfo { /** * Configures the given label with the property values described by this instance and then sets
* it as the label for the given component .
* @ param label The label to be configured .
* @ param component The component that the label is ' for ' .
* @ throws IllegalArgumentException if either argument is null .
* @ see JLabel # setLabelFor ( java . awt . Component ) */
public void configureLabelFor ( JLabel label , JComponent component ) { } } | Assert . notNull ( label , "label" ) ; Assert . notNull ( component , "component" ) ; configureLabel ( label ) ; if ( ! ( component instanceof JPanel ) ) { String labelText = label . getText ( ) ; if ( ! labelText . endsWith ( ":" ) ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Appending colon to text field label text '" + this . text + "'" ) ; } label . setText ( labelText + ":" ) ; } } label . setLabelFor ( component ) ; |
public class AbstractNotification { /** * Returns the helper for { @ link # toString ( ) } . */
protected Objects . ToStringHelper toStringHelper ( ) { } } | return Objects . toStringHelper ( this ) . add ( "messageNumber" , messageNumber ) . add ( "resourceState" , resourceState ) . add ( "resourceId" , resourceId ) . add ( "resourceUri" , resourceUri ) . add ( "channelId" , channelId ) . add ( "channelExpiration" , channelExpiration ) . add ( "channelToken" , channelToken ) . add ( "changed" , changed ) ; |
public class CSV { /** * Reads in a CSV dataset as a classification dataset .
* @ param classification _ target the column index ( starting from zero ) of the
* feature that will be the categorical target value
* @ param reader the reader for the CSV content
* @ param delimiter the delimiter to separate columns , usually a comma
* @ param lines _ to _ skip the number of lines to skip when reading in the CSV
* ( used to skip header information )
* @ param comment the character used to indicate the start of a comment .
* Once this character is reached , anything at and after the character will
* be ignored .
* @ param cat _ cols a set of the indices to treat as categorical features .
* @ return the classification dataset from the given CSV file
* @ throws IOException */
public static ClassificationDataSet readC ( int classification_target , Reader reader , char delimiter , int lines_to_skip , char comment , Set < Integer > cat_cols ) throws IOException { } } | return ( ClassificationDataSet ) readCSV ( reader , lines_to_skip , delimiter , comment , cat_cols , - 1 , classification_target ) ; |
public class ChessboardCornerClusterFinder { /** * Given the current graph , attempt to replace edges from vertexes which lost them in an apparent bad decision .
* This is a very simple algorithm and gives up if the graph around the current node is less than perfect .
* 1 ) Can only connect to vertexes which lost edges
* 2 ) Can ' t modify an existing edge
* 3 ) Parallel lines must be parallel
* 4 ) If current graph can ' t predict direction no decision is made */
private void repairVertexes ( ) { } } | // System . out . println ( " # # # # # Repair " ) ;
for ( int idxV = 0 ; idxV < dirtyVertexes . size ( ) ; idxV ++ ) { final Vertex v = dirtyVertexes . get ( idxV ) ; // System . out . println ( " dirty = " + v . index ) ;
bestSolution . clear ( ) ; for ( int idxE = 0 ; idxE < v . perpendicular . size ( ) ; idxE ++ ) { // Assume this edge is in the solutions
Edge e = v . perpendicular . get ( idxE ) ; // only can connect new modified vertexes or ones already connected too
if ( ! ( e . dst . marked || - 1 != v . connections . find ( e . dst ) ) ) { continue ; } // System . out . println ( " e [ 0 ] . dst = " + e . dst . index ) ;
solution . clear ( ) ; solution . add ( e ) ; // search for connections until there ' s no more to be found
for ( int count = 0 ; count < 3 ; count ++ ) { // Find the an edge which is about to 90 degrees CCW of the previous edge ' v '
Vertex va = null ; double dir90 = UtilAngle . bound ( e . direction + Math . PI / 2.0 ) ; for ( int i = 0 ; i < e . dst . connections . size ( ) ; i ++ ) { Edge ei = e . dst . connections . get ( i ) ; if ( UtilAngle . dist ( ei . direction , dir90 ) < Math . PI / 3 ) { va = ei . dst ; break ; } } // Search for an edge in v which has a connection to ' va ' and is ccw of ' e '
boolean matched = false ; for ( int i = 0 ; i < v . perpendicular . size ( ) ; i ++ ) { Edge ei = v . perpendicular . get ( i ) ; if ( e == ei ) continue ; // angle test
double ccw = UtilAngle . distanceCCW ( e . direction , ei . direction ) ; if ( ccw > Math . PI * 0.9 ) continue ; if ( ! ( ei . dst . marked || - 1 != v . connections . find ( ei . dst ) ) ) { continue ; } // connection test
if ( ei . dst . connections . find ( va ) != - 1 ) { // System . out . println ( " e [ i ] . dst = " + ei . dst . index + " va = " + va . index ) ;
e = ei ; solution . add ( ei ) ; matched = true ; break ; } } if ( ! matched ) break ; } if ( solution . size ( ) > bestSolution . size ( ) ) { bestSolution . clear ( ) ; bestSolution . addAll ( solution ) ; } } if ( bestSolution . size ( ) > 1 ) { // See if any connection that was there before is now gone . If that ' s the case the destination
// will need to be checked for mutual matches
for ( int i = 0 ; i < v . connections . edges . size ( ) ; i ++ ) { if ( ! bestSolution . contains ( v . connections . edges . get ( i ) ) ) { v . connections . edges . get ( i ) . dst . marked = true ; break ; } } // Save the new connections
v . connections . edges . clear ( ) ; v . connections . edges . addAll ( bestSolution ) ; } } |
public class GregorianCalendar { /** * Adds a signed amount to the specified calendar field without changing larger fields .
* A negative roll amount means to subtract from field without changing
* larger fields . If the specified amount is 0 , this method performs nothing .
* < p > This method calls { @ link # complete ( ) } before adding the
* amount so that all the calendar fields are normalized . If there
* is any calendar field having an out - of - range value in non - lenient mode , then an
* < code > IllegalArgumentException < / code > is thrown .
* < em > Example < / em > : Consider a < code > GregorianCalendar < / code >
* originally set to August 31 , 1999 . Calling < code > roll ( Calendar . MONTH ,
* 8 ) < / code > sets the calendar to April 30 , < strong > 1999 < / strong > . Using a
* < code > GregorianCalendar < / code > , the < code > DAY _ OF _ MONTH < / code > field cannot
* be 31 in the month April . < code > DAY _ OF _ MONTH < / code > is set to the closest possible
* value , 30 . The < code > YEAR < / code > field maintains the value of 1999 because it
* is a larger field than < code > MONTH < / code > .
* < em > Example < / em > : Consider a < code > GregorianCalendar < / code >
* originally set to Sunday June 6 , 1999 . Calling
* < code > roll ( Calendar . WEEK _ OF _ MONTH , - 1 ) < / code > sets the calendar to
* Tuesday June 1 , 1999 , whereas calling
* < code > add ( Calendar . WEEK _ OF _ MONTH , - 1 ) < / code > sets the calendar to
* Sunday May 30 , 1999 . This is because the roll rule imposes an
* additional constraint : The < code > MONTH < / code > must not change when the
* < code > WEEK _ OF _ MONTH < / code > is rolled . Taken together with add rule 1,
* the resultant date must be between Tuesday June 1 and Saturday June
* 5 . According to add rule 2 , the < code > DAY _ OF _ WEEK < / code > , an invariant
* when changing the < code > WEEK _ OF _ MONTH < / code > , is set to Tuesday , the
* closest possible value to Sunday ( where Sunday is the first day of the
* week ) . < / p >
* @ param field the calendar field .
* @ param amount the signed amount to add to < code > field < / code > .
* @ exception IllegalArgumentException if < code > field < / code > is
* < code > ZONE _ OFFSET < / code > , < code > DST _ OFFSET < / code > , or unknown ,
* or if any calendar fields have out - of - range values in
* non - lenient mode .
* @ see # roll ( int , boolean )
* @ see # add ( int , int )
* @ see # set ( int , int )
* @ since 1.2 */
@ Override public void roll ( int field , int amount ) { } } | // If amount = = 0 , do nothing even the given field is out of
// range . This is tested by JCK .
if ( amount == 0 ) { return ; } if ( field < 0 || field >= ZONE_OFFSET ) { throw new IllegalArgumentException ( ) ; } // Sync the time and calendar fields .
complete ( ) ; int min = getMinimum ( field ) ; int max = getMaximum ( field ) ; switch ( field ) { case AM_PM : case ERA : case YEAR : case MINUTE : case SECOND : case MILLISECOND : // These fields are handled simply , since they have fixed minima
// and maxima . The field DAY _ OF _ MONTH is almost as simple . Other
// fields are complicated , since the range within they must roll
// varies depending on the date .
break ; case HOUR : case HOUR_OF_DAY : { int unit = max + 1 ; // 12 or 24 hours
int h = internalGet ( field ) ; int nh = ( h + amount ) % unit ; if ( nh < 0 ) { nh += unit ; } time += ONE_HOUR * ( nh - h ) ; // The day might have changed , which could happen if
// the daylight saving time transition brings it to
// the next day , although it ' s very unlikely . But we
// have to make sure not to change the larger fields .
CalendarDate d = calsys . getCalendarDate ( time , getZone ( ) ) ; if ( internalGet ( DAY_OF_MONTH ) != d . getDayOfMonth ( ) ) { d . setDate ( internalGet ( YEAR ) , internalGet ( MONTH ) + 1 , internalGet ( DAY_OF_MONTH ) ) ; if ( field == HOUR ) { assert ( internalGet ( AM_PM ) == PM ) ; d . addHours ( + 12 ) ; // restore PM
} time = calsys . getTime ( d ) ; } int hourOfDay = d . getHours ( ) ; internalSet ( field , hourOfDay % unit ) ; if ( field == HOUR ) { internalSet ( HOUR_OF_DAY , hourOfDay ) ; } else { internalSet ( AM_PM , hourOfDay / 12 ) ; internalSet ( HOUR , hourOfDay % 12 ) ; } // Time zone offset and / or daylight saving might have changed .
int zoneOffset = d . getZoneOffset ( ) ; int saving = d . getDaylightSaving ( ) ; internalSet ( ZONE_OFFSET , zoneOffset - saving ) ; internalSet ( DST_OFFSET , saving ) ; return ; } case MONTH : // Rolling the month involves both pinning the final value to [ 0 , 11]
// and adjusting the DAY _ OF _ MONTH if necessary . We only adjust the
// DAY _ OF _ MONTH if , after updating the MONTH field , it is illegal .
// E . g . , < jan31 > . roll ( MONTH , 1 ) - > < feb28 > or < feb29 > .
{ if ( ! isCutoverYear ( cdate . getNormalizedYear ( ) ) ) { int mon = ( internalGet ( MONTH ) + amount ) % 12 ; if ( mon < 0 ) { mon += 12 ; } set ( MONTH , mon ) ; // Keep the day of month in the range . We don ' t want to spill over
// into the next month ; e . g . , we don ' t want jan31 + 1 mo - > feb31 - >
// mar3.
int monthLen = monthLength ( mon ) ; if ( internalGet ( DAY_OF_MONTH ) > monthLen ) { set ( DAY_OF_MONTH , monthLen ) ; } } else { // We need to take care of different lengths in
// year and month due to the cutover .
int yearLength = getActualMaximum ( MONTH ) + 1 ; int mon = ( internalGet ( MONTH ) + amount ) % yearLength ; if ( mon < 0 ) { mon += yearLength ; } set ( MONTH , mon ) ; int monthLen = getActualMaximum ( DAY_OF_MONTH ) ; if ( internalGet ( DAY_OF_MONTH ) > monthLen ) { set ( DAY_OF_MONTH , monthLen ) ; } } return ; } case WEEK_OF_YEAR : { int y = cdate . getNormalizedYear ( ) ; max = getActualMaximum ( WEEK_OF_YEAR ) ; set ( DAY_OF_WEEK , internalGet ( DAY_OF_WEEK ) ) ; int woy = internalGet ( WEEK_OF_YEAR ) ; int value = woy + amount ; if ( ! isCutoverYear ( y ) ) { // If the new value is in between min and max
// ( exclusive ) , then we can use the value .
if ( value > min && value < max ) { set ( WEEK_OF_YEAR , value ) ; return ; } long fd = getCurrentFixedDate ( ) ; // Make sure that the min week has the current DAY _ OF _ WEEK
long day1 = fd - ( 7 * ( woy - min ) ) ; if ( calsys . getYearFromFixedDate ( day1 ) != y ) { min ++ ; } // Make sure the same thing for the max week
fd += 7 * ( max - internalGet ( WEEK_OF_YEAR ) ) ; if ( calsys . getYearFromFixedDate ( fd ) != y ) { max -- ; } break ; } // Handle cutover here .
long fd = getCurrentFixedDate ( ) ; BaseCalendar cal ; if ( gregorianCutoverYear == gregorianCutoverYearJulian ) { cal = getCutoverCalendarSystem ( ) ; } else if ( y == gregorianCutoverYear ) { cal = gcal ; } else { cal = getJulianCalendarSystem ( ) ; } long day1 = fd - ( 7 * ( woy - min ) ) ; // Make sure that the min week has the current DAY _ OF _ WEEK
if ( cal . getYearFromFixedDate ( day1 ) != y ) { min ++ ; } // Make sure the same thing for the max week
fd += 7 * ( max - woy ) ; cal = ( fd >= gregorianCutoverDate ) ? gcal : getJulianCalendarSystem ( ) ; if ( cal . getYearFromFixedDate ( fd ) != y ) { max -- ; } // value : the new WEEK _ OF _ YEAR which must be converted
// to month and day of month .
value = getRolledValue ( woy , amount , min , max ) - 1 ; BaseCalendar . Date d = getCalendarDate ( day1 + value * 7 ) ; set ( MONTH , d . getMonth ( ) - 1 ) ; set ( DAY_OF_MONTH , d . getDayOfMonth ( ) ) ; return ; } case WEEK_OF_MONTH : { boolean isCutoverYear = isCutoverYear ( cdate . getNormalizedYear ( ) ) ; // dow : relative day of week from first day of week
int dow = internalGet ( DAY_OF_WEEK ) - getFirstDayOfWeek ( ) ; if ( dow < 0 ) { dow += 7 ; } long fd = getCurrentFixedDate ( ) ; long month1 ; // fixed date of the first day ( usually 1 ) of the month
int monthLength ; // actual month length
if ( isCutoverYear ) { month1 = getFixedDateMonth1 ( cdate , fd ) ; monthLength = actualMonthLength ( ) ; } else { month1 = fd - internalGet ( DAY_OF_MONTH ) + 1 ; monthLength = calsys . getMonthLength ( cdate ) ; } // the first day of week of the month .
long monthDay1st = BaseCalendar . getDayOfWeekDateOnOrBefore ( month1 + 6 , getFirstDayOfWeek ( ) ) ; // if the week has enough days to form a week , the
// week starts from the previous month .
if ( ( int ) ( monthDay1st - month1 ) >= getMinimalDaysInFirstWeek ( ) ) { monthDay1st -= 7 ; } max = getActualMaximum ( field ) ; // value : the new WEEK _ OF _ MONTH value
int value = getRolledValue ( internalGet ( field ) , amount , 1 , max ) - 1 ; // nfd : fixed date of the rolled date
long nfd = monthDay1st + value * 7 + dow ; // Unlike WEEK _ OF _ YEAR , we need to change day of week if the
// nfd is out of the month .
if ( nfd < month1 ) { nfd = month1 ; } else if ( nfd >= ( month1 + monthLength ) ) { nfd = month1 + monthLength - 1 ; } int dayOfMonth ; if ( isCutoverYear ) { // If we are in the cutover year , convert nfd to
// its calendar date and use dayOfMonth .
BaseCalendar . Date d = getCalendarDate ( nfd ) ; dayOfMonth = d . getDayOfMonth ( ) ; } else { dayOfMonth = ( int ) ( nfd - month1 ) + 1 ; } set ( DAY_OF_MONTH , dayOfMonth ) ; return ; } case DAY_OF_MONTH : { if ( ! isCutoverYear ( cdate . getNormalizedYear ( ) ) ) { max = calsys . getMonthLength ( cdate ) ; break ; } // Cutover year handling
long fd = getCurrentFixedDate ( ) ; long month1 = getFixedDateMonth1 ( cdate , fd ) ; // It may not be a regular month . Convert the date and range to
// the relative values , perform the roll , and
// convert the result back to the rolled date .
int value = getRolledValue ( ( int ) ( fd - month1 ) , amount , 0 , actualMonthLength ( ) - 1 ) ; BaseCalendar . Date d = getCalendarDate ( month1 + value ) ; assert d . getMonth ( ) - 1 == internalGet ( MONTH ) ; set ( DAY_OF_MONTH , d . getDayOfMonth ( ) ) ; return ; } case DAY_OF_YEAR : { max = getActualMaximum ( field ) ; if ( ! isCutoverYear ( cdate . getNormalizedYear ( ) ) ) { break ; } // Handle cutover here .
long fd = getCurrentFixedDate ( ) ; long jan1 = fd - internalGet ( DAY_OF_YEAR ) + 1 ; int value = getRolledValue ( ( int ) ( fd - jan1 ) + 1 , amount , min , max ) ; BaseCalendar . Date d = getCalendarDate ( jan1 + value - 1 ) ; set ( MONTH , d . getMonth ( ) - 1 ) ; set ( DAY_OF_MONTH , d . getDayOfMonth ( ) ) ; return ; } case DAY_OF_WEEK : { if ( ! isCutoverYear ( cdate . getNormalizedYear ( ) ) ) { // If the week of year is in the same year , we can
// just change DAY _ OF _ WEEK .
int weekOfYear = internalGet ( WEEK_OF_YEAR ) ; if ( weekOfYear > 1 && weekOfYear < 52 ) { set ( WEEK_OF_YEAR , weekOfYear ) ; // update stamp [ WEEK _ OF _ YEAR ]
max = SATURDAY ; break ; } } // We need to handle it in a different way around year
// boundaries and in the cutover year . Note that
// changing era and year values violates the roll
// rule : not changing larger calendar fields . . .
amount %= 7 ; if ( amount == 0 ) { return ; } long fd = getCurrentFixedDate ( ) ; long dowFirst = BaseCalendar . getDayOfWeekDateOnOrBefore ( fd , getFirstDayOfWeek ( ) ) ; fd += amount ; if ( fd < dowFirst ) { fd += 7 ; } else if ( fd >= dowFirst + 7 ) { fd -= 7 ; } BaseCalendar . Date d = getCalendarDate ( fd ) ; set ( ERA , ( d . getNormalizedYear ( ) <= 0 ? BCE : CE ) ) ; set ( d . getYear ( ) , d . getMonth ( ) - 1 , d . getDayOfMonth ( ) ) ; return ; } case DAY_OF_WEEK_IN_MONTH : { min = 1 ; // after normalized , min should be 1.
if ( ! isCutoverYear ( cdate . getNormalizedYear ( ) ) ) { int dom = internalGet ( DAY_OF_MONTH ) ; int monthLength = calsys . getMonthLength ( cdate ) ; int lastDays = monthLength % 7 ; max = monthLength / 7 ; int x = ( dom - 1 ) % 7 ; if ( x < lastDays ) { max ++ ; } set ( DAY_OF_WEEK , internalGet ( DAY_OF_WEEK ) ) ; break ; } // Cutover year handling
long fd = getCurrentFixedDate ( ) ; long month1 = getFixedDateMonth1 ( cdate , fd ) ; int monthLength = actualMonthLength ( ) ; int lastDays = monthLength % 7 ; max = monthLength / 7 ; int x = ( int ) ( fd - month1 ) % 7 ; if ( x < lastDays ) { max ++ ; } int value = getRolledValue ( internalGet ( field ) , amount , min , max ) - 1 ; fd = month1 + value * 7 + x ; BaseCalendar cal = ( fd >= gregorianCutoverDate ) ? gcal : getJulianCalendarSystem ( ) ; BaseCalendar . Date d = ( BaseCalendar . Date ) cal . newCalendarDate ( TimeZone . NO_TIMEZONE ) ; cal . getCalendarDateFromFixedDate ( d , fd ) ; set ( DAY_OF_MONTH , d . getDayOfMonth ( ) ) ; return ; } } set ( field , getRolledValue ( internalGet ( field ) , amount , min , max ) ) ; |
public class ValueDataUtil { /** * Read value data from file .
* @ param type
* property type , { @ link PropertyType }
* @ param file
* File
* @ param orderNumber
* value data order number
* @ param spoolConfig
* contains threshold for spooling
* @ return PersistedValueData
* @ throws IOException
* if any error is occurred */
public static ValueDataWrapper readValueData ( int type , int orderNumber , File file , SpoolConfig spoolConfig ) throws IOException { } } | ValueDataWrapper vdDataWrapper = new ValueDataWrapper ( ) ; long fileSize = file . length ( ) ; vdDataWrapper . size = fileSize ; if ( fileSize > spoolConfig . maxBufferSize ) { vdDataWrapper . value = new FilePersistedValueData ( orderNumber , file , spoolConfig ) ; } else { // JCR - 2463 In case the file was renamed to be removed / changed ,
// but the transaction wasn ' t rollbacked cleanly
file = fixFileName ( file ) ; FileInputStream is = new FileInputStream ( file ) ; try { byte [ ] data = new byte [ ( int ) fileSize ] ; byte [ ] buff = new byte [ ValueFileIOHelper . IOBUFFER_SIZE > fileSize ? ValueFileIOHelper . IOBUFFER_SIZE : ( int ) fileSize ] ; int rpos = 0 ; int read ; while ( ( read = is . read ( buff ) ) >= 0 ) { System . arraycopy ( buff , 0 , data , rpos , read ) ; rpos += read ; } vdDataWrapper . value = createValueData ( type , orderNumber , data ) ; } finally { is . close ( ) ; } } return vdDataWrapper ; |
public class Assembler { /** * ge succeeds if and only if value & gt ; = 0
* @ param target
* @ throws IOException */
public void ifge ( String target ) throws IOException { } } | if ( wideIndex ) { out . writeByte ( NOT_IFGE ) ; out . writeShort ( WIDEFIXOFFSET ) ; Branch branch = createBranch ( target ) ; out . writeByte ( GOTO_W ) ; out . writeInt ( branch ) ; } else { Branch branch = createBranch ( target ) ; out . writeOpCode ( IFGE ) ; out . writeShort ( branch ) ; } |
public class Dcs_qr { /** * Sparse QR factorization of an m - by - n matrix A , A = Q * R
* @ param A
* column - compressed matrix
* @ param S
* symbolic QR analysis
* @ return numeric QR factorization , null on error */
public static Dcsn cs_qr ( Dcs A , Dcss S ) { } } | double Rx [ ] , Vx [ ] , Ax [ ] , x [ ] , Beta [ ] ; int i , k , p , n , vnz , p1 , top , m2 , len , col , rnz , s [ ] , leftmost [ ] , Ap [ ] , Ai [ ] , parent [ ] , Rp [ ] , Ri [ ] , Vp [ ] , Vi [ ] , w [ ] , pinv [ ] , q [ ] ; Dcs R , V ; Dcsn N ; if ( ! Dcs_util . CS_CSC ( A ) || S == null ) return ( null ) ; n = A . n ; Ap = A . p ; Ai = A . i ; Ax = A . x ; q = S . q ; parent = S . parent ; pinv = S . pinv ; m2 = S . m2 ; vnz = S . lnz ; rnz = S . unz ; leftmost = S . leftmost ; w = new int [ m2 + n ] ; /* get int workspace */
x = new double [ m2 ] ; /* get double workspace */
N = new Dcsn ( ) ; /* allocate result */
s = w ; int s_offset = m2 ; /* s is size n */
for ( k = 0 ; k < m2 ; k ++ ) x [ k ] = 0 ; /* clear workspace x */
N . L = V = Dcs_util . cs_spalloc ( m2 , n , vnz , true , false ) ; /* allocate result V */
N . U = R = Dcs_util . cs_spalloc ( m2 , n , rnz , true , false ) ; /* allocate result R */
N . B = Beta = new double [ n ] ; /* allocate result Beta */
Rp = R . p ; Ri = R . i ; Rx = R . x ; Vp = V . p ; Vi = V . i ; Vx = V . x ; for ( i = 0 ; i < m2 ; i ++ ) w [ i ] = - 1 ; /* clear w , to mark nodes */
rnz = 0 ; vnz = 0 ; for ( k = 0 ; k < n ; k ++ ) /* compute V and R */
{ Rp [ k ] = rnz ; /* R ( : , k ) starts here */
Vp [ k ] = p1 = vnz ; /* V ( : , k ) starts here */
w [ k ] = k ; /* add V ( k , k ) to pattern of V */
Vi [ vnz ++ ] = k ; top = n ; col = q != null ? q [ k ] : k ; for ( p = Ap [ col ] ; p < Ap [ col + 1 ] ; p ++ ) /* find R ( : , k ) pattern */
{ i = leftmost [ Ai [ p ] ] ; /* i = min ( find ( A ( i , q ) ) ) */
for ( len = 0 ; w [ i ] != k ; i = parent [ i ] ) /* traverse up to k */
{ s [ s_offset + ( len ++ ) ] = i ; w [ i ] = k ; } while ( len > 0 ) s [ s_offset + ( -- top ) ] = s [ s_offset + ( -- len ) ] ; /* push path on stack */
i = pinv [ Ai [ p ] ] ; /* i = permuted row of A ( : , col ) */
x [ i ] = Ax [ p ] ; /* x ( i ) = A ( : , col ) */
if ( i > k && w [ i ] < k ) /* pattern of V ( : , k ) = x ( k + 1 : m ) */
{ Vi [ vnz ++ ] = i ; /* add i to pattern of V ( : , k ) */
w [ i ] = k ; } } for ( p = top ; p < n ; p ++ ) /* for each i in pattern of R ( : , k ) */
{ i = s [ s_offset + p ] ; /* R ( i , k ) is nonzero */
Dcs_happly . cs_happly ( V , i , Beta [ i ] , x ) ; /* apply ( V ( i ) , Beta ( i ) ) to x */
Ri [ rnz ] = i ; /* R ( i , k ) = x ( i ) */
Rx [ rnz ++ ] = x [ i ] ; x [ i ] = 0 ; if ( parent [ i ] == k ) vnz = Dcs_scatter . cs_scatter ( V , i , 0 , w , null , k , V , vnz ) ; } for ( p = p1 ; p < vnz ; p ++ ) /* gather V ( : , k ) = x */
{ Vx [ p ] = x [ Vi [ p ] ] ; x [ Vi [ p ] ] = 0 ; } Ri [ rnz ] = k ; /* R ( k , k ) = norm ( x ) */
double [ ] beta = new double [ 1 ] ; beta [ 0 ] = Beta [ k ] ; Rx [ rnz ++ ] = Dcs_house . cs_house ( Vx , p1 , beta , vnz - p1 ) ; /* [ v , beta ] = house ( x ) */
Beta [ k ] = beta [ 0 ] ; } Rp [ n ] = rnz ; /* finalize R */
Vp [ n ] = vnz ; /* finalize V */
return N ; |
public class DSLSentence { /** * Build the Values from the Definition . */
private void parseDefinition ( ) { } } | values = new ArrayList < DSLVariableValue > ( ) ; if ( getDefinition ( ) == null ) { return ; } int variableStart = definition . indexOf ( "{" ) ; while ( variableStart >= 0 ) { int variableEnd = getIndexForEndOfVariable ( definition , variableStart ) ; String variable = definition . substring ( variableStart + 1 , variableEnd ) ; values . add ( parseValue ( variable ) ) ; variableStart = definition . indexOf ( "{" , variableEnd ) ; } |
public class ApiKeyResource1 { /** * Creates an API Key . */
@ POST @ Consumes ( "application/x.json-create-api-key" ) public Response createApiKey ( CreateEmoApiKeyRequest request , @ QueryParam ( "key" ) String key , @ Authenticated Subject subject ) { } } | if ( key != null ) { request . setCustomRequestParameter ( "key" , key ) ; } CreateEmoApiKeyResponse response = _uac . createApiKey ( subject , request ) ; return Response . created ( URI . create ( response . getId ( ) ) ) . entity ( response ) . build ( ) ; |
public class FaxServiceImp { /** * ( non - Javadoc )
* @ see com . popbill . api . FaxService # sendFAX ( java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . io . File [ ] , java . util . Date , java . lang . String , java . lang . String ) */
@ Override public String sendFAX ( String corpNum , String sendNum , String receiveNum , String receiveName , File [ ] files , Date reserveDT , String userID , String title ) throws PopbillException { } } | Receiver receiver = new Receiver ( ) ; receiver . setReceiveNum ( receiveNum ) ; receiver . setReceiveName ( receiveName ) ; return requestFax ( corpNum , sendNum , null , new Receiver [ ] { receiver } , files , reserveDT , userID , false , title , null ) ; |
public class AbstractSamlProfileHandlerController { /** * Gets registered service and verify .
* @ param serviceId the service id
* @ return the registered service and verify */
protected SamlRegisteredService verifySamlRegisteredService ( final String serviceId ) { } } | if ( StringUtils . isBlank ( serviceId ) ) { throw new UnauthorizedServiceException ( UnauthorizedServiceException . CODE_UNAUTHZ_SERVICE , "Could not verify/locate SAML registered service since no serviceId is provided" ) ; } LOGGER . debug ( "Checking service access in CAS service registry for [{}]" , serviceId ) ; val registeredService = samlProfileHandlerConfigurationContext . getServicesManager ( ) . findServiceBy ( samlProfileHandlerConfigurationContext . getWebApplicationServiceFactory ( ) . createService ( serviceId ) ) ; if ( registeredService == null || ! registeredService . getAccessStrategy ( ) . isServiceAccessAllowed ( ) ) { LOGGER . warn ( "[{}] is not found in the registry or service access is denied. Ensure service is registered in service registry" , serviceId ) ; throw new UnauthorizedServiceException ( UnauthorizedServiceException . CODE_UNAUTHZ_SERVICE ) ; } if ( registeredService instanceof SamlRegisteredService ) { val samlRegisteredService = ( SamlRegisteredService ) registeredService ; LOGGER . debug ( "Located SAML service in the registry as [{}] with the metadata location of [{}]" , samlRegisteredService . getServiceId ( ) , samlRegisteredService . getMetadataLocation ( ) ) ; return samlRegisteredService ; } LOGGER . error ( "CAS has found a match for service [{}] in registry but the match is not defined as a SAML service" , serviceId ) ; throw new UnauthorizedServiceException ( UnauthorizedServiceException . CODE_UNAUTHZ_SERVICE ) ; |
/**
 * Extracts JSON data for a field according to its {@code Facebook} annotation and
 * returns it converted to the proper Java type.
 *
 * @param fieldWithAnnotation the field/annotation pair which specifies what Java type to convert to
 * @param jsonObject "raw" JSON object to pull data from
 * @param facebookFieldName specifies what JSON field to pull "raw" data from
 * @return the JSON value converted to the field's declared Java type, or {@code null} for JSON null
 * @throws FacebookJsonMappingException if an error occurs while mapping JSON to Java
 */
protected Object toJavaType(FieldWithAnnotation<Facebook> fieldWithAnnotation, JsonObject jsonObject, String facebookFieldName) {
    Class<?> type = fieldWithAnnotation.getField().getType();
    JsonValue rawValue = jsonObject.get(facebookFieldName);
    // Short-circuit right off the bat if we've got a null value.
    if (rawValue.isNull()) {
        return null;
    }
    if (String.class.equals(type)) {
        /*
         * Special handling here for better error checking.
         * Since JsonObject.getString() will return literal JSON text even if it's _not_ a JSON string, we check
         * the marshaled type and bail if needed. For example, calling JsonObject.getString("results") on the
         * below JSON...
         *   {"results": [{"name": "Mark Allen"}]}
         * ...would return the string "[{\"name\":\"Mark Allen\"}]" instead of throwing an error. So we throw the
         * error ourselves.
         * Per Antonello Naccarato, sometimes FB will return an empty JSON array instead of an empty string. Look
         * for that here.
         */
        if (rawValue.isArray() && rawValue.asArray().isEmpty()) {
            MAPPER_LOGGER.trace("Coercing an empty JSON array to an empty string for {}", fieldWithAnnotation);
            return "";
        }
        /*
         * If the user wants a string, _always_ give her a string.
         * This is useful if, for example, you've got a @Facebook-annotated string field that you'd like to have a
         * numeric type shoved into.
         * User beware: this will turn *anything* into a string, which might lead to results you don't expect.
         */
        return jsonHelper.getStringFrom(rawValue);
    }
    // Numeric and boolean primitives/wrappers: delegate to the json helper.
    if (Integer.class.equals(type) || Integer.TYPE.equals(type)) {
        return jsonHelper.getIntegerFrom(rawValue);
    }
    if (Boolean.class.equals(type) || Boolean.TYPE.equals(type)) {
        return jsonHelper.getBooleanFrom(rawValue);
    }
    if (Long.class.equals(type) || Long.TYPE.equals(type)) {
        return jsonHelper.getLongFrom(rawValue);
    }
    if (Double.class.equals(type) || Double.TYPE.equals(type)) {
        return jsonHelper.getDoubleFrom(rawValue);
    }
    if (Float.class.equals(type) || Float.TYPE.equals(type)) {
        return jsonHelper.getFloatFrom(rawValue);
    }
    if (BigInteger.class.equals(type)) {
        return jsonHelper.getBigIntegerFrom(rawValue);
    }
    if (BigDecimal.class.equals(type)) {
        return jsonHelper.getBigDecimalFrom(rawValue);
    }
    // Collections: recurse using the field's generic type argument.
    if (List.class.equals(type)) {
        return toJavaList(rawValue.toString(), getFirstParameterizedTypeArgument(fieldWithAnnotation.getField()));
    }
    if (Map.class.equals(type)) {
        return convertJsonObjectToMap(rawValue.toString(), fieldWithAnnotation.getField());
    }
    // Enums: try the exact constant name first, then an upper-cased fallback.
    if (type.isEnum()) {
        Class<? extends Enum> enumType = type.asSubclass(Enum.class);
        try {
            return Enum.valueOf(enumType, rawValue.asString());
        } catch (IllegalArgumentException iae) {
            MAPPER_LOGGER.debug("Cannot map string {} to enum {}, try fallback toUpperString next...", rawValue.asString(), enumType.getName());
        }
        try {
            return Enum.valueOf(enumType, rawValue.asString().toUpperCase());
        } catch (IllegalArgumentException iae) {
            MAPPER_LOGGER.debug("Mapping string {} to enum {} not possible", rawValue.asString(), enumType.getName());
        }
        // NOTE(review): falls through to the generic object mapping below when no constant matches.
    }
    if (Date.class.equals(type)) {
        return DateUtils.toDateFromLongFormat(jsonHelper.getStringFrom(rawValue));
    }
    String rawValueAsString = jsonHelper.getStringFrom(rawValue);
    // Hack for issue #76 where FB will sometimes return a Post's Comments as
    // "[]" instead of an object type (wtf)
    if (Comments.class.isAssignableFrom(type) && rawValue instanceof JsonArray) {
        MAPPER_LOGGER.debug("Encountered comment array '{}' but expected a {} object instead. Working around that by coercing "
            + "into an empty {} instance...", rawValueAsString, Comments.class.getSimpleName(), Comments.class.getSimpleName());
        JsonObject workaroundJsonObject = new JsonObject();
        workaroundJsonObject.add("total_count", 0);
        workaroundJsonObject.add("data", new JsonArray());
        rawValueAsString = workaroundJsonObject.toString();
    }
    // Some other type - recurse into it
    return toJavaObject(rawValueAsString, type);
}
public class HashUtils { /** * Calculate the RipeMd160 value of the SHA - 256 of an array of bytes . This is
* how a Bitcoin address is derived from public key bytes .
* @ param pubkeyBytes
* A Bitcoin public key as an array of bytes .
* @ return The Bitcoin address as an array of bytes . */
public static synchronized byte [ ] addressHash ( byte [ ] pubkeyBytes ) { } } | try { byte [ ] sha256 = MessageDigest . getInstance ( SHA256 ) . digest ( pubkeyBytes ) ; byte [ ] out = new byte [ 20 ] ; ripeMD160 . update ( sha256 , 0 , sha256 . length ) ; ripeMD160 . doFinal ( out , 0 ) ; // This also resets the hash function for
// next use
return out ; } catch ( NoSuchAlgorithmException e ) { throw new RuntimeException ( e ) ; // Cannot happen .
} |
public class BoxApiMetadata { /** * Gets a request that retrieves a metadata template schema ( scope defaults to BOX _ API _ SCOPE _ ENTERPRISE )
* @ param scope currently only global and enterprise scopes are supported
* @ param template metadata template to use
* @ return request to retrieve a metadata template schema */
public BoxRequestsMetadata . GetMetadataTemplateSchema getMetadataTemplateSchemaRequest ( String scope , String template ) { } } | BoxRequestsMetadata . GetMetadataTemplateSchema request = new BoxRequestsMetadata . GetMetadataTemplateSchema ( getMetadataTemplatesUrl ( scope , template ) , mSession ) ; return request ; |
public class RelationalOperations { /** * Returns true if polyline _ a overlaps envelope _ b . */
private static boolean polylineOverlapsEnvelope_ ( Polyline polyline_a , Envelope envelope_b , double tolerance , ProgressTracker progress_tracker ) { } } | Envelope2D env_a = new Envelope2D ( ) , env_b = new Envelope2D ( ) ; polyline_a . queryEnvelope2D ( env_a ) ; envelope_b . queryEnvelope2D ( env_b ) ; if ( envelopeInfContainsEnvelope_ ( env_a , env_b , tolerance ) || envelopeInfContainsEnvelope_ ( env_b , env_a , tolerance ) ) return false ; if ( envelopeInfContainsEnvelope_ ( env_b , env_a , tolerance ) ) return false ; if ( env_b . getHeight ( ) > tolerance && env_b . getWidth ( ) > tolerance ) return false ; // lines cannot overlap areas
if ( env_b . getHeight ( ) <= tolerance && env_b . getWidth ( ) <= tolerance ) return false ; // lines cannot overlap points
// Treat as polyline
Polyline polyline_b = new Polyline ( ) ; Point p = new Point ( ) ; envelope_b . queryCornerByVal ( 0 , p ) ; polyline_b . startPath ( p ) ; envelope_b . queryCornerByVal ( 2 , p ) ; polyline_b . lineTo ( p ) ; return linearPathOverlapsLinearPath_ ( polyline_a , polyline_b , tolerance ) ; |
public class CmsFlexCacheKey { /** * Calculates the cache key name that is used as key in
* the first level of the FlexCache . < p >
* @ param resourcename the full name of the resource including site root
* @ param online must be true for an online resource , false for offline resources
* @ return the FlexCache key name */
public static String getKeyName ( String resourcename , boolean online ) { } } | return resourcename . concat ( online ? CmsFlexCache . CACHE_ONLINESUFFIX : CmsFlexCache . CACHE_OFFLINESUFFIX ) ; |
/**
 * Process the stylesheet from a DOM tree, if the processor supports the
 * "http://xml.org/trax/features/dom/input" feature.
 *
 * @param node     a DOM tree which must contain valid transform instructions
 *                 that this processor understands
 * @param systemID the systemID from where xsl:includes and xsl:imports should
 *                 be resolved
 * @return a Templates object capable of being used for transformation purposes
 * @throws TransformerConfigurationException
 */
javax.xml.transform.Templates processFromNode(Node node, String systemID) throws TransformerConfigurationException {
    // Record the base system ID first; the single-argument overload reads it
    // to resolve relative xsl:include/xsl:import references.
    m_DOMsystemID = systemID;
    return processFromNode(node);
}
public class MethodProxy { /** * Add a proxy for a method declared in class { @ code declaringClass } .
* Each call to the method will be routed to the invocationHandler instead . */
public static void proxy ( Class < ? > declaringClass , String methodName , InvocationHandler invocationHandler ) { } } | assertInvocationHandlerNotNull ( invocationHandler ) ; if ( declaringClass == null ) { throw new IllegalArgumentException ( "declaringClass cannot be null" ) ; } if ( methodName == null || methodName . length ( ) == 0 ) { throw new IllegalArgumentException ( "methodName cannot be empty" ) ; } Method [ ] methods = Whitebox . getMethods ( declaringClass , methodName ) ; if ( methods . length == 0 ) { throw new MethodNotFoundException ( String . format ( "Couldn't find a method with name %s in the class hierarchy of %s" , methodName , declaringClass . getName ( ) ) ) ; } else if ( methods . length > 1 ) { throw new TooManyMethodsFoundException ( String . format ( "Found %d methods with name %s in the class hierarchy of %s." , methods . length , methodName , declaringClass . getName ( ) ) ) ; } MockRepository . putMethodProxy ( methods [ 0 ] , invocationHandler ) ; |
/**
 * Looks up a destination on the given bus and populates the supplied
 * definition. Delegates entirely to the destination cache.
 *
 * @see com.ibm.ws.sib.admin.JsBus#getSIBDestination(java.lang.String,
 *      java.lang.String, com.ibm.ws.sib.admin.DestinationDefinition)
 */
public void getSIBDestination(String busName, String name, DestinationDefinition dd) throws SIBExceptionBase, SIBExceptionDestinationNotFound {
    // dd is filled in place by the cache lookup.
    getDestinationCache().getSIBDestination(busName, name, dd);
}
/**
 * Adds the cp definition inventory to the database. Also notifies the
 * appropriate model listeners. Static facade that delegates to the
 * underlying local service.
 *
 * @param cpDefinitionInventory the cp definition inventory
 * @return the cp definition inventory that was added
 */
public static com.liferay.commerce.model.CPDefinitionInventory addCPDefinitionInventory(com.liferay.commerce.model.CPDefinitionInventory cpDefinitionInventory) {
    return getService().addCPDefinitionInventory(cpDefinitionInventory);
}
public class ReadBuffer { /** * Converts a variable amount of bytes from the read buffer to an unsigned int .
* The first bit is for continuation info , the other seven bits are for data .
* @ return the int value . */
public int readUnsignedInt ( ) { } } | int variableByteDecode = 0 ; byte variableByteShift = 0 ; // check if the continuation bit is set
while ( ( this . bufferData [ this . bufferPosition ] & 0x80 ) != 0 ) { variableByteDecode |= ( this . bufferData [ this . bufferPosition ++ ] & 0x7f ) << variableByteShift ; variableByteShift += 7 ; } // read the seven data bits from the last byte
return variableByteDecode | ( this . bufferData [ this . bufferPosition ++ ] << variableByteShift ) ; |
/**
 * Imports all JobDef entries from an XML deployment descriptor. Creates and
 * commits a transaction on the given connection.
 *
 * @param path                full or relative path to the deployment descriptor to read
 * @param cnx                 a database connection to use with no active transaction
 * @param overrideJarBasePath ignore the base path of the jar in the deployment
 *                            descriptor and use this one; must be relative to the repository root
 * @throws JqmXmlException if anything fails; no changes are committed in that case
 */
static void parse(String path, DbConn cnx, String overrideJarBasePath) throws JqmXmlException {
    // Argument checks
    jqmlogger.trace(path);
    if (path == null || path.isEmpty()) {
        throw new IllegalArgumentException("XML file path cannot be empty");
    }
    if (cnx == null) {
        throw new IllegalArgumentException("Database connection cannot be null");
    }
    File f = new File(path);
    if (f == null || !f.isFile() || !f.canRead()) {
        throw new IllegalArgumentException("The XML file " + f + " was not found or cannot be read.");
    }
    // Create parsers
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    dbFactory.setNamespaceAware(true);
    DocumentBuilder dBuilder;
    // Result fields
    Map<String, Integer> createdQueues = new HashMap<>();
    JobDef jd = null;
    Integer queueId = null;
    try {
        dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(f);
        // Schema validation against the bundled res.xsd before touching the DB.
        SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema schema = factory.newSchema(XmlJobDefParser.class.getClassLoader().getResource("res.xsd"));
        Validator validator = schema.newValidator();
        validator.validate(new DOMSource(doc));
        doc.getDocumentElement().normalize();
        // First parse CLs (execution contexts) so JobDefs can reference them below.
        NodeList clList = doc.getElementsByTagName("context");
        for (int clIndex = 0; clIndex < clList.getLength(); clIndex++) {
            Cl cl;
            Node clNode = clList.item(clIndex);
            if (clNode.getNodeType() != Node.ELEMENT_NODE) {
                continue;
            }
            Element clElement = (Element) clNode;
            String clName = clElement.getElementsByTagName("name").item(0).getTextContent().trim();
            try {
                cl = Cl.select_key(cnx, clName);
                // Remove all handlers - we will recreate them.
                cnx.runUpdate("clehprm_delete_all_for_cl", cl.getId());
                cnx.runUpdate("cleh_delete_all_for_cl", cl.getId());
            } catch (NoResultException e) {
                Cl.create(cnx, clName, false, null, false, true, null);
                cl = Cl.select_key(cnx, clName);
            }
            // Basic attributes (with defaults)
            if (clElement.getElementsByTagName("childFirst").getLength() > 0) {
                cl.setChildFirst(Boolean.parseBoolean(clElement.getElementsByTagName("childFirst").item(0).getTextContent()));
            } else {
                cl.setChildFirst(false);
            }
            if (clElement.getElementsByTagName("tracingEnabled").getLength() > 0) {
                cl.setTracingEnabled(Boolean.parseBoolean(clElement.getElementsByTagName("tracingEnabled").item(0).getTextContent()));
            } else {
                cl.setTracingEnabled(false);
            }
            if (clElement.getElementsByTagName("persistent").getLength() > 0) {
                cl.setPersistent(Boolean.parseBoolean(clElement.getElementsByTagName("persistent").item(0).getTextContent()));
            } else {
                cl.setPersistent(true);
            }
            if (clElement.getElementsByTagName("hiddenJavaClasses").getLength() > 0) {
                cl.setHiddenClasses(clElement.getElementsByTagName("hiddenJavaClasses").item(0).getTextContent().trim());
            } else {
                cl.setHiddenClasses(null);
            }
            if (clElement.getElementsByTagName("runners").getLength() > 0) {
                cl.setAllowedRunners(clElement.getElementsByTagName("runners").item(0).getTextContent().trim());
            } else {
                cl.setAllowedRunners(null);
            }
            cl.update(cnx);
            // Optional event handlers with their own key/value parameters.
            if (clElement.getElementsByTagName("eventHandlers").getLength() > 0) {
                NodeList handlersList = ((Element) clElement.getElementsByTagName("eventHandlers").item(0)).getElementsByTagName("handler");
                for (int j = 0; j < handlersList.getLength(); j++) {
                    Element hElement = (Element) handlersList.item(j);
                    Map<String, String> handlerPrms = new HashMap<>();
                    if (hElement.getElementsByTagName("parameters").getLength() > 0) {
                        NodeList prmList = ((Element) hElement.getElementsByTagName("parameters").item(0)).getElementsByTagName("parameter");
                        for (int k = 0; k < prmList.getLength(); k++) {
                            Element prmElement = (Element) prmList.item(k);
                            handlerPrms.put(prmElement.getElementsByTagName("key").item(0).getTextContent(), prmElement.getElementsByTagName("value").item(0).getTextContent());
                        }
                    }
                    ClHandler.create(cnx, ClEvent.JI_STARTING, hElement.getElementsByTagName("className").item(0).getTextContent().trim(), cl.getId(), handlerPrms);
                }
            }
        }
        // Second parse jars
        NodeList jarList = doc.getElementsByTagName("jar");
        for (int jarIndex = 0; jarIndex < jarList.getLength(); jarIndex++) {
            Node jarNode = jarList.item(jarIndex);
            if (jarNode.getNodeType() != Node.ELEMENT_NODE) {
                continue;
            }
            Element jarElement = (Element) jarNode;
            NodeList jdList = jarElement.getElementsByTagName("jobDefinition");
            // Potentially remap jar path
            String jarPath = jarElement.getElementsByTagName("path").item(0).getTextContent().trim();
            if (overrideJarBasePath != null) {
                String fileName = (new File(jarPath)).getName();
                jarPath = (new File(overrideJarBasePath, fileName)).getPath();
            }
            for (int jdIndex = 0; jdIndex < jdList.getLength(); jdIndex++) {
                Element jdElement = (Element) jdList.item(jdIndex);
                // Retrieve existing JobDef (if exists)
                String name = jdElement.getElementsByTagName("name").item(0).getTextContent().trim();
                try {
                    jd = JobDef.select_key(cnx, name);
                } catch (NoResultException e) {
                    jd = new JobDef();
                }
                // Retrieve the Queue on which to run the JobDef
                Queue q = null;
                try {
                    q = jd.getQueue(cnx);
                } catch (NoResultException e) {
                    // Nothing.
                }
                if (q == null && jdElement.getElementsByTagName("queue").getLength() != 0) {
                    // Specified inside the XML, nothing yet in DB. Does the queue already exist?
                    String qname = jdElement.getElementsByTagName("queue").item(0).getTextContent().trim();
                    try {
                        queueId = Queue.select_key(cnx, qname).getId();
                    } catch (NoResultException e) {
                        // The queue must be created.
                        if (createdQueues.containsKey(qname)) {
                            queueId = createdQueues.get(qname);
                        } else {
                            queueId = Queue.create(cnx, qname, "Created from a jobdef import. Description should be set later", false);
                            createdQueues.put(qname, queueId);
                        }
                    }
                    jd.setQueue(queueId);
                } else if (q == null) {
                    // Not specified (and no queue specified inside DB) => default queue
                    queueId = cnx.runSelectSingle("q_select_default", Integer.class);
                    jd.setQueue(queueId);
                }
                // Simple jar attributes
                jd.setJarPath(jarPath);
                jd.setPathType(PathType.valueOf(jarElement.getElementsByTagName("pathType").getLength() > 0 ? jarElement.getElementsByTagName("pathType").item(0).getTextContent().trim() : "FS"));
                // Simple JD attributes
                jd.setCanBeRestarted("true".equals(jdElement.getElementsByTagName("canBeRestarted").item(0).getTextContent().trim()) ? true : false);
                jd.setJavaClassName(jdElement.getElementsByTagName("javaClassName").item(0).getTextContent().trim());
                jd.setDescription(jdElement.getElementsByTagName("description").item(0).getTextContent());
                jd.setApplicationName(name);
                jd.setModule(jdElement.getElementsByTagName("module").item(0).getTextContent());
                jd.setHighlander("true".equals(jdElement.getElementsByTagName("highlander").item(0).getTextContent().trim()) ? true : false);
                // Classifier
                if (jdElement.getElementsByTagName("application").getLength() > 0) {
                    jd.setApplication(jdElement.getElementsByTagName("application").item(0).getTextContent());
                } else {
                    jd.setApplication(null);
                }
                // Keyword used to be called "other". We allow both for ascending compatibility. ("other" is deprecated - don't use)
                // Note: a "keywordN" tag, when present, overwrites whatever "otherN" set just above.
                if (jdElement.getElementsByTagName("other1").getLength() > 0) {
                    jd.setKeyword1(jdElement.getElementsByTagName("other1").item(0).getTextContent());
                } else {
                    jd.setKeyword1(null);
                }
                if (jdElement.getElementsByTagName("keyword1").getLength() > 0) {
                    jd.setKeyword1(jdElement.getElementsByTagName("keyword1").item(0).getTextContent());
                } else {
                    jd.setKeyword1(null);
                }
                if (jdElement.getElementsByTagName("other2").getLength() > 0) {
                    jd.setKeyword2(jdElement.getElementsByTagName("other2").item(0).getTextContent());
                } else {
                    jd.setKeyword2(null);
                }
                if (jdElement.getElementsByTagName("keyword2").getLength() > 0) {
                    jd.setKeyword2(jdElement.getElementsByTagName("keyword2").item(0).getTextContent());
                } else {
                    jd.setKeyword2(null);
                }
                if (jdElement.getElementsByTagName("other3").getLength() > 0) {
                    jd.setKeyword3(jdElement.getElementsByTagName("other3").item(0).getTextContent());
                } else {
                    jd.setKeyword3(null);
                }
                if (jdElement.getElementsByTagName("keyword3").getLength() > 0) {
                    jd.setKeyword3(jdElement.getElementsByTagName("keyword3").item(0).getTextContent());
                } else {
                    jd.setKeyword3(null);
                }
                // Class loading
                if (jdElement.getElementsByTagName("executionContext").getLength() > 0) {
                    String clName = jdElement.getElementsByTagName("executionContext").item(0).getTextContent();
                    try {
                        jd.setClassLoader(Cl.select_key(cnx, clName).getId());
                    } catch (NoResultException e) {
                        jqmlogger.error("Incorrect deployment descriptor: a job definition is using undefined context " + clName);
                    }
                } else {
                    jd.setClassLoader(null);
                }
                // Alert time
                if (jdElement.getElementsByTagName("reasonableRuntimeLimitMinute").getLength() > 0) {
                    jd.setMaxTimeRunning(Integer.parseInt(jdElement.getElementsByTagName("reasonableRuntimeLimitMinute").item(0).getTextContent()));
                } else {
                    jd.setMaxTimeRunning(null);
                }
                // Parameters
                Map<String, String> parameters = new HashMap<>();
                NodeList prmList = jdElement.getElementsByTagName("parameter");
                for (int prmIndex = 0; prmIndex < prmList.getLength(); prmIndex++) {
                    Element prmElement = (Element) prmList.item(prmIndex);
                    parameters.put(prmElement.getElementsByTagName("key").item(0).getTextContent(), prmElement.getElementsByTagName("value").item(0).getTextContent());
                }
                jd.update(cnx, parameters);
                jqmlogger.info("Imported application " + jd.getApplicationName());
            }
        }
        cnx.commit();
    } catch (Exception e) {
        throw new JqmXmlException("an error occured while parsing the XML file " + path + ". No changes were done to the configuration.", e);
    }
}
/**
 * Adds more gallery list items to display in the tab, if available.
 */
protected void loadMoreItems() {
    // Show the loading indicator while the next batch is fetched.
    setLoading(true);
    // Schedule the command that loads up to 30 more items; the 1ms fixed
    // delay lets the browser repaint the loading state first.
    MoreItemsCommand cmd = new MoreItemsCommand(30);
    Scheduler.get().scheduleFixedDelay(cmd, 1);
}
public class GrailsMessageBundleScriptCreator { /** * Returns the basenames
* @ return the base names for the bundle resources */
public String [ ] getBaseNames ( boolean warDeployed ) { } } | String [ ] names = configParam . split ( GrailsLocaleUtils . RESOURCE_BUNDLE_SEPARATOR ) ; List < String > baseNames = new ArrayList < String > ( ) ; for ( String baseName : names ) { // Read the properties files to find out the available message keys . It
// is done differently
// for run - app or run - war style of runtimes .
boolean isPluginResoucePath = GrailsLocaleUtils . isPluginResoucePath ( baseName ) ; if ( warDeployed ) { if ( isPluginResoucePath ) { baseName = WEB_INF_DIR + rsReader . getRealResourcePath ( baseName ) ; } else { baseName = PROPERTIES_DIR + baseName . substring ( baseName . lastIndexOf ( '.' ) + 1 ) ; } } else { if ( isPluginResoucePath ) { baseName = URI_ABSOLUTE_FILE_PREFIX + rsReader . getRealResourcePath ( baseName ) ; } else { baseName = URI_RELATIVE_FILE_PREFIX + baseName . replaceAll ( REGEX_DOT_CHARACTER , JawrConstant . URL_SEPARATOR ) ; } } baseNames . add ( baseName ) ; } return baseNames . toArray ( new String [ ] { } ) ; |
public class UAgentInfo { /** * Detects if the current browser is a BlackBerry Touch
* device , such as the Storm , Torch , and Bold Touch . Excludes the Playbook .
* @ return detection of a Blackberry touchscreen device */
public boolean detectBlackBerryTouch ( ) { } } | if ( detectBlackBerry ( ) && ( ( userAgent . indexOf ( deviceBBStorm ) != - 1 ) || ( userAgent . indexOf ( deviceBBTorch ) != - 1 ) || ( userAgent . indexOf ( deviceBBBoldTouch ) != - 1 ) || ( userAgent . indexOf ( deviceBBCurveTouch ) != - 1 ) ) ) { return true ; } return false ; |
public class Config { /** * Retrieve an array of < tt > InetAddress < / tt > created from a property
* value containing a < tt > delim < / tt > separated list of host names and / or
* ip addresses . */
public static InetAddress [ ] getInetAddressArray ( Properties props , String key , String delim , InetAddress [ ] def ) { } } | String p = props . getProperty ( key ) ; if ( p != null ) { StringTokenizer tok = new StringTokenizer ( p , delim ) ; int len = tok . countTokens ( ) ; InetAddress [ ] arr = new InetAddress [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { String addr = tok . nextToken ( ) ; try { arr [ i ] = InetAddress . getByName ( addr ) ; } catch ( UnknownHostException uhe ) { log . error ( "Unknown host " + addr , uhe ) ; return def ; } } return arr ; } return def ; |
/**
 * Adds the session's changes to the current JTA transaction group, if any.
 *
 * @param session the user session whose changes are being tracked
 * @param changes the change log to register
 * @throws SystemException
 * @throws RollbackException
 * @throws IllegalStateException
 */
private void add(SessionImpl session, PlainChangesLog changes) throws SystemException, IllegalStateException, RollbackException {
    Transaction tx = tm.getTransaction();
    if (tx == null) {
        // No active tx so there is no need to register the session
        return;
    }
    // Get the current TransactionContext
    TransactionContext ctx = getOrCreateTransactionContext();
    // Register the tx if it has not been done already
    ctx.registerTransaction(tx);
    // Register the given changes
    ctx.add(session, changes);
}
/**
 * Creates the command-line parameters available for the tokenizer evaluation
 * sub-command: a mandatory language choice and a mandatory test set path.
 */
private void loadTokevalParameters() {
    // Language is restricted to the supported set.
    this.tokevalParser.addArgument("-l", "--language").required(true).choices("ca", "de", "en", "es", "eu", "fr", "it").help("Choose language.\n");
    // Path to the gold/reference corpus to evaluate against.
    this.tokevalParser.addArgument("-t", "--testset").required(true).help("The test or reference corpus.\n");
}
public class JspFunctions { /** * Utility method used to build a query parameter map which includes the list of parameters needed to change the
* direction of a sort related to the given sort expression . This method uses a { @ link DataGridURLBuilder }
* instance to call its { @ link DataGridURLBuilder # buildSortQueryParamsMap ( String ) } method from JSP EL .
* @ param urlBuilder the data grid URL builder associated with a
* { @ link org . apache . beehive . netui . databinding . datagrid . api . DataGridState } object that is used to
* build lists of query parameters
* @ param sortExpression the sort expression whose direction to change
* @ return a { @ link Map } of key / value pairs for query parameters
* @ netui : jspfunction name = " buildQueryParamsMapForSortExpression "
* signature = " java . util . Map buildQueryParamsMapForSortExpression ( org . apache . beehive . netui . databinding . datagrid . api . DataGridURLBuilder , java . lang . String ) " */
public static Map buildQueryParamsMapForSortExpression ( DataGridURLBuilder urlBuilder , String sortExpression ) { } } | if ( urlBuilder == null || sortExpression == null ) return null ; return urlBuilder . buildSortQueryParamsMap ( sortExpression ) ; |
public class DiskAccessOneFile { /** * Get the class of a given object . */
@ Override public long getObjectClass ( long oid ) { } } | FilePos oie = oidIndex . findOid ( oid ) ; if ( oie == null ) { throw DBLogger . newObjectNotFoundException ( "OID not found: " + Util . oidToString ( oid ) ) ; } try { fileInAP . seekPage ( PAGE_TYPE . DATA , oie . getPage ( ) , oie . getOffs ( ) ) ; return DataDeSerializerNoClass . getClassOid ( fileInAP ) ; } catch ( Exception e ) { throw DBLogger . newObjectNotFoundException ( "ERROR reading object: " + Util . oidToString ( oid ) ) ; } |
public class JSDocInfo { /** * Adds an implemented interface . Returns whether the interface was added . If
* the interface was already present in the list , it won ' t get added again . */
boolean addImplementedInterface ( JSTypeExpression interfaceName ) { } } | lazyInitInfo ( ) ; if ( info . implementedInterfaces == null ) { info . implementedInterfaces = new ArrayList < > ( 2 ) ; } if ( info . implementedInterfaces . contains ( interfaceName ) ) { return false ; } info . implementedInterfaces . add ( interfaceName ) ; return true ; |
/**
 * Returns the order given the shape information.
 *
 * @param buffer the shape-info buffer
 * @return the order flag, stored as the last int of the shape-info buffer
 */
public static char order(DataBuffer buffer) {
    int length = Shape.shapeInfoLength(Shape.rank(buffer));
    return (char) buffer.getInt(length - 1);
}
public class GraphHelper { /** * Remove the specified edge from the graph .
* @ param edge */
public void removeEdge ( AtlasEdge edge ) { } } | String edgeString = null ; if ( LOG . isDebugEnabled ( ) ) { edgeString = string ( edge ) ; LOG . debug ( "Removing {}" , edgeString ) ; } graph . removeEdge ( edge ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . info ( "Removed {}" , edgeString ) ; } |
public class IntTuples { /** * Computes the minimum value that occurs in the given tuple ,
* or < code > Integer . MAX _ VALUE < / code > if the given tuple has a
* size of 0.
* @ param t The input tuple
* @ return The minimum value */
public static int min ( IntTuple t ) { } } | return IntTupleFunctions . reduce ( t , Integer . MAX_VALUE , Math :: min ) ; |
public class JavacParser { /** * AnnotationsOpt = { ' @ ' Annotation }
* @ param kind Whether to parse an ANNOTATION or TYPE _ ANNOTATION */
List < JCAnnotation > annotationsOpt ( Tag kind ) { } } | if ( token . kind != MONKEYS_AT ) return List . nil ( ) ; // optimization
ListBuffer < JCAnnotation > buf = new ListBuffer < JCAnnotation > ( ) ; int prevmode = mode ; while ( token . kind == MONKEYS_AT ) { int pos = token . pos ; nextToken ( ) ; buf . append ( annotation ( pos , kind ) ) ; } lastmode = mode ; mode = prevmode ; List < JCAnnotation > annotations = buf . toList ( ) ; if ( debugJSR308 && kind == Tag . TYPE_ANNOTATION ) System . out . println ( "TA: parsing " + annotations + " in " + log . currentSourceFile ( ) ) ; return annotations ; |
public class Exceptions { /** * Returns the same throwing bi - consumer .
* @ param consumer the bi - consumer
* @ param < T > the first input type
* @ param < U > the second input type
* @ param < E > the exception type
* @ return the bi - consumer */
public static < T , U , E extends Throwable > @ NonNull BiConsumer < T , U > rethrowBiConsumer ( final @ NonNull ThrowingBiConsumer < T , U , E > consumer ) { } } | return consumer ; |
public class BlockInfo { /** * Return a specific envelope in the block by it ' s index .
* @ param envelopeIndex the index into list .
* @ return envelopeIndex the index
* @ throws InvalidProtocolBufferException */
public EnvelopeInfo getEnvelopeInfo ( int envelopeIndex ) throws InvalidProtocolBufferException { } } | try { EnvelopeInfo ret ; if ( isFiltered ( ) ) { switch ( filteredBlock . getFilteredTransactions ( envelopeIndex ) . getType ( ) . getNumber ( ) ) { case Common . HeaderType . ENDORSER_TRANSACTION_VALUE : ret = new TransactionEnvelopeInfo ( this . filteredBlock . getFilteredTransactions ( envelopeIndex ) ) ; break ; default : // just assume base properties .
ret = new EnvelopeInfo ( this . filteredBlock . getFilteredTransactions ( envelopeIndex ) ) ; break ; } } else { EnvelopeDeserializer ed = EnvelopeDeserializer . newInstance ( block . getBlock ( ) . getData ( ) . getData ( envelopeIndex ) , block . getTransActionsMetaData ( ) [ envelopeIndex ] ) ; switch ( ed . getType ( ) ) { case Common . HeaderType . ENDORSER_TRANSACTION_VALUE : ret = new TransactionEnvelopeInfo ( ( EndorserTransactionEnvDeserializer ) ed ) ; break ; default : // just assume base properties .
ret = new EnvelopeInfo ( ed ) ; break ; } } return ret ; } catch ( InvalidProtocolBufferRuntimeException e ) { throw e . getCause ( ) ; } |
public class ViewSet { /** * Creates a dynamic view , where the scope is the specified software system . The following
* elements can be added to the resulting view :
* < ul >
* < li > People < / li >
* < li > Software systems < / li >
* < li > Containers that reside inside the specified software system < / li >
* < / ul >
* @ param softwareSystem the SoftwareSystem object representing the scope of the view
* @ param key the key for the view ( must be unique )
* @ param description a description of the view
* @ return a DynamicView object
* @ throws IllegalArgumentException if the software system is null or the key is not unique */
public DynamicView createDynamicView ( SoftwareSystem softwareSystem , String key , String description ) { } } | assertThatTheSoftwareSystemIsNotNull ( softwareSystem ) ; assertThatTheViewKeyIsSpecifiedAndUnique ( key ) ; DynamicView view = new DynamicView ( softwareSystem , key , description ) ; view . setViewSet ( this ) ; dynamicViews . add ( view ) ; return view ; |
public class Commands { /** * Connects to the database defined in the specified properties file .
* @ param line Command line
* @ param callback Callback for command status
* @ throws Exception on error */
public void properties ( String line , DispatchCallback callback ) throws Exception { } } | String example = "" ; example += "Usage: properties <properties file>" + SqlLine . getSeparator ( ) ; String [ ] parts = sqlLine . split ( line ) ; if ( parts . length < 2 ) { callback . setToFailure ( ) ; sqlLine . error ( example ) ; return ; } int successes = 0 ; for ( int i = 1 ; i < parts . length ; i ++ ) { Properties props = new Properties ( ) ; props . load ( new FileInputStream ( parts [ i ] ) ) ; connect ( props , callback ) ; if ( callback . isSuccess ( ) ) { successes ++ ; String nickname = getProperty ( props , "nickname" , "ConnectionNickname" ) ; if ( nickname != null ) { sqlLine . getDatabaseConnection ( ) . setNickname ( nickname ) ; } } } if ( successes != parts . length - 1 ) { callback . setToFailure ( ) ; } else { callback . setToSuccess ( ) ; } |
public class PreambleUtil { /** * Mode bits */
static void insertCurMode ( final WritableMemory wmem , final CurMode curMode ) { } } | final int curModeId = curMode . ordinal ( ) ; int mode = wmem . getByte ( MODE_BYTE ) & ~ CUR_MODE_MASK ; // strip bits 0 , 1
mode |= ( curModeId & CUR_MODE_MASK ) ; wmem . putByte ( MODE_BYTE , ( byte ) mode ) ; |
public class Validate { /** * Validates that the argument can be converted to the specified class , if not , throws an exception .
* < p > This method is useful when validating that there will be no casting errors . < / p >
* < pre > Validate . isAssignableFrom ( SuperClass . class , object . getClass ( ) ) ; < / pre >
* < p > The message format of the exception is & quot ; Cannot assign { type } to { superType } & quot ; < / p >
* @ param superType the class the class must be validated against , not null
* @ param type the class to check , not null
* @ throws IllegalArgumentException if type argument is not assignable to the specified superType
* @ see # isAssignableFrom ( Class , Class , String , Object . . . )
* @ since 3.0 */
@ GwtIncompatible ( "incompatible method" ) public static void isAssignableFrom ( final Class < ? > superType , final Class < ? > type ) { } } | // TODO when breaking BC , consider returning type
if ( ! superType . isAssignableFrom ( type ) ) { throw new IllegalArgumentException ( StringUtils . simpleFormat ( DEFAULT_IS_ASSIGNABLE_EX_MESSAGE , type == null ? "null" : type . getName ( ) , superType . getName ( ) ) ) ; } |
public class Friend { /** * Gets the FriendGroup that contains this friend .
* @ return the FriendGroup that currently contains this Friend or null if
* this Friend is not in a FriendGroup . */
public FriendGroup getGroup ( ) { } } | final Collection < RosterGroup > groups = get ( ) . getGroups ( ) ; if ( groups . size ( ) > 0 ) { return new FriendGroup ( api , con , get ( ) . getGroups ( ) . iterator ( ) . next ( ) ) ; } return null ; |
public class SARLEclipsePlugin { /** * Create a status .
* @ param severity the severity level , see { @ link IStatus } .
* @ param code the code of the error .
* @ param cause the cause of the problem .
* @ return the status . */
public IStatus createStatus ( int severity , int code , Throwable cause ) { } } | return createStatus ( severity , code , null , cause ) ; |
public class Layout { /** * Returns the layout for a particular generation of this layout ' s type .
* @ throws FetchNoneException if generation not found */
public Layout getGeneration ( int generation ) throws FetchNoneException , FetchException { } } | try { Storage < StoredLayoutEquivalence > equivStorage = mLayoutFactory . mRepository . storageFor ( StoredLayoutEquivalence . class ) ; StoredLayoutEquivalence equiv = equivStorage . prepare ( ) ; equiv . setStorableTypeName ( getStorableTypeName ( ) ) ; equiv . setGeneration ( generation ) ; if ( equiv . tryLoad ( ) ) { generation = equiv . getMatchedGeneration ( ) ; } } catch ( RepositoryException e ) { throw e . toFetchException ( ) ; } return new Layout ( mLayoutFactory , getStoredLayoutByGeneration ( generation ) ) ; |
public class DbsUtilities { /** * Join a list of strings by comma .
* @ param items the list of strings .
* @ return the resulting string . */
public static String joinByComma ( List < String > items ) { } } | StringBuilder sb = new StringBuilder ( ) ; for ( String item : items ) { sb . append ( "," ) . append ( item ) ; } if ( sb . length ( ) == 0 ) { return "" ; } return sb . substring ( 1 ) ; |
public class StatementParameter { /** * 设置参数 .
* @ param value */
public void setObject ( Class < ? > type , Object value ) { } } | if ( type . equals ( String . class ) ) { this . setString ( ( String ) value ) ; } else if ( type . equals ( Integer . class ) || type . equals ( int . class ) ) { this . setInt ( ( Integer ) value ) ; } else if ( type . equals ( Long . class ) || type . equals ( long . class ) ) { this . setLong ( ( Long ) value ) ; } else if ( type . equals ( Float . class ) || type . equals ( float . class ) ) { this . setFloat ( ( Float ) value ) ; } else if ( type . equals ( Double . class ) || type . equals ( double . class ) ) { this . setDouble ( ( Double ) value ) ; } else if ( type . equals ( Boolean . class ) || type . equals ( boolean . class ) ) { if ( value instanceof Integer ) { int num = ( Integer ) value ; this . setBool ( num == 1 ) ; } else { this . setBool ( ( Boolean ) value ) ; } } // else if ( type . equals ( Month . class ) ) {
// if ( value instanceof String ) {
// String month = ( String ) value ;
// this . setMonth ( new Month ( month ) ) ;
// else {
// this . setMonth ( ( Month ) value ) ;
else if ( type . equals ( Date . class ) ) { this . setDate ( ( Date ) value ) ; } // else if ( type . equals ( OnlyDate . class ) ) {
// if ( value instanceof java . sql . Date ) {
// java . sql . Date date = ( java . sql . Date ) value ;
// this . setOnlyDate ( new OnlyDate ( date . getTime ( ) ) ) ;
// else {
// this . setOnlyDate ( ( OnlyDate ) value ) ;
else { throw new IllegalArgumentException ( "未知类型[" + type + "]." ) ; } |
public class BadiCalendar { /** * / * [ deutsch ]
* < p > Erzeugt ein neues Badi - Kalenderdatum . < / p >
* @ param era Bahai era
* @ param yearOfEra year of era in range 1-1083
* @ param division either { @ code BadiMonth } or { @ code BadiIntercalaryDays }
* @ param day day in range 1-19 ( 1-4/5 in case of Ayyam - i - Ha )
* @ return new instance of { @ code BadiCalendar }
* @ throws IllegalArgumentException in case of any inconsistencies */
public static BadiCalendar ofComplete ( BadiEra era , int yearOfEra , BadiDivision division , int day ) { } } | if ( era == null ) { throw new NullPointerException ( "Missing Bahai era." ) ; } else if ( ( yearOfEra < 1 ) || ( yearOfEra > 1083 ) ) { throw new IllegalArgumentException ( "Year of era out of range 1-1083: " + yearOfEra ) ; } BadiCalendar prototype = BadiCalendar . axis ( ) . getMinimum ( ) . with ( YEAR_OF_ERA , yearOfEra ) ; int kullishay = prototype . getKullishai ( ) ; int vahid = prototype . getVahid ( ) ; int yearOfVahid = prototype . getYearOfVahid ( ) ; if ( division instanceof BadiMonth ) { if ( ( day < 1 ) || ( day > 19 ) ) { throw new IllegalArgumentException ( "Day out of range 1-19: " + day ) ; } else { return new BadiCalendar ( kullishay , vahid , yearOfVahid , BadiMonth . class . cast ( division ) . getValue ( ) , day ) ; } } else if ( division == BadiIntercalaryDays . AYYAM_I_HA ) { int max = isLeapYear ( kullishay , vahid , yearOfVahid ) ? 5 : 4 ; if ( ( day < 1 ) || ( day > max ) ) { throw new IllegalArgumentException ( "Day out of range 1-" + max + ": " + day ) ; } else { return new BadiCalendar ( kullishay , vahid , yearOfVahid , 0 , day ) ; } } else if ( division == null ) { throw new NullPointerException ( "Missing Badi month or Ayyam-i-Ha." ) ; } else { throw new IllegalArgumentException ( "Invalid implementation of Badi division: " + division ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.