signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EEnum getIfcArithmeticOperatorEnum ( ) { } } | if ( ifcArithmeticOperatorEnumEEnum == null ) { ifcArithmeticOperatorEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 781 ) ; } return ifcArithmeticOperatorEnumEEnum ; |
public class FieldAnnotation { /** * Factory method . Construct from class name and BCEL Field object .
* @ param className
* the name of the class which defines the field
* @ param field
* the BCEL Field object
* @ return the FieldAnnotation */
public static FieldAnnotation fromBCELField ( @ DottedClassName String className , Field field ) { } } | return new FieldAnnotation ( className , field . getName ( ) , field . getSignature ( ) , field . isStatic ( ) ) ; |
public class AbstractImageGL { /** * Draws this image with the supplied transform in the specified target dimensions . */
void draw ( GLShader shader , InternalTransform xform , int tint , float dx , float dy , float dw , float dh ) { } } | draw ( shader , xform , tint , dx , dy , dw , dh , 0 , 0 , ( repeatX ? dw : width ( ) ) , ( repeatY ? dh : height ( ) ) ) ; |
public class AbstractObjectStore { /** * ( non - Javadoc )
* @ see com . ibm . ws . objectManager . ObjectStore # add ( com . ibm . ws . objectManager . ManagedObject , boolean , int ) */
public void add ( ManagedObject managedObject , boolean requiresCurrentCheckpoint ) throws ObjectManagerException { } } | final String methodName = "add" ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , methodName , new Object [ ] { managedObject , new Boolean ( requiresCurrentCheckpoint ) } ) ; // At recovery make sure the Token is in memory , if the Token was newly allocated it will already be in memory .
Token inMemoryToken = ( Token ) inMemoryTokens . putIfAbsent ( new Long ( managedObject . owningToken . storedObjectIdentifier ) , managedObject . owningToken ) ; if ( inMemoryToken == null ) { synchronized ( sequenceNumberLock ) { // During recovery processing we may be given a sequence number to replace an object which never completed
// an add operation in the previous run so make sure we do not reuse such a number again .
if ( managedObject . owningToken . storedObjectIdentifier > sequenceNumber ) { sequenceNumber = Math . max ( managedObject . owningToken . storedObjectIdentifier , sequenceNumber ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isDebugEnabled ( ) ) trace . debug ( this , cclass , methodName , new Object [ ] { "sequenceNumber now" , new Long ( sequenceNumber ) } ) ; } // if ( tokenToStore . storedObjectIdentifier > sequenceNumber ) .
} // synchronized ( sequenceNumberLock ) .
} else { // The inMemoryToken must be the same Token .
if ( inMemoryToken != managedObject . owningToken ) { ReplacementException replacementException = new ReplacementException ( this , managedObject , managedObject . owningToken , inMemoryToken ) ; ObjectManager . ffdc . processException ( this , cclass , methodName , replacementException , "1:473:1.31" ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "add" , new Object [ ] { replacementException , managedObject , managedObject . owningToken , inMemoryToken } ) ; throw replacementException ; } // if ( inMemoryToken ! = managedObject . owningToken ) .
} // if ( inMemoryToken = = null ) .
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName ) ; |
public class Matrix4f { /** * Apply a " lookat " transformation to this matrix for a right - handed coordinate system ,
* that aligns < code > - z < / code > with < code > center - eye < / code > and store the result in < code > dest < / code > .
* If < code > M < / code > is < code > this < / code > matrix and < code > L < / code > the lookat matrix ,
* then the new matrix will be < code > M * L < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * L * v < / code > ,
* the lookat transformation will be applied first !
* In order to set the matrix to a lookat transformation without post - multiplying it ,
* use { @ link # setLookAt ( float , float , float , float , float , float , float , float , float ) setLookAt ( ) } .
* @ see # lookAt ( Vector3fc , Vector3fc , Vector3fc )
* @ see # setLookAt ( float , float , float , float , float , float , float , float , float )
* @ param eyeX
* the x - coordinate of the eye / camera location
* @ param eyeY
* the y - coordinate of the eye / camera location
* @ param eyeZ
* the z - coordinate of the eye / camera location
* @ param centerX
* the x - coordinate of the point to look at
* @ param centerY
* the y - coordinate of the point to look at
* @ param centerZ
* the z - coordinate of the point to look at
* @ param upX
* the x - coordinate of the up vector
* @ param upY
* the y - coordinate of the up vector
* @ param upZ
* the z - coordinate of the up vector
* @ param dest
* will hold the result
* @ return dest */
public Matrix4f lookAt ( float eyeX , float eyeY , float eyeZ , float centerX , float centerY , float centerZ , float upX , float upY , float upZ , Matrix4f dest ) { } } | if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . setLookAt ( eyeX , eyeY , eyeZ , centerX , centerY , centerZ , upX , upY , upZ ) ; else if ( ( properties & PROPERTY_PERSPECTIVE ) != 0 ) return lookAtPerspective ( eyeX , eyeY , eyeZ , centerX , centerY , centerZ , upX , upY , upZ , dest ) ; return lookAtGeneric ( eyeX , eyeY , eyeZ , centerX , centerY , centerZ , upX , upY , upZ , dest ) ; |
public class Settings { /** * Get the internal version or throw an { @ link IllegalArgumentException } if not present
* @ return The { @ link EsMajorVersion } extracted from the properties */
public EsMajorVersion getInternalVersionOrThrow ( ) { } } | String version = getProperty ( InternalConfigurationOptions . INTERNAL_ES_VERSION , null ) ; if ( version == null ) { throw new IllegalArgumentException ( "Elasticsearch version:[ " + InternalConfigurationOptions . INTERNAL_ES_VERSION + "] not present in configuration" ) ; } return EsMajorVersion . parse ( version ) ; |
public class CPAttachmentFileEntryPersistenceImpl { /** * Returns an ordered range of all the cp attachment file entries where classNameId = & # 63 ; and classPK = & # 63 ; and displayDate & lt ; & # 63 ; and status = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPAttachmentFileEntryModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param classNameId the class name ID
* @ param classPK the class pk
* @ param displayDate the display date
* @ param status the status
* @ param start the lower bound of the range of cp attachment file entries
* @ param end the upper bound of the range of cp attachment file entries ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching cp attachment file entries */
@ Override public List < CPAttachmentFileEntry > findByC_C_LtD_S ( long classNameId , long classPK , Date displayDate , int status , int start , int end , OrderByComparator < CPAttachmentFileEntry > orderByComparator ) { } } | return findByC_C_LtD_S ( classNameId , classPK , displayDate , status , start , end , orderByComparator , true ) ; |
public class SimpleGapPenalty { /** * helper method to set the type given the open and extension penalties */
private void setType ( ) { } } | type = ( gop == 0 ) ? GapPenalty . Type . LINEAR : ( ( gep == 0 ) ? GapPenalty . Type . CONSTANT : GapPenalty . Type . AFFINE ) ; |
public class CompactDecimalFormat { /** * { @ inheritDoc } */
@ Override public AttributedCharacterIterator formatToCharacterIterator ( Object obj ) { } } | if ( ! ( obj instanceof Number ) ) { throw new IllegalArgumentException ( ) ; } Number number = ( Number ) obj ; Amount amount = toAmount ( number . doubleValue ( ) , null , null ) ; return super . formatToCharacterIterator ( amount . getQty ( ) , amount . getUnit ( ) ) ; |
public class Project { /** * Get Goals in this Project filtered as specified in the passed in filter .
* @ param filter Criteria to filter on . Project will be set automatically .
* If null , all Goals in the project are returned .
* @ param includeSubprojects Specifies whether to include items from sub
* project or not . This only adds open sub projects .
* @ return An Collection of Goals . */
public Collection < Goal > getGoals ( GoalFilter filter , boolean includeSubprojects ) { } } | filter = ( filter != null ) ? filter : new GoalFilter ( ) ; return getInstance ( ) . get ( ) . goals ( getFilter ( filter , includeSubprojects ) ) ; |
public class MtasDataBasic { /** * Sets the value .
* @ param newPosition the new position
* @ param valueSum the value sum
* @ param valueN the value N
* @ param currentExisting the current existing */
protected void setValue ( int newPosition , T1 valueSum , long valueN , boolean currentExisting ) { } } | if ( valueN > 0 ) { if ( currentExisting ) { newBasicValueSumList [ newPosition ] = operations . add11 ( newBasicValueSumList [ newPosition ] , valueSum ) ; newBasicValueNList [ newPosition ] += valueN ; } else { newBasicValueSumList [ newPosition ] = valueSum ; newBasicValueNList [ newPosition ] = valueN ; } } |
public class HiveMetaStoreEventHelper { /** * Drop partition */
protected static void submitSuccessfulPartitionDrop ( EventSubmitter eventSubmitter , String dbName , String tableName , List < String > partitionValues , String metastoreURI ) { } } | eventSubmitter . submit ( PARTITION_DROP + SUCCESS_POSTFIX , ImmutableMap . < String , String > builder ( ) . put ( DB_NAME , dbName ) . put ( TABLE_NAME , tableName ) . put ( "PartitionValues" , partitionValues . toString ( ) ) . put ( "metastoreURI" , metastoreURI ) . build ( ) ) ; |
public class DBFField { /** * Set Length of the field .
* This method should be called before calling setDecimalCount ( ) .
* @ param length of the field as int . */
public void setLength ( int length ) { } } | if ( length > this . type . getMaxSize ( ) || length < this . type . getMinSize ( ) ) { throw new UnsupportedOperationException ( "Length for " + this . type + " must be between " + this . type . getMinSize ( ) + " and " + this . type . getMaxSize ( ) ) ; } this . length = length ; |
public class PathfindableConfig { /** * Import the allowed movements .
* @ param node The root node ( must not be < code > null < / code > ) .
* @ return The allowed movements .
* @ throws LionEngineException If malformed movement name . */
private static Collection < MovementTile > importAllowedMovements ( Xml node ) { } } | if ( ! node . hasChild ( NODE_MOVEMENT ) ) { return Collections . emptySet ( ) ; } final Collection < MovementTile > movements = EnumSet . noneOf ( MovementTile . class ) ; for ( final Xml movementNode : node . getChildren ( NODE_MOVEMENT ) ) { try { movements . add ( MovementTile . valueOf ( movementNode . getText ( ) ) ) ; } catch ( final IllegalArgumentException exception ) { throw new LionEngineException ( exception ) ; } } return movements ; |
public class HadoopJobUtils { /** * This method is a decorator around the KillAllSpawnedHadoopJobs method .
* This method takes additional parameters to determine whether KillAllSpawnedHadoopJobs needs to
* be executed
* using doAs as a different user
* @ param logFilePath Azkaban log file path
* @ param jobProps Azkaban job props
* @ param tokenFile Pass in the tokenFile if value is known . It is ok to skip if the token file
* is in the environmental variable
* @ param log a usable logger */
public static void proxyUserKillAllSpawnedHadoopJobs ( final String logFilePath , Props jobProps , File tokenFile , final Logger log ) { } } | Properties properties = new Properties ( ) ; properties . putAll ( jobProps . getFlattened ( ) ) ; try { if ( HadoopSecureWrapperUtils . shouldProxy ( properties ) ) { UserGroupInformation proxyUser = HadoopSecureWrapperUtils . setupProxyUser ( properties , tokenFile . getAbsolutePath ( ) , log ) ; proxyUser . doAs ( new PrivilegedExceptionAction < Void > ( ) { @ Override public Void run ( ) throws Exception { HadoopJobUtils . killAllSpawnedHadoopJobs ( logFilePath , log ) ; return null ; } } ) ; } else { HadoopJobUtils . killAllSpawnedHadoopJobs ( logFilePath , log ) ; } } catch ( Throwable t ) { log . warn ( "something happened while trying to kill all spawned jobs" , t ) ; } |
public class DataUtils { /** * Return a list of all the artifact contained into the module ( an its submodules )
* @ param module
* @ return Set < Artifact > */
public static Set < Artifact > getAllArtifacts ( final Module module ) { } } | final Set < Artifact > artifacts = new HashSet < Artifact > ( ) ; artifacts . addAll ( module . getArtifacts ( ) ) ; for ( final Module submodule : module . getSubmodules ( ) ) { artifacts . addAll ( getAllArtifacts ( submodule ) ) ; } return artifacts ; |
public class SpringTemplateEngine { /** * Returns whether the SpringEL compiler should be enabled in SpringEL expressions or not .
* ( This is just a convenience method , equivalent to calling
* { @ link SpringStandardDialect # getEnableSpringELCompiler ( ) } on the dialect instance itself . It is provided
* here in order to allow users to enable the SpEL compiler without
* having to directly create instances of the { @ link SpringStandardDialect } )
* Expression compilation can significantly improve the performance of Spring EL expressions , but
* might not be adequate for every environment . Read
* < a href = " http : / / docs . spring . io / spring / docs / current / spring - framework - reference / html / expressions . html # expressions - spel - compilation " > the
* official Spring documentation < / a > for more detail .
* Also note that although Spring includes a SpEL compiler since Spring 4.1 , most expressions
* in Thymeleaf templates will only be able to properly benefit from this compilation step when at least
* Spring Framework version 4.2.4 is used .
* This flag is set to { @ code false } by default .
* @ return { @ code true } if SpEL expressions should be compiled if possible , { @ code false } if not . */
public boolean getEnableSpringELCompiler ( ) { } } | final Set < IDialect > dialects = getDialects ( ) ; for ( final IDialect dialect : dialects ) { if ( dialect instanceof SpringStandardDialect ) { return ( ( SpringStandardDialect ) dialect ) . getEnableSpringELCompiler ( ) ; } } return false ; |
public class RegexHashMap { /** * checks whether the cache or container contain a specific key , then evaluates the
* container ' s keys as regexes and checks whether they match the specific key . */
public boolean containsKey ( Object key ) { } } | // the key is a direct hit from our cache
if ( cache . containsKey ( key ) ) return true ; // the key is a direct hit from our hashmap
if ( container . containsKey ( key ) ) return true ; // check if the requested key is a matching string of a regex key from our container
Iterator < String > regexKeys = container . keySet ( ) . iterator ( ) ; while ( regexKeys . hasNext ( ) ) { if ( Pattern . matches ( regexKeys . next ( ) , ( String ) key ) ) return true ; } // if the three previous tests yield no result , the key does not exist
return false ; |
public class AdjacencyMatrix { /** * Push - relabel algorithm for maximum flow */
private void push ( double [ ] [ ] flow , double [ ] excess , int u , int v ) { } } | double send = Math . min ( excess [ u ] , graph [ u ] [ v ] - flow [ u ] [ v ] ) ; flow [ u ] [ v ] += send ; flow [ v ] [ u ] -= send ; excess [ u ] -= send ; excess [ v ] += send ; |
public class FetchStreamRequestHandler { /** * Progress info message
* @ param tag Message that precedes progress info . Indicate ' keys ' or
* ' entries ' . */
protected void progressInfoMessage ( final String tag ) { } } | if ( logger . isInfoEnabled ( ) ) { long totalTimeS = ( System . currentTimeMillis ( ) - startTimeMs ) / Time . MS_PER_SECOND ; logger . info ( tag + " : scanned " + scanned + " and fetched " + fetched + " for store '" + storageEngine . getName ( ) + "' partitionIds:" + partitionIds + " in " + totalTimeS + " s" ) ; } |
public class StringHtmlEncoder { /** * Encodes complete component by calling { @ link UIComponent # encodeAll ( FacesContext ) } . Surrounds template with given
* styleclass if template does not contains it .
* @ param component component
* @ param context { @ link FacesContext }
* @ param styleClass a div wrapper style class
* @ return the rendered string .
* @ throws IOException thrown by writer */
public static String encodeComponentWithSurroundingDivIfNecessary ( final FacesContext context , final UIComponent component , final String styleClass ) throws IOException { } } | final String encodedFacet = encodeComponent ( context , component ) ; if ( StringUtils . isNotEmpty ( styleClass ) && ! encodedFacet . contains ( styleClass ) ) { return "<div class=\"" + styleClass + "\">" + encodedFacet + "</div>" ; } return "<div>" + encodedFacet + "</div>" ; |
public class AnnotationExtensions { /** * Sets the annotation value for the given key of the given annotation to the given new value at
* runtime .
* @ param annotation
* the annotation
* @ param key
* the key
* @ param value
* the value to set
* @ return the old value or default value if not set
* @ throws NoSuchFieldException
* the no such field exception
* @ throws SecurityException
* Thrown if the security manager indicates a security violation .
* @ throws IllegalArgumentException
* the illegal argument exception
* @ throws IllegalAccessException
* the illegal access exception */
@ SuppressWarnings ( "unchecked" ) public static Object setAnnotationValue ( final Annotation annotation , final String key , final Object value ) throws NoSuchFieldException , SecurityException , IllegalArgumentException , IllegalAccessException { } } | final Object invocationHandler = Proxy . getInvocationHandler ( annotation ) ; final Field field = invocationHandler . getClass ( ) . getDeclaredField ( "memberValues" ) ; field . setAccessible ( true ) ; final Map < String , Object > memberValues = ( Map < String , Object > ) field . get ( invocationHandler ) ; final Object oldValue = memberValues . get ( key ) ; if ( oldValue == null || oldValue . getClass ( ) != value . getClass ( ) ) { throw new IllegalArgumentException ( ) ; } memberValues . put ( key , value ) ; return oldValue ; |
public class DcosSpec { /** * Check if all task of a service are correctly distributed in all datacenters of the cluster
* @ param serviceList all task deployed in the cluster separated by a semicolumn .
* @ param dataCentersIps all ips of the datacenters to be checked
* Example : ip _ 1 _ dc1 , ip _ 2 _ dc1 ; ip _ 3 _ dc2 , ip _ 4 _ dc2
* @ throws Exception */
@ Given ( "^services '(.+?)' are splitted correctly in datacenters '(.+?)'$" ) public void checkServicesDistributionMultiDataCenterPram ( String serviceList , String dataCentersIps ) throws Exception { } } | checkDataCentersDistribution ( serviceList . split ( "," ) , dataCentersIps . split ( ";" ) ) ; |
public class NativeSecp256k1 { /** * TODO add a ' compressed ' arg */
public static byte [ ] computePubkey ( byte [ ] seckey ) throws AssertFailException { } } | Preconditions . checkArgument ( seckey . length == 32 ) ; ByteBuffer byteBuff = nativeECDSABuffer . get ( ) ; if ( byteBuff == null || byteBuff . capacity ( ) < seckey . length ) { byteBuff = ByteBuffer . allocateDirect ( seckey . length ) ; byteBuff . order ( ByteOrder . nativeOrder ( ) ) ; nativeECDSABuffer . set ( byteBuff ) ; } byteBuff . rewind ( ) ; byteBuff . put ( seckey ) ; byte [ ] [ ] retByteArray ; r . lock ( ) ; try { retByteArray = secp256k1_ec_pubkey_create ( byteBuff , Secp256k1Context . getContext ( ) ) ; } finally { r . unlock ( ) ; } byte [ ] pubArr = retByteArray [ 0 ] ; int pubLen = new BigInteger ( new byte [ ] { retByteArray [ 1 ] [ 0 ] } ) . intValue ( ) ; int retVal = new BigInteger ( new byte [ ] { retByteArray [ 1 ] [ 1 ] } ) . intValue ( ) ; assertEquals ( pubArr . length , pubLen , "Got bad pubkey length." ) ; return retVal == 0 ? new byte [ 0 ] : pubArr ; |
public class DynamicRespondrSkinConfigController { /** * Display a form to manage skin choices . */
@ RenderMapping public String showConfigPage ( RenderRequest request , PortletPreferences preferences , Model model ) { } } | // Add skin names
SortedSet < String > skins = skinService . getSkinNames ( request ) ; model . addAttribute ( "skinNames" , skins ) ; // Get the list of preferences and add them to the model
Enumeration < String > preferenceNames = preferences . getNames ( ) ; while ( preferenceNames . hasMoreElements ( ) ) { String name = preferenceNames . nextElement ( ) ; if ( name . startsWith ( DynamicRespondrSkinConstants . CONFIGURABLE_PREFIX ) ) { model . addAttribute ( name , preferences . getValue ( name , "" ) ) ; } } return "jsp/DynamicRespondrSkin/skinConfig" ; |
public class SpecialMatchExpt { /** * Show results in an easily machine - readable format . */
public void dumpResults ( PrintStream out ) throws IOException { } } | PrintfFormat fmt = new PrintfFormat ( "%7.2f\t%s\t%s\n" ) ; for ( int i = 0 ; i < pairs . length ; i ++ ) { if ( pairs [ i ] != null ) { String aText = ( pairs [ i ] . getA ( ) == null ) ? "***" : pairs [ i ] . getA ( ) . unwrap ( ) ; String bText = ( pairs [ i ] . getB ( ) == null ) ? "***" : pairs [ i ] . getB ( ) . unwrap ( ) ; out . print ( fmt . sprintf ( new Object [ ] { new Double ( pairs [ i ] . getDistance ( ) ) , aText , bText } ) ) ; } } |
public class ExcelItemWriter { /** * close . */
public void flush ( ) { } } | try { workbook . write ( outputStream ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; throw new RuntimeException ( e . getMessage ( ) ) ; } |
public class ScoreMetrics { /** * Returns width of string rendered in the textType font .
* @ param textType
* one of { @ link abc . ui . scoretemplates . ScoreElements } constants
* @ param text
* The text we want to know its width
* @ return the width of a string in this font used for this score metrics */
protected int getTextFontWidth ( byte textType , String text ) { } } | if ( text == null ) return 0 ; FontMetrics fontMetrics = g2 . getFontMetrics ( getTextFont ( textType ) ) ; return fontMetrics . stringWidth ( text ) ; |
public class UnpackingSoSource { /** * Verify or refresh the state of the shared library store . */
@ Override protected void prepare ( int flags ) throws IOException { } } | SysUtil . mkdirOrThrow ( soDirectory ) ; File lockFileName = new File ( soDirectory , LOCK_FILE_NAME ) ; FileLocker lock = FileLocker . lock ( lockFileName ) ; try { Log . v ( TAG , "locked dso store " + soDirectory ) ; if ( refreshLocked ( lock , flags , getDepsBlock ( ) ) ) { lock = null ; // Lock transferred to syncer thread
} else { Log . i ( TAG , "dso store is up-to-date: " + soDirectory ) ; } } finally { if ( lock != null ) { Log . v ( TAG , "releasing dso store lock for " + soDirectory ) ; lock . close ( ) ; } else { Log . v ( TAG , "not releasing dso store lock for " + soDirectory + " (syncer thread started)" ) ; } } |
public class FactoryWaveletTransform { /** * Creates a wavelet transform for images that are of type { @ link GrayF32 } .
* @ param waveletDesc Description of the wavelet .
* @ param numLevels Number of levels in the multi - level transform .
* @ param minPixelValue Minimum pixel intensity value
* @ param maxPixelValue Maximum pixel intensity value
* @ return The transform class . */
public static WaveletTransform < GrayF32 , GrayF32 , WlCoef_F32 > create_F32 ( WaveletDescription < WlCoef_F32 > waveletDesc , int numLevels , float minPixelValue , float maxPixelValue ) { } } | return new WaveletTransformFloat32 ( waveletDesc , numLevels , minPixelValue , maxPixelValue ) ; |
public class FileUtil { /** * 创建文件及其父目录 , 如果这个文件存在 , 直接返回这个文件 < br >
* 此方法不对File对象类型做判断 , 如果File不存在 , 无法判断其类型
* @ param parent 父文件对象
* @ param path 文件路径
* @ return File
* @ throws IORuntimeException IO异常 */
public static File touch ( File parent , String path ) throws IORuntimeException { } } | return touch ( file ( parent , path ) ) ; |
public class Evaluation { /** * Evaluate a single prediction ( one prediction at a time )
* @ param predictedIdx Index of class predicted by the network
* @ param actualIdx Index of actual class */
public void eval ( int predictedIdx , int actualIdx ) { } } | // Add the number of rows to numRowCounter
numRowCounter ++ ; // If confusion is null , then Evaluation is instantiated without providing the classes
if ( confusion == null ) { throw new UnsupportedOperationException ( "Cannot evaluate single example without initializing confusion matrix first" ) ; } addToConfusion ( actualIdx , predictedIdx ) ; // If they are equal
if ( predictedIdx == actualIdx ) { // Then add 1 to True Positive
// ( For a particular label )
incrementTruePositives ( predictedIdx ) ; // And add 1 for each negative class that is accurately predicted ( True Negative )
// ( For a particular label )
for ( Integer clazz : confusion ( ) . getClasses ( ) ) { if ( clazz != predictedIdx ) trueNegatives . incrementCount ( clazz , 1.0f ) ; } } else { // Otherwise the real label is predicted as negative ( False Negative )
incrementFalseNegatives ( actualIdx ) ; // Otherwise the prediction is predicted as falsely positive ( False Positive )
incrementFalsePositives ( predictedIdx ) ; // Otherwise true negatives
for ( Integer clazz : confusion ( ) . getClasses ( ) ) { if ( clazz != predictedIdx && clazz != actualIdx ) trueNegatives . incrementCount ( clazz , 1.0f ) ; } } |
public class TypeUtils { /** * Checks if a { @ link TypeRef } is a { @ link java . util . Optional } .
* @ param type The type to check .
* @ return True if its a { @ link java . util . Optional } . */
public static boolean isOptional ( TypeRef type ) { } } | if ( ! ( type instanceof ClassRef ) ) { return false ; } return JAVA_UTIL_OPTIONAL . equals ( ( ( ClassRef ) type ) . getDefinition ( ) . getFullyQualifiedName ( ) ) ; |
public class EdgeScores { /** * Convert a Tensor object to an EdgeScores , where the wall node is indexed as position n + 1 in the Tensor . */
public static EdgeScores tensorToEdgeScores ( Tensor t ) { } } | if ( t . getDims ( ) . length != 2 ) { throw new IllegalArgumentException ( "Tensor must be an nxn matrix." ) ; } int n = t . getDims ( ) [ 1 ] ; EdgeScores es = new EdgeScores ( n , 0 ) ; for ( int p = - 1 ; p < n ; p ++ ) { for ( int c = 0 ; c < n ; c ++ ) { if ( p == c ) { continue ; } int pp = getTensorParent ( p , c ) ; es . setScore ( p , c , t . get ( pp , c ) ) ; } } return es ; |
public class JdbcRepository { /** * Disposes the resources . */
public static void dispose ( ) { } } | final JdbcTransaction jdbcTransaction = TX . get ( ) ; if ( null != jdbcTransaction && jdbcTransaction . getConnection ( ) != null ) { jdbcTransaction . dispose ( ) ; } final Connection connection = CONN . get ( ) ; if ( null != connection ) { try { connection . close ( ) ; } catch ( final SQLException e ) { throw new RuntimeException ( "Close connection failed" , e ) ; } finally { CONN . set ( null ) ; } } |
public class CmsCategoryTree { /** * Disabled the category selection . < p >
* @ param disabledReason the disable reason , will be displayed as check box title */
public void disable ( String disabledReason ) { } } | if ( m_isEnabled || ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( disabledReason ) && ! disabledReason . equals ( m_disabledReason ) ) ) { m_isEnabled = false ; m_disabledReason = disabledReason ; m_scrollList . addStyleName ( I_CmsInputLayoutBundle . INSTANCE . inputCss ( ) . disabled ( ) ) ; setListEnabled ( m_scrollList , false , disabledReason ) ; } |
public class XMLUtil { /** * Write an enumeration value .
* @ param < T > is the type of the enumeration .
* @ param document is the XML document to explore .
* @ param type is the type of the enumeration .
* @ param caseSensitive indicates of the { @ code path } ' s components are case sensitive .
* @ param value is the value to put in the document .
* @ param path is the list of and ended by the attribute ' s name .
* @ return < code > true < / code > if written , < code > false < / code > if not written . */
public static < T extends Enum < T > > boolean setAttributeEnum ( Node document , Class < T > type , boolean caseSensitive , T value , String ... path ) { } } | assert document != null : AssertMessages . notNullParameter ( 0 ) ; assert type != null : AssertMessages . notNullParameter ( 1 ) ; if ( value != null ) { final String [ ] thePath = Arrays . copyOf ( path , path . length - 1 ) ; final String attrName = path [ path . length - 1 ] ; if ( attrName != null && ! attrName . isEmpty ( ) ) { Element node = null ; if ( thePath != null && thePath . length > 0 ) { node = getElementFromPath ( document , caseSensitive , 0 , thePath ) ; } else if ( document instanceof Element ) { node = ( Element ) document ; } if ( node != null ) { node . setAttribute ( attrName , value . name ( ) ) ; return true ; } } } return false ; |
public class Parser { /** * A { @ link Parser } that returns both parsed object and matched string . */
public final Parser < WithSource < T > > withSource ( ) { } } | return new Parser < WithSource < T > > ( ) { @ Override boolean apply ( ParseContext ctxt ) { int begin = ctxt . getIndex ( ) ; if ( ! Parser . this . apply ( ctxt ) ) { return false ; } String source = ctxt . source . subSequence ( begin , ctxt . getIndex ( ) ) . toString ( ) ; @ SuppressWarnings ( "unchecked" ) WithSource < T > withSource = new WithSource < T > ( ( T ) ctxt . result , source ) ; ctxt . result = withSource ; return true ; } @ Override public String toString ( ) { return Parser . this . toString ( ) ; } } ; |
public class TicketsEntity { /** * Create a Password Change Ticket . A token with scope create : user _ tickets is needed .
* See https : / / auth0 . com / docs / api / management / v2 # ! / Tickets / post _ password _ change
* @ param passwordChangeTicket the password change ticket data to set .
* @ return a Request to execute . */
public Request < PasswordChangeTicket > requestPasswordChange ( PasswordChangeTicket passwordChangeTicket ) { } } | Asserts . assertNotNull ( passwordChangeTicket , "password change ticket" ) ; String url = baseUrl . newBuilder ( ) . addPathSegments ( "api/v2/tickets/password-change" ) . build ( ) . toString ( ) ; CustomRequest < PasswordChangeTicket > request = new CustomRequest < > ( client , url , "POST" , new TypeReference < PasswordChangeTicket > ( ) { } ) ; request . addHeader ( "Authorization" , "Bearer " + apiToken ) ; request . setBody ( passwordChangeTicket ) ; return request ; |
public class ElementPlugin { /** * Adds the specified component to the toolbar container . The component is registered to this
* container and will visible only when the container is active .
* @ param component BaseComponent to add . */
public void addToolbarComponent ( BaseComponent component ) { } } | if ( tbarContainer == null ) { tbarContainer = new ToolbarContainer ( ) ; shell . addToolbarComponent ( tbarContainer ) ; registerComponent ( tbarContainer ) ; } tbarContainer . addChild ( component ) ; |
public class Types { /** * where */
private boolean eraseNotNeeded ( Type t ) { } } | // We don ' t want to erase primitive types and String type as that
// operation is idempotent . Also , erasing these could result in loss
// of information such as constant values attached to such types .
return ( t . isPrimitive ( ) ) || ( syms . stringType . tsym == t . tsym ) ; |
public class SecurityInflowContextProviderImpl { /** * ( non - Javadoc )
* @ see com . ibm . wsspi . threadcontext . ThreadContextProvider # getDefaultThreadContext ( java . util . Map ) */
@ Override public ThreadContext createDefaultThreadContext ( Map < String , String > execProps ) { } } | String identityName = execProps . get ( WSContextService . TASK_OWNER ) ; SecurityContext defaultCtx = new SecurityContext ( ) { private static final long serialVersionUID = 1924323423421234352L ; @ Override public void setupSecurityContext ( CallbackHandler arg0 , Subject arg1 , Subject arg2 ) { // do nothing and instead use the default subjects passed in by the application server .
} } ; CredentialsService credService = ( CredentialsService ) credServiceRef . getServiceWithException ( ) ; WSSecurityService wss = ( WSSecurityService ) securityServiceRef . getServiceWithException ( ) ; UnauthenticatedSubjectService unauthSubjService = ( UnauthenticatedSubjectService ) unauthSubjectServiceRef . getServiceWithException ( ) ; AuthenticationService authService = ( AuthenticationService ) authServiceRef . getServiceWithException ( ) ; return new SecurityInflowContext ( credService , wss , unauthSubjService , authService , defaultCtx , identityName ) ; |
public class AipImageSearch { /** * 相似图检索 — 更新接口
* * * 更新图库中图片的摘要和分类信息 ( 具体变量为brief 、 tags ) * *
* @ param image - 二进制图像数据
* @ param options - 可选参数对象 , key : value都为string类型
* options - options列表 :
* brief 更新的摘要信息 , 最长256B 。 样例 : { " name " : " 周杰伦 " , " id " : " 666 " }
* tags 1 - 65535范围内的整数 , tag间以逗号分隔 , 最多2个tag 。 样例 : " 100,11 " ; 检索时可圈定分类维度进行检索
* @ return JSONObject */
public JSONObject similarUpdate ( byte [ ] image , HashMap < String , String > options ) { } } | AipRequest request = new AipRequest ( ) ; preOperation ( request ) ; String base64Content = Base64Util . encode ( image ) ; request . addBody ( "image" , base64Content ) ; if ( options != null ) { request . addBody ( options ) ; } request . setUri ( ImageSearchConsts . SIMILAR_UPDATE ) ; postOperation ( request ) ; return requestServer ( request ) ; |
public class BNFHeadersImpl {
    /**
     * Utility method to parse CRLFs and find out whether the end of the
     * headers has been reached. Throws a MalformedMessageException if a
     * multiline header value is parsed without the corresponding header name.
     * <p>
     * NOTE(review): the original doc said "4 CRLFs", but the loop actually
     * treats two line feeds ({@code 2 <= numCRLFs}, CRs ignored) as the
     * end-of-headers marker — confirm the intended contract.
     *
     * @param buff the buffer to read from.
     * @return boolean (false if more data is needed, true otherwise).
     * @throws MalformedMessageException if a continuation line appears before
     *         any header has been parsed.
     */
    private boolean parseCRLFs(WsByteBuffer buff) throws MalformedMessageException {
        byte b;
        // scan through up to 4 characters
        for (int i = 0; i < 4; i++) {
            // Refill the byte cache from the buffer when it is exhausted.
            if (this.bytePosition >= this.byteLimit) {
                if (!fillByteCache(buff)) {
                    // no more data
                    return false;
                }
            }
            b = this.byteCache[this.bytePosition++];
            if (BNFHeaders.CR == b) {
                // ignore CR characters; only LFs are counted
                continue;
            } else if (BNFHeaders.LF == b) {
                // line feed found
                this.numCRLFs++;
            } else if (BNFHeaders.SPACE == b || BNFHeaders.TAB == b) {
                // Leading whitespace after a CRLF marks a folded (multi-line)
                // header value continuing the previous header.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Multiline header follows");
                }
                this.bIsMultiLine = true;
                if (null == this.lastHdrInSequence) {
                    // can't start off with a multiline value
                    throw new MalformedMessageException("Incorrect multiline header value");
                }
                // Resume value parsing against the last header seen.
                this.currentElem = this.lastHdrInSequence;
                this.stateOfParsing = PARSING_VALUE;
                this.numCRLFs = 0;
                return true;
            } else {
                // found a regular character... move pointer back one so the
                // header parser sees it
                this.bytePosition--;
                break; // out of for loop
            }
            if (2 <= this.numCRLFs) {
                // found double LFs, end of headers: record and set position
                this.eohPosition = findCurrentBufferPosition(buff);
                buff.position(this.eohPosition);
                break; // out of for loop
            }
        } // end of for loop
        // we're about to either start parsing another header or
        // we've reached the end of the headers
        this.bIsMultiLine = false;
        this.stateOfParsing = PARSING_HEADER;
        this.numCRLFs = 0;
        return true;
    }
}
public class NioGroovyMethods {
    /**
     * Appends the text supplied by the Writer to the end of the File, using
     * the platform default encoding. If that charset is "UTF-16BE" or
     * "UTF-16LE" (or an equivalent alias), {@code writeBom} is {@code true},
     * and the file doesn't already exist, the requisite byte order mark is
     * written to the file before the text is appended.
     *
     * @param file a File
     * @param writer the Writer supplying the text to append at the end of the File
     * @param writeBom whether to write the BOM
     * @throws IOException if an IOException occurs.
     * @since 2.5.0
     */
    public static void append(Path file, Writer writer, boolean writeBom) throws IOException {
        // Delegates to the charset-aware overload with the platform default.
        append(file, writer, Charset.defaultCharset().name(), writeBom);
    }
}
public class J4pRequest { private Object serializeCollection ( Collection pArg ) { } } | JSONArray array = new JSONArray ( ) ; for ( Object value : ( ( Collection ) pArg ) ) { array . add ( serializeArgumentToJson ( value ) ) ; } return array ; |
public class Concat {
    /**
     * Transfers outstanding demand from the previous subscription to the next
     * one, draining queued request counts accumulated while switching sources.
     * Uses a wip (work-in-progress) counter to serialize concurrent callers.
     */
    public void addMissingRequests() {
        int missed = 1;
        long toRequest = 0L;
        do {
            // Atomically take any demand queued while no subscription was active.
            long localQueued = queued.getAndSet(0l);
            Subscription localSub = next.getAndSet(null);
            // NOTE(review): missedOutput is read but never used below — confirm
            // whether produced.get() was meant to be subtracted from reqs.
            long missedOutput = produced.get();
            Subscription localActive = active.get();
            long reqs = requested.get() + localQueued;
            if (reqs < 0 || toRequest < 0) {
                // Overflow: switch to unbounded mode and request everything.
                processAll = true;
                if (localSub != null) localSub.request(Long.MAX_VALUE);
                if (localActive != null) localActive.request(Long.MAX_VALUE);
                return;
            }
            requested.set(reqs);
            if (localSub != null) {
                // A new upstream arrived: make it active and forward the demand.
                active.set(localSub);
                toRequest += reqs;
            } else if (localQueued != 0 && localActive != null) {
                // No new upstream, but queued demand must reach the active one.
                toRequest += reqs;
            }
            // Decrement wip by the work done; loop again if more arrived meanwhile.
            missed = wip.accumulateAndGet(missed, (a, b) -> a - b);
        } while (missed != 0);
        if (toRequest > 0) active.get().request(toRequest);
    }
}
public class CmsXsltUtil { /** * Returns the delimiter that most often occures in the CSV content and is therefore best applicable for the CSV data . < p >
* @ param csvData the comma separated values
* @ return the delimiter that is best applicable for the CSV data */
public static String getPreferredDelimiter ( String csvData ) { } } | String bestMatch = "" ; int bestMatchCount = 0 ; // find for each delimiter , how often it occures in the String csvData
for ( int i = 0 ; i < DELIMITERS . length ; i ++ ) { int currentCount = csvData . split ( DELIMITERS [ i ] ) . length ; if ( currentCount > bestMatchCount ) { bestMatch = DELIMITERS [ i ] ; bestMatchCount = currentCount ; } } return bestMatch ; |
public class Message { /** * Determines if an RRset with the given name and type is already
* present in any section .
* @ see RRset
* @ see Section */
public boolean findRRset ( Name name , int type ) { } } | return ( findRRset ( name , type , Section . ANSWER ) || findRRset ( name , type , Section . AUTHORITY ) || findRRset ( name , type , Section . ADDITIONAL ) ) ; |
public class CPDefinitionOptionRelPersistenceImpl {
    /**
     * Returns the cp definition option rel where uuid = &#63; and groupId =
     * &#63;, or <code>null</code> if it could not be found. Uses the finder
     * cache.
     *
     * @param uuid the uuid
     * @param groupId the group ID
     * @return the matching cp definition option rel, or <code>null</code> if
     *         a matching cp definition option rel could not be found
     */
    @Override
    public CPDefinitionOptionRel fetchByUUID_G(String uuid, long groupId) {
        // Delegates with finder-cache lookups enabled.
        return fetchByUUID_G(uuid, groupId, true);
    }
}
public class ProfileCollection { /** * create ( if not exist ) and return video xml as dom
* @ param xml
* @ return
* @ throws IOException
* @ throws SAXException */
private static Element getVideoXML ( Resource xml ) throws IOException , SAXException { } } | if ( ! xml . exists ( ) ) { createFileFromResource ( "/resource/video/video.xml" , xml ) ; } Document doc = loadDocument ( xml ) ; return doc . getDocumentElement ( ) ; |
public class JvmUtil { /** * Represents the amount of memory currently used ( in bytes ) .
* @ return memory */
public static long usedMemory ( ) { } } | long memory = 0 ; for ( MemoryPoolMXBean mp : ManagementFactory . getMemoryPoolMXBeans ( ) ) { memory += mp . getUsage ( ) . getUsed ( ) ; } return memory ; |
public class JavaSourceUtils { /** * 合并枚举常量声明 */
public static EnumConstantDeclaration mergeEnumConstant ( EnumConstantDeclaration one , EnumConstantDeclaration two ) { } } | if ( isAllNull ( one , two ) ) return null ; EnumConstantDeclaration ecd = null ; if ( isAllNotNull ( one , two ) ) { ecd = new EnumConstantDeclaration ( ) ; ecd . setName ( one . getName ( ) ) ; ecd . setJavaDoc ( mergeSelective ( one . getJavaDoc ( ) , two . getJavaDoc ( ) ) ) ; ecd . setComment ( mergeSelective ( one . getComment ( ) , two . getComment ( ) ) ) ; ecd . setAnnotations ( mergeListNoDuplicate ( one . getAnnotations ( ) , two . getAnnotations ( ) ) ) ; ecd . setArgs ( mergeListInOrder ( one . getArgs ( ) , two . getArgs ( ) ) ) ; ecd . setClassBody ( mergeBodies ( one . getClassBody ( ) , two . getClassBody ( ) ) ) ; LOG . info ( "merge EnumConstantDeclaration --> {}" , ecd . getName ( ) ) ; } else { ecd = findFirstNotNull ( one , two ) ; LOG . info ( "add EnumConstantDeclaration --> {}" , ecd . getName ( ) ) ; } return ecd ; |
public class UpdateGroupRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateGroupRequest updateGroupRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateGroupRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateGroupRequest . getGroupName ( ) , GROUPNAME_BINDING ) ; protocolMarshaller . marshall ( updateGroupRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class PropertiesFileLoader { /** * Method called to indicate persisting the properties file is now complete .
* @ throws IOException */
protected void endPersistence ( final BufferedWriter writer ) throws IOException { } } | // Append any additional users to the end of the file .
for ( Object currentKey : toSave . keySet ( ) ) { String key = ( String ) currentKey ; if ( ! key . contains ( DISABLE_SUFFIX_KEY ) ) { writeProperty ( writer , key , null ) ; } } toSave = null ; |
public class DescriptorImplBase { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . descriptor . api . Descriptor # exportAsString ( ) */
@ Override public String exportAsString ( ) throws DescriptorExportException { } } | // Export as bytes
final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; this . exportTo ( baos ) ; // Make a String out of the bytes
final String content ; try { content = baos . toString ( Charset . UTF8 . name ( ) ) ; } catch ( final UnsupportedEncodingException e ) { throw new DescriptorExportException ( "Inconsistent encoding used during export" , e ) ; } // Return
return content ; |
public class PerformanceMetrics {
    /**
     * Creates a new instance of performance metrics. Generates a new metrics
     * key and assigns zero level.
     *
     * @param action a short name of the measured operation, typically the
     *        first prefix of the descriptor
     * @param descriptor a full description of the measured operation
     * @param correlationId a reference to the request which caused the operation
     * @return a new instance of performance metrics
     */
    public static PerformanceMetrics createNew(String action, String descriptor, String correlationId) {
        // Fresh key from the shared generator, level 0, no parent metrics.
        return new PerformanceMetrics(KEY_GEN.getAndIncrement(), 0, action, null, descriptor, correlationId);
    }
}
public class WebSocketClientInitializer { /** * 通道注册的时候配置websocket解码handler */
@ Override protected final void initChannel ( Channel ch ) throws Exception { } } | ChannelPipeline pipeline = ch . pipeline ( ) ; if ( sslCtx != null ) { pipeline . addLast ( sslCtx . newHandler ( ch . alloc ( ) , host , port ) ) ; } pipeline . addLast ( new HttpClientCodec ( ) ) ; pipeline . addLast ( new ChunkedWriteHandler ( ) ) ; pipeline . addLast ( new HttpObjectAggregator ( 64 * 1024 ) ) ; pipeline . addLast ( new WebSocketClientProtocolHandler ( WebSocketClientHandshakerFactory . newHandshaker ( webSocketURL , WebSocketVersion . V13 , subprotocol , false , new DefaultHttpHeaders ( ) ) ) ) ; pipeline . addLast ( new WebSocketConnectedClientHandler ( ) ) ; // 连接成功监听handler |
public class ArchetypeCatalogFileEntry { /** * { @ inheritDoc } */
public long getLastModified ( ) throws IOException { } } | try { return store . getArchetypeCatalogLastModified ( ) ; } catch ( ArchetypeCatalogNotFoundException e ) { IOException ioe = new IOException ( "File not found" ) ; ioe . initCause ( e ) ; throw ioe ; } |
public class RelationalOperationsMatrix { /** * with the exterior of Point B . */
private void interiorPointExteriorPoint_ ( int cluster , int id_a , int id_b , int predicate ) { } } | if ( m_matrix [ predicate ] == 0 ) return ; int clusterParentage = m_topo_graph . getClusterParentage ( cluster ) ; if ( ( clusterParentage & id_a ) != 0 && ( clusterParentage & id_b ) == 0 ) { m_matrix [ predicate ] = 0 ; } |
public class CassandraDataHandlerBase {
    /**
     * Sets the field value on the entity from a thrift column value, choosing
     * between direct String assignment, Cassandra-type decomposition, or a raw
     * byte[] assignment.
     *
     * @param entity the entity
     * @param thriftColumnValue the thrift column value
     * @param attribute the attribute
     */
    private void setFieldValue(Object entity, Object thriftColumnValue, Attribute attribute) {
        if (attribute != null) {
            try {
                // NOTE(review): thriftColumnValue is dereferenced here without a
                // null check — a null column value would throw an NPE; confirm
                // callers guarantee non-null.
                if (thriftColumnValue.getClass().isAssignableFrom(String.class)) {
                    // Plain string column: assign directly.
                    PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(), (String) thriftColumnValue);
                } else if (CassandraDataTranslator.isCassandraDataTypeClass(((AbstractAttribute) attribute).getBindableJavaType())) {
                    // Known Cassandra data type: decompose into the field's Java type.
                    Object decomposed = null;
                    try {
                        Class<?> clazz = ((AbstractAttribute) attribute).getBindableJavaType();
                        decomposed = CassandraDataTranslator.decompose(clazz, thriftColumnValue, false);
                    } catch (Exception e) {
                        // Decomposition failure usually means a type mismatch in
                        // the entity mapping; surface it with context.
                        String tableName = entity.getClass().getSimpleName();
                        String fieldName = attribute.getName();
                        String msg = "Decomposing failed for `" + tableName + "`.`" + fieldName + "`, did you set the correct type in your entity class?";
                        log.error(msg, e);
                        throw new KunderaException(msg, e);
                    }
                    PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(), decomposed);
                } else {
                    // Fallback: treat the value as raw bytes.
                    PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(), (byte[]) thriftColumnValue);
                }
            } catch (PropertyAccessException pae) {
                // Setting the field is best-effort; log and continue.
                log.warn("Error while setting field value, Caused by: .", pae);
            }
        }
    }
}
public class ReteooRuleBuilder { /** * Adds a query pattern to the given subrule */
private void addInitialFactPattern ( final GroupElement subrule ) { } } | // creates a pattern for initial fact
final Pattern pattern = new Pattern ( 0 , ClassObjectType . InitialFact_ObjectType ) ; // adds the pattern as the first child of the given AND group element
subrule . addChild ( 0 , pattern ) ; |
public class TextValidator { /** * 验证15或18位身份证号码 */
public static boolean isIdCard ( @ Nullable CharSequence input ) { } } | return isMatch ( PATTERN_REGEX_ID_CARD15 , input ) || isMatch ( PATTERN_REGEX_ID_CARD18 , input ) ; |
public class UBLTRDocumentTypes { /** * Get the XSD Schema object for the UBLTR document type of the passed
* document element local name .
* @ param sLocalName
* The document element local name of any UBLTR document type . May be
* < code > null < / code > .
* @ return < code > null < / code > if no such UBLTR document type exists . */
@ Nullable public static Schema getSchemaOfLocalName ( @ Nullable final String sLocalName ) { } } | final EUBLTRDocumentType eDocType = getDocumentTypeOfLocalName ( sLocalName ) ; return eDocType == null ? null : eDocType . getSchema ( ) ; |
public class JobService { /** * Starts a job asynchronously in the background .
* @ param jobType the type of the job
* @ return the URI under which you can retrieve the status about the triggered job instance */
public Optional < String > startAsyncJob ( String jobType ) { } } | try { final JobRunnable jobRunnable = findJobRunnable ( jobType ) ; final JobInfo jobInfo = createJobInfo ( jobType ) ; jobMetaService . aquireRunLock ( jobInfo . getJobId ( ) , jobInfo . getJobType ( ) ) ; jobRepository . createOrUpdate ( jobInfo ) ; return Optional . of ( startAsync ( metered ( jobRunnable ) , jobInfo . getJobId ( ) ) ) ; } catch ( JobBlockedException e ) { LOG . info ( e . getMessage ( ) ) ; return Optional . empty ( ) ; } |
public class ImageSlideModel { /** * Return the splash title .
* @ return the title */
public String getTitle ( ) { } } | return getSlide ( ) . getContent ( ) == null || getSlide ( ) . getContent ( ) . isEmpty ( ) || getSlide ( ) . getContent ( ) . get ( 0 ) . getTitle ( ) == null ? null : getSlide ( ) . getContent ( ) . get ( 0 ) . getTitle ( ) . replaceAll ( "\\\\n" , "\n" ) ; |
public class BoxMetadataFilter { /** * Set a NumberRanger filter to the filter numbers , example : key = documentNumber , lt : 20 , gt : 5.
* @ param key the key that the filter should be looking for .
* @ param sizeRange the specific value that corresponds to the key . */
public void addNumberRangeFilter ( String key , SizeRange sizeRange ) { } } | JsonObject opObj = new JsonObject ( ) ; if ( sizeRange . getLowerBoundBytes ( ) != 0 ) { opObj . add ( "gt" , sizeRange . getLowerBoundBytes ( ) ) ; } if ( sizeRange . getUpperBoundBytes ( ) != 0 ) { opObj . add ( "lt" , sizeRange . getUpperBoundBytes ( ) ) ; } this . filtersList . add ( key , opObj ) ; |
public class ModelsImpl { /** * Updates the name of an entity extractor .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param entityId The entity extractor ID .
* @ param updateEntityOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the OperationStatus object */
public Observable < OperationStatus > updateEntityAsync ( UUID appId , String versionId , UUID entityId , UpdateEntityOptionalParameter updateEntityOptionalParameter ) { } } | return updateEntityWithServiceResponseAsync ( appId , versionId , entityId , updateEntityOptionalParameter ) . map ( new Func1 < ServiceResponse < OperationStatus > , OperationStatus > ( ) { @ Override public OperationStatus call ( ServiceResponse < OperationStatus > response ) { return response . body ( ) ; } } ) ; |
public class CmsResourceTypeXmlContainerPage { /** * Returns < code > true < / code > in case the given resource is a container page . < p >
* Internally this checks if the type id for the given resource is
* identical type id of the container page . < p >
* @ param resource the resource to check
* @ return < code > true < / code > in case the given resource is a container page */
public static boolean isContainerPage ( CmsResource resource ) { } } | boolean result = false ; if ( resource != null ) { result = ( resource . getTypeId ( ) == getContainerPageTypeIdSafely ( ) ) || ( OpenCms . getResourceManager ( ) . getResourceType ( resource ) instanceof CmsResourceTypeXmlContainerPage ) ; } return result ; |
public class TagLL { /** * Removes this tag from the chain , connecting prevTag and nextTag . Does not
* modify " this " object ' s pointers , so the caller can refer to nextTag after
* removing it . */
public void removeLL ( ) { } } | if ( head [ 0 ] == this ) head [ 0 ] = nextTag ; if ( prevTag != null ) { prevTag . nextTag = nextTag ; } if ( nextTag != null ) { nextTag . prevTag = prevTag ; } |
public class PatternMatcher { /** * This method takes a String key and a map from Strings to values of any type . During
* processing , the method will identify the most specific key in the map that matches
* the line . Once the correct is identified , its value is returned . Note that if the
* map contains the wildcard string " * " as a key , then it will serve as the " default "
* case , matching every line that does not match anything else .
* If no matching prefix is found , a { @ link IllegalStateException } will be thrown .
* Null keys are not allowed in the map .
* @ param line An input string
* @ return the value whose prefix matches the given line */
public S match ( String line ) { } } | S value = null ; Assert . notNull ( line , "A non-null key must be provided to match against." ) ; for ( String key : this . sorted ) { if ( PatternMatcher . match ( key , line ) ) { value = this . map . get ( key ) ; break ; } } if ( value == null ) { throw new IllegalStateException ( "Could not find a matching pattern for key=[" + line + "]" ) ; } return value ; |
public class Offering { /** * Used to add a new property to the offering
* @ param propertyName the name of the property
* @ param property the value of the property
* @ return true if the property was already present ( and it has been replaced ) , false otherwise */
public boolean addProperty ( String propertyName , String property ) { } } | boolean ret = this . properties . containsKey ( propertyName ) ; this . properties . put ( propertyName , property ) ; return ret ; |
public class Configuration { /** * This method allows to specify initial memory to be used within system .
* HOST : all data is located on host memory initially , and gets into DEVICE , if used frequent enough
* DEVICE : all memory is located on device .
* DELAYED : memory allocated on HOST first , and on first use gets moved to DEVICE
* PLEASE NOTE : For device memory all data still retains on host side as well .
* Default value : DEVICE
* @ param initialMemory
* @ return */
public Configuration setFirstMemory ( @ NonNull AllocationStatus initialMemory ) { } } | if ( initialMemory != AllocationStatus . DEVICE && initialMemory != AllocationStatus . HOST && initialMemory != AllocationStatus . DELAYED ) throw new IllegalStateException ( "First memory should be either [HOST], [DEVICE] or [DELAYED]" ) ; this . firstMemory = initialMemory ; return this ; |
public class FileUtil {
    /**
     * Writes a Map to a file, one key/value pair per line, with key and value
     * separated by {@code kvSeparator}.
     *
     * @param map the Map to write
     * @param file the target file
     * @param charset the charset encoding
     * @param kvSeparator separator between key and value; if {@code null} the
     *        default separator "=" is used
     * @param isAppend whether to append instead of overwrite
     * @return the target file
     * @throws IORuntimeException on I/O error
     * @since 4.0.5
     */
    public static File writeMap(Map<?, ?> map, File file, Charset charset, String kvSeparator, boolean isAppend) throws IORuntimeException {
        // Delegates to FileWriter, which handles separator defaulting and I/O.
        return FileWriter.create(file, charset).writeMap(map, kvSeparator, isAppend);
    }
}
public class BuildVersion {
    /**
     * Reads the pom file to get this version; only to be used for development
     * or within the IDE.
     *
     * @return the version from pom.xml, or {@code null} if it cannot be read.
     */
    private static String getVersionFromPom() {
        // Walk seven directories up from the compiled .class location to reach
        // the project root (assumes the standard target/classes layout).
        final String absolutePath = new File(BuildVersion.class.getResource(BuildVersion.class.getSimpleName() + ".class").getPath()).getParentFile().getParentFile().getParentFile().getParentFile().getParentFile().getParentFile().getParentFile().getAbsolutePath();
        final File file = new File(absolutePath + "/pom.xml");
        try (InputStreamReader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
            final MavenXpp3Reader xpp3Reader = new MavenXpp3Reader();
            Model model = xpp3Reader.read(reader);
            return model.getVersion();
        } catch (NoClassDefFoundError e) {
            // if you want to get the version possibly in development add in to your pom
            // pax-url-aether.jar
            return null;
        } catch (Exception e) {
            // Best-effort: any failure (missing pom, parse error) yields null.
            return null;
        }
    }
}
public class GetBackupPlanFromTemplateRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetBackupPlanFromTemplateRequest getBackupPlanFromTemplateRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getBackupPlanFromTemplateRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getBackupPlanFromTemplateRequest . getBackupPlanTemplateId ( ) , BACKUPPLANTEMPLATEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class BucketInfo { /** * A list of the calculated facet values and counts .
* @ param buckets
* A list of the calculated facet values and counts . */
public void setBuckets ( java . util . Collection < Bucket > buckets ) { } } | if ( buckets == null ) { this . buckets = null ; return ; } this . buckets = new com . amazonaws . internal . SdkInternalList < Bucket > ( buckets ) ; |
public class MongoTogglzRepository { /** * Persist the supplied feature state . The repository implementation must ensure that subsequent calls to
* { @ link # getFeatureState ( Feature ) } return the same state as persisted using this method .
* @ param featureState The feature state to persist
* @ throws UnsupportedOperationException if this state repository does not support updates */
@ Override public void setFeatureState ( final FeatureState featureState ) { } } | createOrUpdate ( featureState ) ; LOG . info ( ( ! isEmpty ( userProvider . getCurrentUser ( ) . getName ( ) ) ? "User '" + userProvider . getCurrentUser ( ) . getName ( ) + "'" : "Unknown user" ) + ( featureState . isEnabled ( ) ? " enabled " : " disabled " ) + "feature " + featureState . getFeature ( ) . name ( ) ) ; |
public class ResourceHandlerImpl { /** * Recursively unwarp the resource until we find the real resourceName
* This is needed because the JSF2 specced ResourceWrapper doesn ' t override
* the getResourceName ( ) method : (
* @ param resource
* @ return the first non - null resourceName or < code > null < / code > if none set */
private String getWrappedResourceName ( Resource resource ) { } } | String resourceName = resource . getResourceName ( ) ; if ( resourceName != null ) { return resourceName ; } if ( resource instanceof ResourceWrapper ) { return getWrappedResourceName ( ( ( ResourceWrapper ) resource ) . getWrapped ( ) ) ; } return null ; |
public class FileContent {
    /**
     * Checks whether the file should be treated as binary, by probing its
     * leading bytes for non-whitespace control characters.
     * <p>
     * NOTE(review): an empty file and any short read are both reported as
     * binary ({@code true}); for this class "binary" effectively means
     * "do not read line by line", but confirm that an empty file should not
     * be {@code false} instead.
     */
    private boolean isBinary() {
        try (InputStream in = getInputStream()) {
            long size = Files.size(file.toPath());
            if (size == 0) {
                // Empty file, so no need to check
                return true;
            }
            // Probe at most DEFAULT_PROBE_SIZE bytes from the start of the file.
            byte[] b = new byte[(size < StreamUtils.DEFAULT_PROBE_SIZE ? (int) size : StreamUtils.DEFAULT_PROBE_SIZE)];
            int read = in.read(b);
            if (read != b.length) {
                // Something went wrong, so better not to read line by line.
                // NOTE(review): a single read() may legitimately return fewer
                // bytes than requested; this can misclassify text as binary.
                return true;
            }
            return StreamUtils.isNonWhitespaceControlCharacter(b);
        } catch (IOException e) {
            // If cannot be checked, then considered as binary, so we do not
            // read line by line
            return true;
        }
    }
}
public class SimpleMonthView {
    /**
     * Draws the week and month day numbers for this month. Override this
     * method if you need different placement.
     *
     * @param canvas The canvas to draw on
     */
    protected void drawMonthNums(Canvas canvas) {
        // Baseline of the first row: vertically centered in the row, nudged up
        // by the separator width, below the month header.
        int y = (((mRowHeight + MINI_DAY_NUMBER_TEXT_SIZE) / 2) - DAY_SEPARATOR_WIDTH) + MONTH_HEADER_SIZE;
        // Half the width of one day cell, after horizontal padding.
        int dayWidthHalf = (mWidth - mPadding * 2) / (mNumDays * 2);
        // Column index of day 1 within the first week row.
        int j = findDayOffset();
        for (int dayNumber = 1; dayNumber <= mNumCells; dayNumber++) {
            // Center x of column j.
            int x = (2 * j + 1) * dayWidthHalf + mPadding;
            if (mSelectedDay == dayNumber) {
                // Highlight circle behind the selected day, raised slightly
                // above the text baseline.
                canvas.drawCircle(x, y - (MINI_DAY_NUMBER_TEXT_SIZE / 3), DAY_SELECTED_CIRCLE_SIZE, mSelectedCirclePaint);
            }
            // Today gets a distinct text color.
            mMonthNumPaint.setColor(mHasToday && mToday == dayNumber ? mTodayNumberColor : mDayTextColor);
            canvas.drawText(String.format("%d", dayNumber), x, y, mMonthNumPaint);
            j++;
            // Wrap to the next row after the last weekday column.
            if (j == mNumDays) {
                j = 0;
                y += mRowHeight;
            }
        }
    }
}
public class AbstractAliasDestinationHandler {
    /**
     * Chooses a point-to-point output handler by delegating to the resolved
     * target destination, since an alias destination does not store messages
     * itself. A non-null scoped ME set is an internal error for an alias.
     *
     * @see com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler#choosePtoPOutputHandler(com.ibm.ws.sib.mfp.JsDestinationAddress)
     */
    @Override
    public OutputHandler choosePtoPOutputHandler(SIBUuid8 fixedMEUuid, SIBUuid8 preferredMEUuid, boolean localMessage, boolean forcePut, HashSet<SIBUuid8> scopedMEs) throws SIRollbackException, SIConnectionLostException, SIResourceException, SIErrorException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "choosePtoPOutputHandler", new Object[] { fixedMEUuid, preferredMEUuid, localMessage, forcePut, scopedMEs });
        // We're an alias (or foreign destination) so we should never be called
        // with a scoped ME set.
        if (scopedMEs != null) {
            SIMPErrorException e = new SIMPErrorException("Alias called with scoped ME set");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                SibTr.exception(tc, e);
            }
            e.setExceptionReason(SIRCConstants.SIRC0901_INTERNAL_MESSAGING_ERROR);
            e.setExceptionInserts(new String[] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage", "1:290:1.67.1.22", SIMPUtils.getStackTrace(e) });
            // NOTE(review): this exit trace uses "choosePtoPOutputHandlers"
            // (trailing 's'), unlike the other trace points — likely a typo.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "choosePtoPOutputHandlers", e);
            throw e;
        }
        // Look to the resolved destination to choose an output handler as the
        // alias destination doesn't store any messages.
        // NOTE(review): forcePut is deliberately not propagated (false is
        // passed) and scopedMEs is passed as null — confirm this is intended.
        OutputHandler result = _targetDestinationHandler.choosePtoPOutputHandler(fixedMEUuid, preferredMEUuid, localMessage, false, null);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "choosePtoPOutputHandler", result);
        return result;
    }
}
public class MessageBirdClient { /** * Retrieves the information of an existing HLR . You only need to supply the unique message id that was returned upon creation or receiving .
* @ param hlrId ID as returned by getRequestHlr in the id variable
* @ return Hlr Object
* @ throws UnauthorizedException if client is unauthorized
* @ throws GeneralException general exception */
public Hlr getViewHlr ( final String hlrId ) throws GeneralException , UnauthorizedException , NotFoundException { } } | if ( hlrId == null ) { throw new IllegalArgumentException ( "Hrl ID must be specified." ) ; } return messageBirdService . requestByID ( HLRPATH , hlrId , Hlr . class ) ; |
public class CreateProductTemplates { /** * Runs the example .
* @ param adManagerServices the services factory .
* @ param session the session .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session ) throws RemoteException { } } | // Get the ProductTemplateService .
ProductTemplateServiceInterface productTemplateService = adManagerServices . get ( session , ProductTemplateServiceInterface . class ) ; // Get the NetworkService .
NetworkServiceInterface networkService = adManagerServices . get ( session , NetworkServiceInterface . class ) ; // Create a product template .
ProductTemplate productTemplate = new ProductTemplate ( ) ; productTemplate . setName ( "Product template #" + new Random ( ) . nextInt ( Integer . MAX_VALUE ) ) ; productTemplate . setDescription ( "This product template creates standard proposal line items " + "targeting Chrome browsers with product segmentation on ad units and geo targeting." ) ; // Set the name macro which will be used to generate the names of the products .
// This will create a segmentation based on the line item type , ad unit , and location .
productTemplate . setNameMacro ( "<line-item-type> - <ad-unit> - <template-name> - <location>" ) ; // Set the product type so the created proposal line items will be trafficked in Ad Manager .
productTemplate . setProductType ( ProductType . DFP ) ; // Set rate type to create CPM priced proposal line items .
productTemplate . setRateType ( RateType . CPM ) ; productTemplate . setDeliveryRateType ( DeliveryRateType . AS_FAST_AS_POSSIBLE ) ; // Optionally set the creative rotation of the product to serve one or more creatives .
productTemplate . setRoadblockingType ( RoadblockingType . ONE_OR_MORE ) ; // Create the master creative placeholder .
CreativePlaceholder creativeMasterPlaceholder = new CreativePlaceholder ( ) ; creativeMasterPlaceholder . setSize ( new Size ( 728 , 90 , false ) ) ; // Create companion creative placeholders .
CreativePlaceholder companionCreativePlaceholder = new CreativePlaceholder ( ) ; companionCreativePlaceholder . setSize ( new Size ( 300 , 250 , false ) ) ; // Set the size of creatives that can be associated with the product template .
productTemplate . setCreativePlaceholders ( new CreativePlaceholder [ ] { creativeMasterPlaceholder , companionCreativePlaceholder } ) ; // Set the type of proposal line item to be created from the product template .
productTemplate . setLineItemType ( LineItemType . STANDARD ) ; // Get the root ad unit ID used to target the whole site .
String rootAdUnitId = networkService . getCurrentNetwork ( ) . getEffectiveRootAdUnitId ( ) ; // Create ad unit targeting for the root ad unit ( i . e . the whole network ) .
AdUnitTargeting adUnitTargeting = new AdUnitTargeting ( ) ; adUnitTargeting . setAdUnitId ( rootAdUnitId ) ; adUnitTargeting . setIncludeDescendants ( true ) ; // Create geo targeting for the US .
Location countryLocation = new Location ( ) ; countryLocation . setId ( 2840L ) ; // Create geo targeting for Hong Kong .
Location regionLocation = new Location ( ) ; regionLocation . setId ( 2344L ) ; GeoTargeting geoTargeting = new GeoTargeting ( ) ; geoTargeting . setTargetedLocations ( new Location [ ] { countryLocation , regionLocation } ) ; // Add browser targeting to Chrome on the product template distinct from product segmentation .
Browser chromeBrowser = new Browser ( ) ; chromeBrowser . setId ( 500072L ) ; BrowserTargeting browserTargeting = new BrowserTargeting ( ) ; browserTargeting . setBrowsers ( new Browser [ ] { chromeBrowser } ) ; TechnologyTargeting technologyTargeting = new TechnologyTargeting ( ) ; technologyTargeting . setBrowserTargeting ( browserTargeting ) ; Targeting productTemplateTargeting = new Targeting ( ) ; productTemplateTargeting . setTechnologyTargeting ( technologyTargeting ) ; productTemplate . setBuiltInTargeting ( productTemplateTargeting ) ; // Allow placement targeting to be customized on the proposal line item .
CustomizableAttributes customizableAttributes = new CustomizableAttributes ( ) ; customizableAttributes . setAllowPlacementTargetingCustomization ( true ) ; productTemplate . setCustomizableAttributes ( customizableAttributes ) ; // Add inventory and geo targeting as product segmentation .
ProductSegmentation productSegmentation = new ProductSegmentation ( ) ; productSegmentation . setAdUnitSegments ( new AdUnitTargeting [ ] { adUnitTargeting } ) ; productSegmentation . setGeoSegment ( geoTargeting ) ; productTemplate . setProductSegmentation ( productSegmentation ) ; // Create the product template on the server .
ProductTemplate [ ] productTemplates = productTemplateService . createProductTemplates ( new ProductTemplate [ ] { productTemplate } ) ; for ( ProductTemplate createdProductTemplate : productTemplates ) { System . out . printf ( "A product template with ID %d and name '%s' was created.%n" , createdProductTemplate . getId ( ) , createdProductTemplate . getName ( ) ) ; } |
public class TiffDocument { /** * Gets an string with the value of the first tag matching the given tag name . < br >
* @ param name the tag name
* @ return the metadata single string */
public String getMetadataSingleString ( String name ) { } } | String s = "" ; if ( metadata == null ) createMetadataDictionary ( ) ; if ( metadata . contains ( name ) ) s = metadata . get ( name ) . toString ( ) ; return s ; |
public class POJOJaxWsWebEndpoint { /** * invokePOJOPreDestroy */
private void invokePOJOPreDestroy ( ) { } } | POJOJAXWSMethodInvoker tempInvoker = ( POJOJAXWSMethodInvoker ) server . getEndpoint ( ) . getService ( ) . getInvoker ( ) ; Object instance = tempInvoker . getServiceObject ( ) ; JaxWsInstanceManager jaxWsInstanceManager = jaxWsModuleMetaData . getJaxWsInstanceManager ( ) ; try { jaxWsInstanceManager . destroyInstance ( instance ) ; } catch ( InterceptException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "invoke POJO service PreDestroy fails:" + e . toString ( ) ) ; } } |
public class MtoNCollectionPrefetcher { /** * prefix the this class fk columns with the indirection table */
private String [ ] getFksToThisClass ( ) { } } | String indTable = getCollectionDescriptor ( ) . getIndirectionTable ( ) ; String [ ] fks = getCollectionDescriptor ( ) . getFksToThisClass ( ) ; String [ ] result = new String [ fks . length ] ; for ( int i = 0 ; i < result . length ; i ++ ) { result [ i ] = indTable + "." + fks [ i ] ; } return result ; |
public class GitlabAPI { /** * This will fail , if the given namespace is a user and not a group
* @ param namespaceId Namespace ID
* @ return A list of Gitlab Project members
* @ throws IOException on gitlab api call error */
public List < GitlabProjectMember > getNamespaceMembers ( Integer namespaceId ) throws IOException { } } | String tailUrl = GitlabGroup . URL + "/" + namespaceId + GitlabProjectMember . URL ; return Arrays . asList ( retrieve ( ) . to ( tailUrl , GitlabProjectMember [ ] . class ) ) ; |
public class MapBasedXPathFunctionResolver { /** * Remove the function with the specified name .
* @ param aName
* The name to be removed . May not be < code > null < / code > .
* @ param nArity
* The number of parameters of the function . Must be & ge ; 0.
* @ return { @ link EChange } */
@ Nonnull public EChange removeFunction ( @ Nonnull final QName aName , @ Nonnegative final int nArity ) { } } | final XPathFunctionKey aKey = new XPathFunctionKey ( aName , nArity ) ; return removeFunction ( aKey ) ; |
public class FactorFilter { /** * static function to generate an instance of the class . refer to the constructor for the param
* definitions . */
public static < T , V > FactorFilter < T , V > create ( final String factorName , final Filter < T , V > filter ) { } } | if ( null == factorName || factorName . length ( ) == 0 || null == filter ) { logger . error ( "failed to create instance of FactorFilter, at least one of the input paramters are invalid" ) ; return null ; } return new FactorFilter < > ( factorName , filter ) ; |
public class ClientSessionCache { /** * TODO Fix this . Schemata should be kept in a separate cache
* for each node !
* @ param cls Class name
* @ param node Node object
* @ return Schema object for a given Java class . */
@ Override public ZooClassDef getSchema ( Class < ? > cls , Node node ) { } } | ZooClassDef ret = nodeSchemata . get ( node ) . get ( cls ) ; if ( ret == null ) { if ( cls == null ) { return null ; } // Try virtual / generic schemata
ret = getSchema ( cls . getName ( ) ) ; if ( ret != null ) { // check ( associate also checks compatibility )
ret . associateJavaTypes ( true ) ; nodeSchemata . get ( node ) . put ( cls , ret ) ; } } return ret ; |
public class DOTranslationUtility { /** * The audit record is created by the system , so programmatic validation
* here is o . k . Normally , validation takes place via XML Schema and
* Schematron .
* @ param audit
* @ throws ObjectIntegrityException */
protected static void validateAudit ( AuditRecord audit ) throws ObjectIntegrityException { } } | if ( audit . id == null || audit . id . isEmpty ( ) ) { throw new ObjectIntegrityException ( "Audit record must have id." ) ; } if ( audit . date == null ) { throw new ObjectIntegrityException ( "Audit record must have date." ) ; } if ( audit . processType == null || audit . processType . isEmpty ( ) ) { throw new ObjectIntegrityException ( "Audit record must have processType." ) ; } if ( audit . action == null || audit . action . isEmpty ( ) ) { throw new ObjectIntegrityException ( "Audit record must have action." ) ; } if ( audit . componentID == null ) { audit . componentID = "" ; // for backwards compatibility , no error on null
// throw new ObjectIntegrityException ( " Audit record must have componentID . " ) ;
} if ( audit . responsibility == null || audit . responsibility . isEmpty ( ) ) { throw new ObjectIntegrityException ( "Audit record must have responsibility." ) ; } |
public class MapComposedElement { /** * Set the specified point at the given index in the specified group .
* @ param groupIndex is the index of the group
* @ param indexInGroup is the index of the ponit in the group ( 0 for the
* first point of the group . . . ) .
* @ param x is the new value .
* @ param y is the new value .
* @ return < code > true < / code > if the point was set , < code > false < / code > if
* the specified coordinates correspond to the already existing point .
* @ throws IndexOutOfBoundsException in case of error */
public final boolean setPointAt ( int groupIndex , int indexInGroup , double x , double y ) { } } | return setPointAt ( groupIndex , indexInGroup , x , y , false ) ; |
public class MathTools {
    /**
     * Analog of Math.min that returns the smallest double value in an array of
     * double. (The previous javadoc erroneously said "largest"; the
     * implementation returns the minimum.)
     *
     * @param values the values to be searched for the smallest value among them;
     *               must contain at least one element
     * @return the smallest value among a set of given values
     * @throws ArrayIndexOutOfBoundsException if {@code values} is empty
     */
    static double min(double[] values) {
        double min = values[0];
        // Start at index 1: values[0] is already the running minimum.
        // Uses '<' so NaN entries are skipped (not propagated), matching the
        // original comparison semantics.
        for (int i = 1; i < values.length; i++) {
            if (values[i] < min) {
                min = values[i];
            }
        }
        return min;
    }
}
public class Check { /** * Ensures that a passed { @ code byte } is less than another { @ code byte } .
* @ param expected
* Expected value
* @ param check
* Comparable to be checked
* @ param message
* an error message describing why the comparables must be less than a value ( will be passed to
* { @ code IllegalNotLessThanException } )
* @ return the passed { @ code Comparable } argument { @ code check }
* @ throws IllegalNotLesserThanException
* if the argument value { @ code check } is not lesser than value { @ code expected } */
@ ArgumentsChecked @ Throws ( IllegalNotLesserThanException . class ) public static byte lesserThan ( final byte expected , final byte check , @ Nonnull final String message ) { } } | if ( expected <= check ) { throw new IllegalNotLesserThanException ( message , check ) ; } return check ; |
public class LoggerDefinitionVersion { /** * A list of loggers .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLoggers ( java . util . Collection ) } or { @ link # withLoggers ( java . util . Collection ) } if you want to override
* the existing values .
* @ param loggers
* A list of loggers .
* @ return Returns a reference to this object so that method calls can be chained together . */
public LoggerDefinitionVersion withLoggers ( Logger ... loggers ) { } } | if ( this . loggers == null ) { setLoggers ( new java . util . ArrayList < Logger > ( loggers . length ) ) ; } for ( Logger ele : loggers ) { this . loggers . add ( ele ) ; } return this ; |
public class ParosTableContext { /** * / * ( non - Javadoc )
* @ see org . parosproxy . paros . db . paros . TableContext # delete ( int , int , java . lang . String ) */
@ Override public synchronized void delete ( int contextId , int type , String data ) throws DatabaseException { } } | try { psDeleteData . setInt ( 1 , contextId ) ; psDeleteData . setInt ( 2 , type ) ; psDeleteData . setString ( 3 , data ) ; psDeleteData . executeUpdate ( ) ; } catch ( SQLException e ) { throw new DatabaseException ( e ) ; } |
public class EGRImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . EGR__GDO_NAME : setGdoName ( ( String ) newValue ) ; return ; case AfplibPackage . EGR__TRIPLETS : getTriplets ( ) . clear ( ) ; getTriplets ( ) . addAll ( ( Collection < ? extends Triplet > ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class Values { /** * Create values for owner type and id . */
@ Override @ Path ( "/{ownerType}/{ownerId}" ) @ ApiOperation ( value = "Create values for an ownerType and ownerId" , response = StatusMessage . class ) @ ApiImplicitParams ( { } } | @ ApiImplicitParam ( name = "Values" , paramType = "body" ) } ) public JSONObject post ( String path , JSONObject content , Map < String , String > headers ) throws ServiceException , JSONException { return put ( path , content , headers ) ; |
public class AbstractList { /** * ( non - Javadoc )
* @ see com . ibm . ws . objectManager . List # listIterator ( long , com . ibm . ws . objectManager . Transaction ) */
public ListIterator listIterator ( long index , Transaction transaction ) throws ObjectManagerException { } } | if ( index < 0 ) throw new IndexOutOfBoundsException ( "Index: " + index ) ; ListIterator listIterator = listIterator ( ) ; try { for ( long i = 0 ; i < index ; i ++ ) listIterator . next ( transaction ) ; } catch ( java . util . NoSuchElementException exception ) { // No FFDC code needed .
throw new IndexOutOfBoundsException ( "Index: " + index ) ; } // try .
return listIterator ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.