signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class XTimeBoundsImpl { /** * / * ( non - Javadoc ) * @ see org . deckfour . xes . summary . XTimeBoundaries # isWithin ( java . util . Date ) */ public boolean isWithin ( Date date ) { } }
if ( first == null ) { return false ; } else if ( date . equals ( first ) ) { return true ; } else if ( date . equals ( last ) ) { return true ; } else if ( date . after ( first ) && date . before ( last ) ) { return true ; } else { return false ; }
public class ArrayUtilities {

    /**
     * Convert a Collection to a Java (typed) array[].
     *
     * @param classToCastTo array component type (Object.class, Person.class, etc.)
     * @param c             Collection containing items to be placed into the array
     * @param <T>           component type of the array
     * @return array of type T[] containing the items from collection 'c'
     */
    @SuppressWarnings("unchecked")
    public static <T> T[] toArray(Class<T> classToCastTo, Collection c) {
        // Collection.toArray(T[]) already copies every element into the supplied
        // array and returns that very array when it is exactly the right size
        // (which it is: we allocate with c.size()). The original follow-up loop
        // that re-set each element via Array.set() was therefore redundant and
        // has been removed.
        return (T[]) c.toArray((T[]) Array.newInstance(classToCastTo, c.size()));
    }
}
public class LiteralType {

    /**
     * Accept.
     *
     * @param <R> the result type produced by the visitor
     * @param <P> the parameter type passed to the visitor
     * @param v the visitor
     * @param p the visitor parameter
     * @return always {@code null} -- this implementation never dispatches to
     *         {@code v}. NOTE(review): confirm that skipping the visitor
     *         callback is intentional for literal types.
     */
    public <R, P> R accept(TypeVisitor<R, P> v, P p) {
        return null;
    }
}
public class SettingsInMemory { /** * Get a Settings property as an Integer object . * @ param key the Key Name of the numeric property to be returned . * @ return Value of the property as an Integer or null if no property found . */ public Integer getInteger ( String key ) { } }
Integer iVal = null ; String sVal = getText ( key ) ; if ( ( sVal != null ) && ( sVal . length ( ) > 0 ) ) { iVal = Integer . valueOf ( sVal ) ; } return iVal ;
public class VariableResolver {

    /**
     * Returns the set of variable names referenced as <code>${name}</code>
     * inside the given value.
     *
     * @param value value to parse - may be <code>null</code>
     * @return referenced variable names - never <code>null</code>, but may be empty
     */
    public static Set<String> references(final String value) {
        final Set<String> names = new HashSet<String>();
        if (value == null || value.length() == 0) {
            return names;
        }
        int from = 0;
        while (true) {
            final int start = value.indexOf("${", from);
            if (start < 0) {
                // No further references.
                break;
            }
            final int end = value.indexOf('}', start + 1);
            if (end < 0) {
                // Dangling "${" with no closing bracket: ignore it and stop.
                break;
            }
            names.add(value.substring(start + 2, end));
            from = end + 1;
        }
        return names;
    }
}
public class DescribeProvisioningParametersResult { /** * Information about the TagOptions associated with the resource . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTagOptions ( java . util . Collection ) } or { @ link # withTagOptions ( java . util . Collection ) } if you want to * override the existing values . * @ param tagOptions * Information about the TagOptions associated with the resource . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeProvisioningParametersResult withTagOptions ( TagOptionSummary ... tagOptions ) { } }
if ( this . tagOptions == null ) { setTagOptions ( new java . util . ArrayList < TagOptionSummary > ( tagOptions . length ) ) ; } for ( TagOptionSummary ele : tagOptions ) { this . tagOptions . add ( ele ) ; } return this ;
public class ICUResourceBundleReader {

    /**
     * Initializes this reader from the raw resource-bundle bytes: validates the
     * ICU data header, reads the variable-length indexes[] array, eagerly loads
     * the key-string bytes, and maps the 16-bit-unit region.
     * See res_init() in ICU4C/source/common/uresdata.c.
     *
     * @param inBytes raw bundle data, positioned at the ICU data header
     * @throws IOException if the header is missing or not acceptable
     */
    private void init(ByteBuffer inBytes) throws IOException {
        dataVersion = ICUBinary.readHeader(inBytes, DATA_FORMAT, IS_ACCEPTABLE);
        // Byte 16 of the header carries the major format version.
        int majorFormatVersion = inBytes.get(16);
        bytes = ICUBinary.sliceWithOrder(inBytes);
        int dataLength = bytes.remaining();
        if (DEBUG) System.out.println("The ByteBuffer is direct (memory-mapped): " + bytes.isDirect());
        if (DEBUG) System.out.println("The available bytes in the buffer before reading the data: " + dataLength);
        rootRes = bytes.getInt(0);
        // Bundles with formatVersion 1.1 and later contain an indexes[] array.
        // We need it so that we can read the key string bytes up front, for lookup performance.
        // read the variable-length indexes[] array
        int indexes0 = getIndexesInt(URES_INDEX_LENGTH);
        // Low byte of indexes[0] is the number of index words present.
        int indexLength = indexes0 & 0xff;
        if (indexLength <= URES_INDEX_MAX_TABLE_LENGTH) {
            throw new ICUException("not enough indexes");
        }
        int bundleTop;
        // Sanity-check: the buffer must hold the indexes array and the whole
        // bundle (bundleTop is expressed in 32-bit words, hence << 2).
        if (dataLength < ((1 + indexLength) << 2) ||
                dataLength < ((bundleTop = getIndexesInt(URES_INDEX_BUNDLE_TOP)) << 2)) {
            throw new ICUException("not enough bytes");
        }
        int maxOffset = bundleTop - 1;
        if (majorFormatVersion >= 3) {
            // In formatVersion 1, the indexLength took up this whole int.
            // In version 2, bits 31..8 were reserved and always 0.
            // In version 3, they contain bits 23..0 of the poolStringIndexLimit.
            // Bits 27..24 are in indexes[URES_INDEX_ATTRIBUTES] bits 15..12.
            poolStringIndexLimit = indexes0 >>> 8;
        }
        if (indexLength > URES_INDEX_ATTRIBUTES) {
            // determine if this resource bundle falls back to a parent bundle
            // along normal locale ID fallback
            int att = getIndexesInt(URES_INDEX_ATTRIBUTES);
            noFallback = (att & URES_ATT_NO_FALLBACK) != 0;
            isPoolBundle = (att & URES_ATT_IS_POOL_BUNDLE) != 0;
            usesPoolBundle = (att & URES_ATT_USES_POOL_BUNDLE) != 0;
            poolStringIndexLimit |= (att & 0xf000) << 12; // bits 15..12 -> 27..24
            poolStringIndex16Limit = att >>> 16;
        }
        int keysBottom = 1 + indexLength;
        int keysTop = getIndexesInt(URES_INDEX_KEYS_TOP);
        if (keysTop > keysBottom) {
            // Deserialize the key strings up front.
            // Faster table item search at the cost of slower startup and some heap memory.
            if (isPoolBundle) {
                // Shift the key strings down:
                // Pool bundle key strings are used with a 0-based index,
                // unlike regular bundles' key strings for which indexes
                // are based on the start of the bundle data.
                keyBytes = new byte[(keysTop - keysBottom) << 2];
                bytes.position(keysBottom << 2);
            } else {
                localKeyLimit = keysTop << 2;
                keyBytes = new byte[localKeyLimit];
            }
            bytes.get(keyBytes);
        }
        // Read the array of 16-bit units.
        if (indexLength > URES_INDEX_16BIT_TOP) {
            int _16BitTop = getIndexesInt(URES_INDEX_16BIT_TOP);
            if (_16BitTop > keysTop) {
                // Region between keysTop and _16BitTop holds 16-bit units
                // (two per 32-bit word).
                int num16BitUnits = (_16BitTop - keysTop) * 2;
                bytes.position(keysTop << 2);
                b16BitUnits = bytes.asCharBuffer();
                b16BitUnits.limit(num16BitUnits);
                maxOffset |= num16BitUnits - 1;
            } else {
                b16BitUnits = EMPTY_16_BIT_UNITS;
            }
        } else {
            b16BitUnits = EMPTY_16_BIT_UNITS;
        }
        if (indexLength > URES_INDEX_POOL_CHECKSUM) {
            poolCheckSum = getIndexesInt(URES_INDEX_POOL_CHECKSUM);
        }
        // Pool bundles that only hold key strings do not need a resource cache.
        if (!isPoolBundle || b16BitUnits.length() > 1) {
            resourceCache = new ResourceCache(maxOffset);
        }
        // Reset the position for future .asCharBuffer() etc.
        bytes.position(0);
    }
}
public class PropertiesEscape {

    /**
     * Perform a (configurable) Java Properties Value <strong>escape</strong> operation on a <tt>String</tt> input.
     * <p>
     * This method will perform an escape operation according to the specified
     * {@link org.unbescape.properties.PropertiesValueEscapeLevel} argument value.
     * All other <tt>String</tt>-based <tt>escapePropertiesValue*(...)</tt> methods call this one with
     * preconfigured <tt>level</tt> values.
     * <p>
     * This method is <strong>thread-safe</strong>.
     *
     * @param text  the <tt>String</tt> to be escaped.
     * @param level the escape level to be applied, see {@link org.unbescape.properties.PropertiesValueEscapeLevel}.
     * @return The escaped result <tt>String</tt>. As a memory-performance improvement, will return the exact
     *         same object as the <tt>text</tt> input argument if no escaping modifications were required (and
     *         no additional <tt>String</tt> objects will be created during processing). Will
     *         return <tt>null</tt> if input is <tt>null</tt>.
     * @throws IllegalArgumentException if {@code level} is {@code null}
     */
    public static String escapePropertiesValue(final String text, final PropertiesValueEscapeLevel level) {
        // Only the level is validated here; a null text is passed through and
        // handled by the delegate (which returns null per the contract above).
        if (level == null) {
            throw new IllegalArgumentException("The 'level' argument cannot be null");
        }
        return PropertiesValueEscapeUtil.escape(text, level);
    }
}
public class RandomMatrices_DSCC { /** * Creates a triangular matrix where the amount of fill is randomly selected too . * @ param upper true for upper triangular and false for lower * @ param N number of rows and columns * er * @ param minFill minimum fill fraction * @ param maxFill maximum fill fraction * @ param rand random number generator * @ return Random matrix */ public static DMatrixSparseCSC triangle ( boolean upper , int N , double minFill , double maxFill , Random rand ) { } }
int nz = ( int ) ( ( ( N - 1 ) * ( N - 1 ) / 2 ) * ( rand . nextDouble ( ) * ( maxFill - minFill ) + minFill ) ) + N ; if ( upper ) { return triangleUpper ( N , 0 , nz , - 1 , 1 , rand ) ; } else { return triangleLower ( N , 0 , nz , - 1 , 1 , rand ) ; }
public class EntityDescFactory { /** * エンティティ記述を作成します 。 * @ param tableMeta テーブルメタデータ * @ param entityPrefix エンティティクラスのプリフィックス * @ param entitySuffix エンティティクラスのサフィックス * @ return エンティティ記述 */ public EntityDesc createEntityDesc ( TableMeta tableMeta , String entityPrefix , String entitySuffix ) { } }
String name = StringUtil . fromSnakeCaseToCamelCase ( tableMeta . getName ( ) ) ; return createEntityDesc ( tableMeta , entityPrefix , entitySuffix , StringUtil . capitalize ( name ) ) ;
public class SignerBuilder {

    /**
     * Builds and returns the {@link Signer} instance. If signing client isn't configured,
     * {@link NullPointerException} is thrown.
     */
    public Signer build() {
        // The signing service is mandatory (throws per the contract above).
        Util.notNull(signingService, "KSI signing service");
        // Default the hash algorithm to SHA2-256 when none was configured.
        if (defaultHashAlgorithm == null) {
            this.defaultHashAlgorithm = HashAlgorithm.SHA2_256;
        }
        // NOTE(review): presumably rejects algorithms past their validity
        // period -- see HashAlgorithm.checkExpiration for the exact contract.
        defaultHashAlgorithm.checkExpiration();
        // Default to the internal verification policy when none was configured.
        if (policy == null) {
            this.policy = ContextAwarePolicyAdapter.createInternalPolicy();
        }
        KSISignatureComponentFactory signatureComponentFactory = new InMemoryKsiSignatureComponentFactory();
        KSISignatureFactory uniSignatureFactory = new InMemoryKsiSignatureFactory(policy, signatureComponentFactory);
        return new SignerImpl(signingService, uniSignatureFactory, defaultHashAlgorithm);
    }
}
public class RetryingHttpClientBuilder { /** * Returns a newly - created { @ link RetryingHttpClient } based on the properties of this builder . */ @ Override public RetryingHttpClient build ( Client < HttpRequest , HttpResponse > delegate ) { } }
if ( needsContentInStrategy ) { return new RetryingHttpClient ( delegate , retryStrategyWithContent ( ) , maxTotalAttempts ( ) , responseTimeoutMillisForEachAttempt ( ) , useRetryAfter , contentPreviewLength ) ; } return new RetryingHttpClient ( delegate , retryStrategy ( ) , maxTotalAttempts ( ) , responseTimeoutMillisForEachAttempt ( ) , useRetryAfter ) ;
public class JDayChooser { /** * Draws the day names of the day columnes . */ private void drawDayNames ( ) { } }
int firstDayOfWeek = calendar . getFirstDayOfWeek ( ) ; DateFormatSymbols dateFormatSymbols = new DateFormatSymbols ( locale ) ; dayNames = dateFormatSymbols . getShortWeekdays ( ) ; int day = firstDayOfWeek ; for ( int i = 0 ; i < 7 ; i ++ ) { if ( maxDayCharacters > 0 && maxDayCharacters < 5 ) { if ( dayNames [ day ] . length ( ) >= maxDayCharacters ) { dayNames [ day ] = dayNames [ day ] . substring ( 0 , maxDayCharacters ) ; } } days [ i ] . setText ( dayNames [ day ] ) ; if ( day == 1 ) { days [ i ] . setForeground ( sundayForeground ) ; } else { days [ i ] . setForeground ( weekdayForeground ) ; } if ( day < 7 ) { day ++ ; } else { day -= 6 ; } }
public class SessionUtils { /** * Are we to support AngularDart instead of AngularJS ? * @ return true if AngularDart is used */ public static boolean isDartControllerActive ( ) { } }
Map < String , Object > sessionMap = FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getSessionMap ( ) ; if ( sessionMap . containsKey ( DART_CONTROLLER ) ) { return "true" . equals ( sessionMap . get ( DART_CONTROLLER ) ) ; } return false ;
public class Router {

    /**
     * Pushes a new {@link Controller} to the backstack
     *
     * @param transaction The transaction detailing what should be pushed, including the {@link Controller},
     *                    and its push and pop {@link ControllerChangeHandler}, and its tag.
     */
    @UiThread
    public void pushController(@NonNull RouterTransaction transaction) {
        ThreadUtils.ensureMainThread();
        // Capture the current top of the backstack BEFORE pushing; it is the
        // "from" side of the controller change (may be null for the first push).
        RouterTransaction from = backstack.peek();
        pushToBackstack(transaction);
        // 'true' indicates a push-direction change.
        performControllerChange(transaction, from, true);
    }
}
public class StringUtils {

    /**
     * <p>Checks if the CharSequence contains only Unicode digits.
     * A decimal point is not a Unicode digit and returns false.
     * <p>{@code null} will return {@code false}.
     * An empty CharSequence (length() = 0) will return {@code false}.
     * <p>Note that the method does not allow for a leading sign, either positive or negative.
     * Also, if a String passes the numeric test, it may still generate a NumberFormatException
     * when parsed by Integer.parseInt or Long.parseLong, e.g. if the value is outside the range
     * for int or long respectively.
     * <pre>
     * StringUtils.isNumeric(null)             = false
     * StringUtils.isNumeric("")               = false
     * StringUtils.isNumeric("  ")             = false
     * StringUtils.isNumeric("123")            = true
     * StringUtils.isNumeric("\u0967\u0968\u0969") = true
     * StringUtils.isNumeric("12 3")           = false
     * StringUtils.isNumeric("ab2c")           = false
     * StringUtils.isNumeric("12-3")           = false
     * StringUtils.isNumeric("12.3")           = false
     * StringUtils.isNumeric("-123")           = false
     * StringUtils.isNumeric("+123")           = false
     * </pre>
     *
     * @param cs the CharSequence to check, may be null
     * @return {@code true} if only contains digits, and is non-null
     * @since 3.0 Changed signature from isNumeric(String) to isNumeric(CharSequence)
     * @since 3.0 Changed "" to return false and not true
     */
    public static boolean isNumeric(final String cs) {
        // Replaces Guava's Strings.isNullOrEmpty with the equivalent stdlib
        // check, removing an unnecessary third-party dependency.
        if (cs == null || cs.isEmpty()) {
            return false;
        }
        final int sz = cs.length();
        for (int i = 0; i < sz; i++) {
            if (!Character.isDigit(cs.charAt(i))) {
                return false;
            }
        }
        return true;
    }
}
public class PeepholeRemoveDeadCode {

    /**
     * Remove try blocks without catch blocks and with empty or not
     * existent finally blocks.
     * Or, only leave the finally blocks if try body blocks are empty
     *
     * @return the replacement node, if changed, or the original if not.
     *         NOTE: when the try body is empty and there is no finally block,
     *         this returns {@code null} (the TRY is simply detached).
     */
    private Node tryFoldTry(Node n) {
        checkState(n.isTry(), n);
        // TRY node children: body, catch block, optional finally block.
        Node body = n.getFirstChild();
        Node catchBlock = body.getNext();
        Node finallyBlock = catchBlock.getNext();
        // Removes TRYs that had its CATCH removed and/or empty FINALLY:
        // replace the whole TRY with just its body.
        if (!catchBlock.hasChildren() && (finallyBlock == null || !finallyBlock.hasChildren())) {
            n.removeChild(body);
            n.replaceWith(body);
            reportChangeToEnclosingScope(body);
            return body;
        }
        // Only leave FINALLYs if TRYs are empty.
        if (!body.hasChildren()) {
            // Hoist var declarations out of the catch block before dropping it.
            NodeUtil.redeclareVarsInsideBranch(catchBlock);
            reportChangeToEnclosingScope(n);
            if (finallyBlock != null) {
                n.removeChild(finallyBlock);
                n.replaceWith(finallyBlock);
            } else {
                n.detach();
            }
            return finallyBlock;
        }
        return n;
    }
}
public class AnnotatedElementMap {

    /**
     * Overrides the standard Serialization readObject implementation to reassociate with the
     * target AnnotatedElement after deserialization. The element descriptor
     * (_elemDesc) encodes what to look up on _elemClass: null means the class
     * itself, a name without '(' means a field, otherwise "name(argTypes)"
     * identifies a method.
     *
     * @throws IOException            if the described field/method cannot be found
     * @throws ClassNotFoundException propagated from default deserialization
     */
    private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        if (_elemDesc == null) // element is a Class
            _annotElem = _elemClass;
        else {
            int argsIndex = _elemDesc.indexOf('(');
            if (argsIndex < 0) // element is a Field
            {
                try {
                    _annotElem = _elemClass.getDeclaredField(_elemDesc);
                } catch (NoSuchFieldException nsfe) {
                    // NOTE(review): this concatenates the exception, not the
                    // field name -- message reads oddly; confirm intended.
                    throw new IOException("Unable to locate field " + nsfe);
                }
            } else // element is a method
            {
                String methodName = _elemDesc.substring(0, argsIndex);
                if (_elemDesc.charAt(argsIndex + 1) == ')') {
                    // At least handle the null args case quickly
                    try {
                        _annotElem = _elemClass.getDeclaredMethod(methodName, new Class[] {});
                    } catch (NoSuchMethodException nsme) {
                        throw new IOException("Unable to locate method " + _elemDesc);
                    }
                } else {
                    // Linear search for the rest :(
                    // Compare the descriptor's argument-type string against
                    // each declared method's argument string.
                    String methodArgs = _elemDesc.substring(argsIndex + 1, _elemDesc.length() - 1);
                    Method[] methods = _elemClass.getDeclaredMethods();
                    for (int i = 0; i < methods.length; i++) {
                        if (methods[i].getName().equals(methodName)
                                && getMethodArgs(methods[i]).equals(methodArgs)) {
                            _annotElem = methods[i];
                            break;
                        }
                    }
                    if (_annotElem == null) {
                        throw new IOException("Unable to locate method " + _elemDesc);
                    }
                }
            }
        }
    }
}
public class SquareCrossClustersIntoGrids {

    /**
     * Converts the list of rows into a grid. Since it is a chessboard pattern some of the grid
     * elements will be null.
     *
     * @param listRows rows of nodes; must contain at least two rows
     * @return the assembled grid, or null if a row's element count does not
     *         match the expected chessboard layout
     */
    private SquareGrid assembleGrid(List<List<SquareNode>> listRows) {
        SquareGrid grid = grids.grow();
        grid.reset();
        List<SquareNode> row0 = listRows.get(0);
        List<SquareNode> row1 = listRows.get(1);
        // Parity of the first row is inferred from the corner node's connections.
        int offset = row0.get(0).getNumberOfConnections() == 1 ? 0 : 1;
        grid.columns = row0.size() + row1.size();
        grid.rows = listRows.size();
        // initialize grid to null
        for (int i = 0; i < grid.columns * grid.rows; i++) {
            grid.nodes.add(null);
        }
        // fill in the grid
        for (int row = 0; row < listRows.size(); row++) {
            List<SquareNode> list = listRows.get(row);
            // NOTE(review): due to Java precedence this parses as
            // ((offset - row % 2) == 0) ? 0 : 1, NOT offset - (row%2==0 ? 0:1).
            // Confirm that is the intended alternating start column.
            int startCol = offset - row % 2 == 0 ? 0 : 1;
            // make sure there is the expected number of elements in the row
            // (adjustedLength - adjustedLength/2 == ceil(adjustedLength/2),
            // i.e. every other cell starting at startCol).
            int adjustedLength = grid.columns - startCol;
            if ((adjustedLength) - adjustedLength / 2 != list.size()) {
                return null;
            }
            int listIndex = 0;
            // Place the row's nodes on every second column (chessboard pattern).
            for (int col = startCol; col < grid.columns; col += 2) {
                grid.set(row, col, list.get(listIndex++));
            }
        }
        return grid;
    }
}
public class MapLens {

    /**
     * A lens that focuses on a value at a key in a map, as a {@link Maybe}, and produces a subtype <code>M</code> on
     * the way back out.
     *
     * @param <M>    the map subtype
     * @param <K>    the key type
     * @param <V>    the value type
     * @param k      the key to focus on
     * @param copyFn the copy function
     * @return a lens that focuses on the value at key, as a {@link Maybe}
     */
    public static <M extends Map<K, V>, K, V> Lens<Map<K, V>, M, Maybe<V>, Maybe<V>> valueAt(Function<? super Map<K, V>, ? extends M> copyFn, K k) {
        // Getter: wrap map.get(k) (possibly null) in a Maybe.
        // Setter: copy the map via copyFn, then either put the present value
        // or remove the key when the Maybe is empty, running the IO eagerly.
        return lens(m -> maybe(m.get(k)), (m, maybeV) -> maybeV.<Fn1<M, IO<M>>>fmap(v -> alter(updated -> updated.put(k, v))).orElse(alter(updated -> updated.remove(k))).apply(copyFn.apply(m)).unsafePerformIO());
    }
}
public class QueryControllerTreeModel { /** * Inserts a new node group or query into the Tree */ public void elementAdded ( QueryControllerEntity element ) { } }
if ( element instanceof QueryControllerGroup ) { QueryControllerGroup group = ( QueryControllerGroup ) element ; QueryGroupTreeElement ele = new QueryGroupTreeElement ( group . getID ( ) ) ; insertNodeInto ( ele , ( DefaultMutableTreeNode ) root , root . getChildCount ( ) ) ; nodeStructureChanged ( root ) ; } else if ( element instanceof QueryControllerQuery ) { QueryControllerQuery query = ( QueryControllerQuery ) element ; QueryTreeElement ele = new QueryTreeElement ( query . getID ( ) , query . getQuery ( ) ) ; insertNodeInto ( ele , ( DefaultMutableTreeNode ) root , root . getChildCount ( ) ) ; nodeStructureChanged ( root ) ; }
public class AbstractJavaMetadata {

    /**
     * Process together modifiers and variables of a {@link FieldDeclaration}.
     * Both passes write into the same metadata instance: first the declaration's
     * modifiers, then its variable declaration fragments.
     *
     * @param fieldDeclaration       the field declaration instance
     * @param arrayTypeFieldMetadata the metadata to populate
     */
    private void processModifiersAndVariablesOfFieldDeclaration(FieldDeclaration fieldDeclaration, FieldMetadata arrayTypeFieldMetadata) {
        processModifiersOfFieldDeclaration(fieldDeclaration, arrayTypeFieldMetadata);
        processVariablesOfVariableDeclarationFragment(fieldDeclaration, arrayTypeFieldMetadata);
    }
}
public class EhCacheWrapper { /** * ( non - Javadoc ) * @ see javax . persistence . Cache # evict ( java . lang . Class , java . lang . Object ) */ @ Override public void evict ( Class arg0 , Object arg1 ) { } }
// TODO Can we use Class with ehcache if ( isAlive ( ) ) ehcache . remove ( arg1 ) ;
public class LdapUtils {

    /**
     * Load a LDAP definition from the Yaml config (IdentityProviderBootstrap).
     * Copies each recognized "ldap.*" entry from the flat config map onto a new
     * {@link LdapIdentityProviderDefinition}; profile-specific keys are applied
     * according to the configured profile file.
     */
    public static LdapIdentityProviderDefinition fromConfig(Map<String, Object> ldapConfig) {
        Assert.notNull(ldapConfig);
        LdapIdentityProviderDefinition definition = new LdapIdentityProviderDefinition();
        // NOTE(review): unreachable null branch -- Assert.notNull above already
        // rejects null; only the isEmpty() half of this guard can trigger.
        if (ldapConfig == null || ldapConfig.isEmpty()) {
            return definition;
        }
        // --- Generic, profile-independent attributes (only set when present) ---
        if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_STORE_CUSTOM_ATTRIBUTES) != null) {
            definition.setStoreCustomAttributes((boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_STORE_CUSTOM_ATTRIBUTES));
        }
        if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_EMAIL_DOMAIN) != null) {
            definition.setEmailDomain((List<String>) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_EMAIL_DOMAIN));
        }
        if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_EXTERNAL_GROUPS_WHITELIST) != null) {
            definition.setExternalGroupsWhitelist((List<String>) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_EXTERNAL_GROUPS_WHITELIST));
        }
        if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_ATTRIBUTE_MAPPINGS) != null) {
            definition.setAttributeMappings((Map<String, Object>) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_ATTRIBUTE_MAPPINGS));
        }
        if (ldapConfig.get("ldap.addShadowUserOnLogin") != null) {
            definition.setAddShadowUserOnLogin((boolean) ldapConfig.get("ldap.addShadowUserOnLogin"));
        }
        // --- Profile-specific user lookup settings ---
        definition.setLdapProfileFile((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_PROFILE_FILE));
        final String profileFile = definition.getLdapProfileFile();
        if (StringUtils.hasText(profileFile)) {
            switch (profileFile) {
                case LdapIdentityProviderDefinition.LDAP_PROFILE_FILE_SIMPLE_BIND: {
                    definition.setUserDNPattern((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_USER_DN_PATTERN));
                    if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_USER_DN_PATTERN_DELIMITER) != null) {
                        definition.setUserDNPatternDelimiter((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_USER_DN_PATTERN_DELIMITER));
                    }
                    break;
                }
                case LdapIdentityProviderDefinition.LDAP_PROFILE_FILE_SEARCH_AND_COMPARE:
                case LdapIdentityProviderDefinition.LDAP_PROFILE_FILE_SEARCH_AND_BIND: {
                    definition.setBindUserDn((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_USER_DN));
                    definition.setBindPassword((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_PASSWORD));
                    definition.setUserSearchBase((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_SEARCH_BASE));
                    definition.setUserSearchFilter((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_SEARCH_FILTER));
                    break;
                }
                default:
                    break;
            }
        }
        // --- Connection / TLS / mail settings (nulls pass through as-is) ---
        definition.setBaseUrl((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_URL));
        definition.setSkipSSLVerification((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_SSL_SKIPVERIFICATION));
        definition.setTlsConfiguration((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_SSL_TLS));
        definition.setReferral((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_REFERRAL));
        definition.setMailSubstituteOverridesLdap((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_MAIL_SUBSTITUTE_OVERRIDES_LDAP));
        if (StringUtils.hasText((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_MAIL_ATTRIBUTE_NAME))) {
            definition.setMailAttributeName((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_MAIL_ATTRIBUTE_NAME));
        }
        definition.setMailSubstitute((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_MAIL_SUBSTITUTE));
        definition.setPasswordAttributeName((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_PASSWORD_ATTRIBUTE_NAME));
        definition.setPasswordEncoder((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_PASSWORD_ENCODER));
        definition.setLocalPasswordCompare((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_BASE_LOCAL_PASSWORD_COMPARE));
        // --- Group search settings, only when a real groups file is configured ---
        if (StringUtils.hasText((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_FILE))) {
            definition.setLdapGroupFile((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_FILE));
        }
        if (StringUtils.hasText(definition.getLdapGroupFile()) && !LdapIdentityProviderDefinition.LDAP_GROUP_FILE_GROUPS_NULL_XML.equals(definition.getLdapGroupFile())) {
            definition.setGroupSearchBase((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_SEARCH_BASE));
            definition.setGroupSearchFilter((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_GROUP_SEARCH_FILTER));
            definition.setGroupsIgnorePartialResults((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_IGNORE_PARTIAL_RESULT_EXCEPTION));
            if (ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_MAX_SEARCH_DEPTH) != null) {
                definition.setMaxGroupSearchDepth((Integer) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_MAX_SEARCH_DEPTH));
            }
            definition.setGroupSearchSubTree((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_SEARCH_SUBTREE));
            definition.setAutoAddGroups((Boolean) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_AUTO_ADD));
            definition.setGroupRoleAttribute((String) ldapConfig.get(LdapIdentityProviderDefinition.LDAP_GROUPS_GROUP_ROLE_ATTRIBUTE));
        }
        // if flat attributes are set in the properties
        final String LDAP_ATTR_MAP_PREFIX = LdapIdentityProviderDefinition.LDAP_ATTRIBUTE_MAPPINGS + ".";
        for (Map.Entry<String, Object> entry : ldapConfig.entrySet()) {
            // Pick up "ldap.attributeMappings.<name>" style keys that are not
            // already first-class LDAP properties.
            if (!LdapIdentityProviderDefinition.LDAP_PROPERTY_NAMES.contains(entry.getKey())
                    && entry.getKey().startsWith(LDAP_ATTR_MAP_PREFIX)
                    && entry.getValue() instanceof String) {
                definition.addAttributeMapping(entry.getKey().substring(LDAP_ATTR_MAP_PREFIX.length()), entry.getValue());
            }
        }
        if (ldapConfig.get(LDAP_PREFIX + PROVIDER_DESCRIPTION) != null && ldapConfig.get(LDAP_PREFIX + PROVIDER_DESCRIPTION) instanceof String) {
            definition.setProviderDescription((String) ldapConfig.get(LDAP_PREFIX + PROVIDER_DESCRIPTION));
        }
        return definition;
    }
}
public class JIRAController {

    /**
     * Creating a configuration.
     * Thin REST endpoint: delegates creation to the configuration service and
     * returns the created configuration to the caller.
     */
    @RequestMapping(value = "configurations/create", method = RequestMethod.POST)
    public JIRAConfiguration newConfiguration(@RequestBody JIRAConfiguration configuration) {
        return jiraConfigurationService.newConfiguration(configuration);
    }
}
public class JKIOUtil {

    /**
     * Gets the names of files in a folder whose names end with the given extension.
     *
     * @param folder the folder path
     * @param ext    the required file-name suffix (e.g. ".xml")
     * @return the matching file names (not full paths); empty if the folder does
     *         not exist, is not a directory, or its contents cannot be listed
     */
    public static List<String> getFilesInFolder(String folder, String ext) {
        // try to find using normal file system lookup
        List<String> files = new ArrayList<>();
        File dir = new File(folder);
        if (dir.exists() && dir.isDirectory()) {
            // File.list() may return null on an I/O error even for an existing
            // directory; the original code would have thrown an NPE here.
            String[] entries = dir.list();
            if (entries != null) {
                for (String file : entries) {
                    if (file.endsWith(ext)) {
                        files.add(file);
                    }
                }
            }
        }
        return files;
    }
}
public class BeanELResolverEx { /** * Invokes the method considering the base ' s BeanInfo class ' MethodDescriptors . Resolves method * overloads by looking at the MethodDescriptors first and only then the base ' s class itself , * choosing the most specific candidate . */ @ Override public Object invoke ( final ELContext context , final Object base , final Object method , Class < ? > [ ] paramTypes , final Object [ ] params ) { } }
if ( ( base == null ) || ( method == null ) ) { return null ; } final MethodDescriptor [ ] methodDescriptors = getMethodDescriptors ( context , base ) ; if ( methodDescriptors != null ) { final Method m = findMethodOrThrow ( method . toString ( ) , paramTypes , params , false , methodDescriptors ) ; paramTypes = m . getParameterTypes ( ) ; } return super . invoke ( context , base , method , paramTypes , params ) ;
public class ZookeeperUtil {

    /**
     * Parses chroot section of Zookeeper connection string.
     *
     * @param zookeepers Zookeeper connection string
     * @return the root path (everything from the first '/'), or "/" if none found
     */
    public static String parseRoot(String zookeepers) {
        final int slash = zookeepers.indexOf('/');
        // No chroot present -> default root.
        return slash < 0 ? "/" : zookeepers.substring(slash).trim();
    }
}
public class WriteClass { /** * Read the class with this name . */ public boolean readThisClass ( String strClassName ) { } }
try { Record recClassInfo = this . getMainRecord ( ) ; recClassInfo . getField ( ClassInfo . CLASS_NAME ) . setString ( strClassName ) ; recClassInfo . setKeyArea ( ClassInfo . CLASS_NAME_KEY ) ; return recClassInfo . seek ( "=" ) ; // Get this class record back } catch ( DBException ex ) { ex . printStackTrace ( ) ; return false ; }
public class FileInputFormat {

    /**
     * Opens an input stream to the file defined in the input format.
     * The stream is positioned at the beginning of the given split.
     * The stream is actually opened in an asynchronous thread to make sure any interruptions to the thread
     * working on the input format do not reach the file system.
     *
     * @param fileSplit the split to open
     * @throws IOException if opening, decorating, or seeking the stream fails
     */
    @Override
    public void open(FileInputSplit fileSplit) throws IOException {
        // Remember the split boundaries for later reads.
        this.currentSplit = fileSplit;
        this.splitStart = fileSplit.getStart();
        this.splitLength = fileSplit.getLength();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Opening input split " + fileSplit.getPath() + " [" + this.splitStart + "," + this.splitLength + "]");
        }
        // open the split in an asynchronous thread
        final InputSplitOpenThread isot = new InputSplitOpenThread(fileSplit, this.openTimeout);
        isot.start();
        try {
            this.stream = isot.waitForCompletion();
            // Allow subclasses to wrap the raw stream (e.g. decompression).
            this.stream = decorateInputStream(this.stream, fileSplit);
        } catch (Throwable t) {
            // Normalize any failure (including timeout/interruption surfaced by
            // the open thread) into an IOException with the split context.
            throw new IOException("Error opening the Input Split " + fileSplit.getPath() + " [" + splitStart + "," + splitLength + "]: " + t.getMessage(), t);
        }
        // get FSDataInputStream
        // Position the (possibly decorated) stream at the split's start offset.
        if (this.splitStart != 0) {
            this.stream.seek(this.splitStart);
        }
    }
}
public class IoUtil {

    /**
     * Reads data line by line, applying the handler to each line.
     * (Translated from the original Chinese javadoc.)
     *
     * @param in          {@link InputStream} to read from
     * @param charset     {@link Charset} encoding of the stream
     * @param lineHandler line handler; implement handle() to process each line
     * @throws IORuntimeException on I/O error
     * @since 3.0.9
     */
    public static void readLines(InputStream in, Charset charset, LineHandler lineHandler) throws IORuntimeException {
        // Delegate to the Reader-based overload with a charset-aware reader.
        readLines(getReader(in, charset), lineHandler);
    }
}
public class DataUtil { /** * little - endian or intel format . */ public static long readLongLittleEndian ( InputStream io ) throws IOException { } }
long value = io . read ( ) ; if ( value < 0 ) throw new EOFException ( ) ; int i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= i << 8 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= i << 16 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= ( ( long ) i ) << 24 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= ( ( long ) i ) << 32 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= ( ( long ) i ) << 40 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= ( ( long ) i ) << 48 ; i = io . read ( ) ; if ( i < 0 ) throw new EOFException ( ) ; value |= ( ( long ) i ) << 56 ; return value ;
public class SwingWrapper { /** * Repaint the XChartPanel given the provided index . * @ param index */ public void repaintChart ( int index ) { } }
chartPanels . get ( index ) . revalidate ( ) ; chartPanels . get ( index ) . repaint ( ) ;
public class JSONObject { /** * Get an optional string associated with a key . It returns the defaultValue if there is no such key . * @ param key * A key string . * @ param defaultValue * The default . * @ return A string which is the value . */ public String optString ( String key , String defaultValue ) { } }
Object o = opt ( key ) ; return o != null ? o . toString ( ) : defaultValue ;
public class EDMImpl {
    /**
     * Sets the value of the feature identified by {@code featureID}.
     * Generated EMF reflective setter; features not owned by this class are
     * forwarded to the superclass implementation.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.EDM__DM_NAME:
                // The only feature handled locally: the data-map name.
                setDMName((String) newValue);
                return;
        }
        // Unknown feature - let the inherited reflective implementation handle it.
        super.eSet(featureID, newValue);
    }
}
public class Config { /** * Determines if the value of the given property is a script or not * @ param propertyName The name of the property to check * @ return True if the property exists and its value is a script */ public static boolean valueIsScript ( String propertyName ) { } }
return ( getInstance ( ) . properties . containsKey ( propertyName ) && getInstance ( ) . properties . get ( propertyName ) . startsWith ( SCRIPT_PROPERTY ) ) ;
public class FaultRootCauseEntity { /** * The types and messages of the exceptions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setExceptions ( java . util . Collection ) } or { @ link # withExceptions ( java . util . Collection ) } if you want to * override the existing values . * @ param exceptions * The types and messages of the exceptions . * @ return Returns a reference to this object so that method calls can be chained together . */ public FaultRootCauseEntity withExceptions ( RootCauseException ... exceptions ) { } }
if ( this . exceptions == null ) { setExceptions ( new java . util . ArrayList < RootCauseException > ( exceptions . length ) ) ; } for ( RootCauseException ele : exceptions ) { this . exceptions . add ( ele ) ; } return this ;
public class Printer {
    /**
     * Prints the given string tree.
     * Nested lists are printed recursively in order; leaf elements are printed
     * via {@link PrintWriter#print(Object)}, so a {@code null} leaf prints as
     * "null" instead of raising a NullPointerException.
     *
     * @param pw the writer to be used to print the tree.
     * @param l  a string tree, i.e., a string list that can contain other
     *           string lists, and so on recursively.
     */
    static void printList(final PrintWriter pw, final List<?> l) {
        for (final Object o : l) {
            if (o instanceof List) {
                printList(pw, (List<?>) o);
            } else {
                // print(Object) goes through String.valueOf, which is null-safe;
                // the previous o.toString() call crashed on null leaves.
                pw.print(o);
            }
        }
    }
}
public class InstanceTimelineMarshaller {
    /**
     * Marshall the given parameter object.
     * Generated AWS SDK marshaller: writes the three timeline timestamps using
     * their pre-built protocol bindings.
     *
     * @param instanceTimeline   the model object to marshal; must not be null
     * @param protocolMarshaller sink that receives the marshalled fields
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(InstanceTimeline instanceTimeline, ProtocolMarshaller protocolMarshaller) {
        if (instanceTimeline == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(instanceTimeline.getCreationDateTime(), CREATIONDATETIME_BINDING);
            protocolMarshaller.marshall(instanceTimeline.getReadyDateTime(), READYDATETIME_BINDING);
            protocolMarshaller.marshall(instanceTimeline.getEndDateTime(), ENDDATETIME_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ApiOvhLicenseplesk {
    /**
     * Get this object properties.
     * REST: GET /license/plesk/{serviceName}/option/{label}
     *
     * @param serviceName [required] The name of your Plesk license
     * @param label       [required] This option designation
     * @return the deserialized option properties
     * @throws IOException if the HTTP call fails
     */
    public OvhOption serviceName_option_label_GET(String serviceName, net.minidev.ovh.api.license.OvhOptionLabel label) throws IOException {
        String qPath = "/license/plesk/{serviceName}/option/{label}";
        // Substitute the path template parameters in declaration order.
        StringBuilder sb = path(qPath, serviceName, label);
        // Execute the GET request and deserialize the JSON payload.
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, OvhOption.class);
    }
}
public class ComplexFloatFFT_Mixed {
    /**
     * Radix-7 butterfly pass of the mixed-radix complex FFT.
     * Consumes the interleaved complex data in {@code in} (starting at {@code in0},
     * stride {@code istride}) and writes the transformed, twiddle-multiplied result
     * to {@code out} (starting at {@code out0}, stride {@code ostride}).
     *
     * @param fi      index into the precomputed {@code twiddle} factor table for this pass
     * @param in      interleaved (re, im) input samples
     * @param in0     start offset into {@code in}
     * @param istride input stride between consecutive complex samples
     * @param out     interleaved (re, im) output samples
     * @param out0    start offset into {@code out}
     * @param ostride output stride between consecutive complex samples
     * @param sign    -1 for forward transform, +1 for inverse (flips the sine terms)
     * @param product running product of the factors processed so far
     */
    void pass_7(int fi, float in[], int in0, int istride, float out[], int out0, int ostride, int sign, int product) {
        int k, k1;
        int factor = 7;
        int m = n / factor;
        int q = n / product;
        int p_1 = product / factor;
        int jump = (factor - 1) * p_1; // NOTE(review): computed but unused below - presumably kept for parity with the other pass_* methods; confirm
        // Constant cosines/sines of 2*pi*k/7; the sign flips the sines for inverse transforms.
        float c1 = (float) Math.cos(1.0 * 2.0 * PI / 7.0);
        float c2 = (float) Math.cos(2.0 * 2.0 * PI / 7.0);
        float c3 = (float) Math.cos(3.0 * 2.0 * PI / 7.0);
        float s1 = (float) ((-sign) * Math.sin(1.0 * 2.0 * PI / 7.0));
        float s2 = (float) ((-sign) * Math.sin(2.0 * 2.0 * PI / 7.0));
        float s3 = (float) ((-sign) * Math.sin(3.0 * 2.0 * PI / 7.0));
        int i = in0, j = out0;
        int di = istride * m;   // input distance between the 7 butterfly legs
        int dj = ostride * p_1; // output distance between the 7 butterfly legs
        float x_real, x_imag;
        for (k = 0; k < q; k++) {
            // Twiddle factors w1..w6 for this sub-transform; imaginary parts carry the sign.
            float twids[] = twiddle[fi][k];
            float w1_real = twids[0];
            float w1_imag = -sign * twids[1];
            float w2_real = twids[2];
            float w2_imag = -sign * twids[3];
            float w3_real = twids[4];
            float w3_imag = -sign * twids[5];
            float w4_real = twids[6];
            float w4_imag = -sign * twids[7];
            float w5_real = twids[8];
            float w5_imag = -sign * twids[9];
            float w6_real = twids[10];
            float w6_imag = -sign * twids[11];
            for (k1 = 0; k1 < p_1; k1++) {
                // Load the 7 complex inputs z0..z6 of this butterfly.
                float z0_real = in[i];
                float z0_imag = in[i + 1];
                float z1_real = in[i + di];
                float z1_imag = in[i + di + 1];
                float z2_real = in[i + 2 * di];
                float z2_imag = in[i + 2 * di + 1];
                float z3_real = in[i + 3 * di];
                float z3_imag = in[i + 3 * di + 1];
                float z4_real = in[i + 4 * di];
                float z4_imag = in[i + 4 * di + 1];
                float z5_real = in[i + 5 * di];
                float z5_imag = in[i + 5 * di + 1];
                float z6_real = in[i + 6 * di];
                float z6_imag = in[i + 6 * di + 1];
                i += istride;
                /* compute x = W(7) z */
                /* t0 = z1 + z6 */
                float t0_real = z1_real + z6_real;
                float t0_imag = z1_imag + z6_imag;
                /* t1 = z1 - z6 */
                float t1_real = z1_real - z6_real;
                float t1_imag = z1_imag - z6_imag;
                /* t2 = z2 + z5 */
                float t2_real = z2_real + z5_real;
                float t2_imag = z2_imag + z5_imag;
                /* t3 = z2 - z5 */
                float t3_real = z2_real - z5_real;
                float t3_imag = z2_imag - z5_imag;
                /* t4 = z4 + z3 */
                float t4_real = z4_real + z3_real;
                float t4_imag = z4_imag + z3_imag;
                /* t5 = z4 - z3 */
                float t5_real = z4_real - z3_real;
                float t5_imag = z4_imag - z3_imag;
                /* t6 = t2 + t0 */
                float t6_real = t2_real + t0_real;
                float t6_imag = t2_imag + t0_imag;
                /* t7 = t5 + t3 */
                float t7_real = t5_real + t3_real;
                float t7_imag = t5_imag + t3_imag;
                /* b0 = z0 + t6 + t4 */
                float b0_real = z0_real + t6_real + t4_real;
                float b0_imag = z0_imag + t6_imag + t4_imag;
                /* b1 = ((cos(2pi/7) + cos(4pi/7) + cos(6pi/7))/3 - 1) (t6 + t4) */
                float b1_real = (((c1 + c2 + c3) / 3.0f - 1.0f) * (t6_real + t4_real));
                float b1_imag = (((c1 + c2 + c3) / 3.0f - 1.0f) * (t6_imag + t4_imag));
                /* b2 = ((2*cos(2pi/7) - cos(4pi/7) - cos(6pi/7))/3) (t0 - t4) */
                float b2_real = (((2.0f * c1 - c2 - c3) / 3.0f) * (t0_real - t4_real));
                float b2_imag = (((2.0f * c1 - c2 - c3) / 3.0f) * (t0_imag - t4_imag));
                /* b3 = ((cos(2pi/7) - 2*cos(4pi/7) + cos(6pi/7))/3) (t4 - t2) */
                float b3_real = (((c1 - 2.0f * c2 + c3) / 3.0f) * (t4_real - t2_real));
                float b3_imag = (((c1 - 2.0f * c2 + c3) / 3.0f) * (t4_imag - t2_imag));
                /* b4 = ((cos(2pi/7) + cos(4pi/7) - 2*cos(6pi/7))/3) (t2 - t0) */
                float b4_real = (((c1 + c2 - 2.0f * c3) / 3.0f) * (t2_real - t0_real));
                float b4_imag = (((c1 + c2 - 2.0f * c3) / 3.0f) * (t2_imag - t0_imag));
                /* b5 = sign * ((sin(2pi/7) + sin(4pi/7) - sin(6pi/7))/3) (t7 + t1) */
                float b5_real = ((s1 + s2 - s3) / 3.0f) * (t7_real + t1_real);
                float b5_imag = ((s1 + s2 - s3) / 3.0f) * (t7_imag + t1_imag);
                /* b6 = sign * ((2sin(2pi/7) - sin(4pi/7) + sin(6pi/7))/3) (t1 - t5) */
                float b6_real = ((2.0f * s1 - s2 + s3) / 3.0f) * (t1_real - t5_real);
                float b6_imag = ((2.0f * s1 - s2 + s3) / 3.0f) * (t1_imag - t5_imag);
                /* b7 = sign * ((sin(2pi/7) - 2sin(4pi/7) - sin(6pi/7))/3) (t5 - t3) */
                float b7_real = ((s1 - 2.0f * s2 - s3) / 3.0f) * (t5_real - t3_real);
                float b7_imag = ((s1 - 2.0f * s2 - s3) / 3.0f) * (t5_imag - t3_imag);
                /* b8 = sign * ((sin(2pi/7) + sin(4pi/7) + 2sin(6pi/7))/3) (t3 - t1) */
                float b8_real = ((s1 + s2 + 2.0f * s3) / 3.0f) * (t3_real - t1_real);
                float b8_imag = ((s1 + s2 + 2.0f * s3) / 3.0f) * (t3_imag - t1_imag);
                /* T0 = b0 + b1 */
                float T0_real = b0_real + b1_real;
                float T0_imag = b0_imag + b1_imag;
                /* T1 = b2 + b3 */
                float T1_real = b2_real + b3_real;
                float T1_imag = b2_imag + b3_imag;
                /* T2 = b4 - b3 */
                float T2_real = b4_real - b3_real;
                float T2_imag = b4_imag - b3_imag;
                /* T3 = -b2 - b4 */
                float T3_real = -b2_real - b4_real;
                float T3_imag = -b2_imag - b4_imag;
                /* T4 = b6 + b7 */
                float T4_real = b6_real + b7_real;
                float T4_imag = b6_imag + b7_imag;
                /* T5 = b8 - b7 */
                float T5_real = b8_real - b7_real;
                float T5_imag = b8_imag - b7_imag;
                /* T6 = -b8 - b6 */
                float T6_real = -b8_real - b6_real;
                float T6_imag = -b8_imag - b6_imag;
                /* T7 = T0 + T1 */
                float T7_real = T0_real + T1_real;
                float T7_imag = T0_imag + T1_imag;
                /* T8 = T0 + T2 */
                float T8_real = T0_real + T2_real;
                float T8_imag = T0_imag + T2_imag;
                /* T9 = T0 + T3 */
                float T9_real = T0_real + T3_real;
                float T9_imag = T0_imag + T3_imag;
                /* T10 = T4 + b5 */
                float T10_real = T4_real + b5_real;
                float T10_imag = T4_imag + b5_imag;
                /* T11 = T5 + b5 */
                float T11_real = T5_real + b5_real;
                float T11_imag = T5_imag + b5_imag;
                /* T12 = T6 + b5 */
                float T12_real = T6_real + b5_real;
                float T12_imag = T6_imag + b5_imag;
                /* apply twiddle factors */
                /* out0 = 1 * b0 */
                out[j] = b0_real;
                out[j + 1] = b0_imag;
                /* out1 = w1 * (T7 - i T10) */
                x_real = T7_real + T10_imag;
                x_imag = T7_imag - T10_real;
                out[j + dj] = w1_real * x_real - w1_imag * x_imag;
                out[j + dj + 1] = w1_real * x_imag + w1_imag * x_real;
                /* out2 = w2 * (T9 - i T12) */
                x_real = T9_real + T12_imag;
                x_imag = T9_imag - T12_real;
                out[j + 2 * dj] = w2_real * x_real - w2_imag * x_imag;
                out[j + 2 * dj + 1] = w2_real * x_imag + w2_imag * x_real;
                /* out3 = w3 * (T8 + i T11) */
                x_real = T8_real - T11_imag;
                x_imag = T8_imag + T11_real;
                out[j + 3 * dj] = w3_real * x_real - w3_imag * x_imag;
                out[j + 3 * dj + 1] = w3_real * x_imag + w3_imag * x_real;
                /* out4 = w4 * (T8 - i T11) */
                x_real = T8_real + T11_imag;
                x_imag = T8_imag - T11_real;
                out[j + 4 * dj] = w4_real * x_real - w4_imag * x_imag;
                out[j + 4 * dj + 1] = w4_real * x_imag + w4_imag * x_real;
                /* out5 = w5 * (T9 + i T12) */
                x_real = T9_real - T12_imag;
                x_imag = T9_imag + T12_real;
                out[j + 5 * dj] = w5_real * x_real - w5_imag * x_imag;
                out[j + 5 * dj + 1] = w5_real * x_imag + w5_imag * x_real;
                /* out6 = w6 * (T7 + i T10) */
                x_real = T7_real - T10_imag;
                x_imag = T7_imag + T10_real;
                out[j + 6 * dj] = w6_real * x_real - w6_imag * x_imag;
                out[j + 6 * dj + 1] = w6_real * x_imag + w6_imag * x_real;
                j += ostride;
            }
            // Skip over the other six output legs before the next sub-transform.
            j += (factor - 1) * dj;
        }
    }
}
public class CEDescrBuilderImpl {
    /** {@inheritDoc} */
    public AccumulateDescrBuilder<CEDescrBuilder<P, T>> accumulate() {
        // A top-level accumulate is represented as an accumulate whose result
        // pattern is Object[]; build that synthetic (non-query) pattern first.
        final PatternDescrBuilder<CEDescrBuilder<P, T>> resultPattern = pattern("Object[]").isQuery(false);
        // Create the accumulate builder parented on this CE and wire it in as
        // the source of the synthetic result pattern.
        final AccumulateDescrBuilder<CEDescrBuilder<P, T>> accumulateBuilder =
                new AccumulateDescrBuilderImpl<CEDescrBuilder<P, T>>(this);
        resultPattern.getDescr().setSource(accumulateBuilder.getDescr());
        // Return the accumulate builder, which already has the proper parent.
        return accumulateBuilder;
    }
}
public class ByteArray { /** * 将array的内容引用复制给本对象 * @ param array ByteArray */ public void directFrom ( ByteArray array ) { } }
if ( array != null ) { this . content = array . content ; this . count = array . count ; }
public class appfwxmlcontenttype { /** * Use this API to fetch all the appfwxmlcontenttype resources that are configured on netscaler . */ public static appfwxmlcontenttype [ ] get ( nitro_service service ) throws Exception { } }
appfwxmlcontenttype obj = new appfwxmlcontenttype ( ) ; appfwxmlcontenttype [ ] response = ( appfwxmlcontenttype [ ] ) obj . get_resources ( service ) ; return response ;
public class EntityDataModelUtil { /** * Gets the names of the properties that are part of the key of an entity type . * @ param entityType The entity type . * @ return A { @ code Set } containing the names of the key properties of the entity type . */ public static Set < String > getKeyPropertyNames ( EntityType entityType ) { } }
Set < String > keyPropertyNames = entityType . getKey ( ) . getPropertyRefs ( ) . stream ( ) . map ( PropertyRef :: getPath ) . collect ( Collectors . toSet ( ) ) ; return keyPropertyNames ;
public class AndroidPermissions { /** * Requests the missing permissions . * The activity from which this method is called has to implement * { @ link Activity # onRequestPermissionsResult ( int , String [ ] , int [ ] ) } * and then , inside it , it has to call the method * { @ link AndroidPermissions # areAllRequiredPermissionsGranted ( String [ ] , int [ ] ) } to check that all the * requested permissions are granted by the user * @ param requestCode request code used by the activity */ public void requestPermissions ( int requestCode ) { } }
String [ ] request = mPermissionsToRequest . toArray ( new String [ mPermissionsToRequest . size ( ) ] ) ; StringBuilder log = new StringBuilder ( ) ; log . append ( "Requesting permissions:\n" ) ; for ( String permission : request ) { log . append ( permission ) . append ( "\n" ) ; } Log . i ( getClass ( ) . getSimpleName ( ) , log . toString ( ) ) ; ActivityCompat . requestPermissions ( mContext , request , requestCode ) ;
public class AmazonSNSClient { /** * Returns the settings for sending SMS messages from your account . * These settings are set with the < code > SetSMSAttributes < / code > action . * @ param getSMSAttributesRequest * The input for the < code > GetSMSAttributes < / code > request . * @ return Result of the GetSMSAttributes operation returned by the service . * @ throws ThrottledException * Indicates that the rate at which requests have been submitted for this action exceeds the limit for your * account . * @ throws InternalErrorException * Indicates an internal service error . * @ throws AuthorizationErrorException * Indicates that the user has been denied access to the requested resource . * @ throws InvalidParameterException * Indicates that a request parameter does not comply with the associated constraints . * @ sample AmazonSNS . GetSMSAttributes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / sns - 2010-03-31 / GetSMSAttributes " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetSMSAttributesResult getSMSAttributes ( GetSMSAttributesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetSMSAttributes ( request ) ;
public class SchedulerProvider {
    /**
     * Registers all given {@code TriggerListener}s with the scheduler's
     * listener manager. Injected optionally by Guice, so it is a no-op when
     * no listeners are bound.
     *
     * @param triggerListeners The {@code TriggerListener}s to register
     * @throws SchedulerException If any error occurs while registering a listener
     */
    @Inject(optional = true)
    public void addTriggerListeners(Set<TriggerListener> triggerListeners) throws SchedulerException {
        for (TriggerListener triggerListener : triggerListeners) {
            scheduler.getListenerManager().addTriggerListener(triggerListener);
        }
    }
}
public class autoscaleprofile {
    /**
     * Use this API to update autoscaleprofile.
     * Only the writable fields (name, url, apikey, sharedsecret) are copied
     * onto a fresh resource object before the update request is issued.
     */
    public static base_response update(nitro_service client, autoscaleprofile resource) throws Exception {
        autoscaleprofile updateresource = new autoscaleprofile();
        updateresource.name = resource.name;
        updateresource.url = resource.url;
        updateresource.apikey = resource.apikey;
        updateresource.sharedsecret = resource.sharedsecret;
        return updateresource.update_resource(client);
    }
}
public class AbstractCommand { /** * Set the id . In most cases , this is provided by the constructor or through * the beanId provided in the applicationContext . * @ param id */ protected void setId ( String id ) { } }
if ( ! StringUtils . hasText ( id ) ) { id = null ; } this . id = id ;
public class AliPayApi {
    /**
     * Unified order refund query (统一收单交易退款查询).
     * Wraps the business model in a refund-query request and executes it with
     * the client configured in {@code AliPayApiConfigKit}.
     *
     * @param model {AlipayTradeFastpayRefundQueryModel} the refund-query parameters
     * @return {AlipayTradeFastpayRefundQueryResponse} the gateway response
     * @throws AlipayApiException if the gateway call fails
     */
    public static AlipayTradeFastpayRefundQueryResponse tradeRefundQueryToResponse(AlipayTradeFastpayRefundQueryModel model) throws AlipayApiException {
        AlipayTradeFastpayRefundQueryRequest request = new AlipayTradeFastpayRefundQueryRequest();
        request.setBizModel(model);
        return AliPayApiConfigKit.getAliPayApiConfig().getAlipayClient().execute(request);
    }
}
public class ClientInterface {
    /**
     * Move a partition leader from one host to another.
     * Finds a partition leader on the host which hosts the most partition leaders
     * and finds the host which hosts the partition replica and the least number of
     * partition leaders, then sends a MigratePartitionLeaderMessage to the host
     * with the old partition leader to initiate @MigratePartitionLeader.
     * Repeatedly called until no qualified partition is available.
     *
     * @param prepareStopNode if true, only move partition leaders on this host to
     *                        other hosts - used via @PrepareStopNode. Otherwise,
     *                        balance the partition leaders among all nodes.
     */
    void startMigratePartitionLeader(boolean prepareStopNode) {
        RealVoltDB voltDB = (RealVoltDB) VoltDB.instance();
        final int hostId = CoreUtils.getHostIdFromHSId(m_siteId);
        // target = (partition id, destination host id) or null if nothing to do.
        Pair<Integer, Integer> target = null;
        if (prepareStopNode) {
            target = m_cartographer.getPartitionLeaderMigrationTargetForStopNode(hostId);
        } else {
            if (voltDB.isClusterComplete()) {
                target = m_cartographer.getPartitionLeaderMigrationTarget(voltDB.getHostCount(), hostId, prepareStopNode);
            } else {
                // Out of the scheduled task
                target = new Pair<Integer, Integer>(-1, -1);
            }
        }
        // The host does not have any thing to do this time. It does not mean that the host does not
        // have more partition leaders than expected. Other hosts may have more partition leaders
        // than this one. So let other hosts do @MigratePartitionLeader first.
        if (target == null) {
            return;
        }
        final int partitionId = target.getFirst();
        final int targetHostId = target.getSecond();
        int partitionKey = -1;
        // MigratePartitionLeader is completed or there are hosts down. Stop MigratePartitionLeader service on this host
        if (targetHostId == -1 || (!prepareStopNode && !voltDB.isClusterComplete())) {
            voltDB.scheduleWork(() -> {
                m_mailbox.deliver(new MigratePartitionLeaderMessage());
            }, 0, 0, TimeUnit.SECONDS);
            return;
        }
        // Others may also iterate through the partition keys. So make a copy and find the key
        VoltTable partitionKeys = TheHashinator.getPartitionKeys(VoltType.INTEGER);
        ByteBuffer buf = ByteBuffer.allocate(partitionKeys.getSerializedSize());
        partitionKeys.flattenToBuffer(buf);
        buf.flip();
        VoltTable keyCopy = PrivateVoltTableFactory.createVoltTableFromSharedBuffer(buf);
        keyCopy.resetRowPosition();
        while (keyCopy.advanceRow()) {
            if (partitionId == keyCopy.getLong("PARTITION_ID")) {
                partitionKey = (int) (keyCopy.getLong("PARTITION_KEY"));
                break;
            }
        }
        if (partitionKey == -1) {
            tmLog.warn("Could not find the partition key for partition " + partitionId);
            return;
        }
        // grab a lock - only one @MigratePartitionLeader may be in flight cluster-wide
        String errorMessage = VoltZK.createActionBlocker(m_zk, VoltZK.migratePartitionLeaderBlocker, CreateMode.EPHEMERAL, tmLog, "Migrate Partition Leader");
        if (errorMessage != null) {
            tmLog.rateLimitedLog(60, Level.INFO, null, errorMessage);
            return;
        }
        if (tmLog.isDebugEnabled()) {
            tmLog.debug(String.format("Move the leader of partition %d to host %d", partitionId, targetHostId));
            VoltTable vt = Cartographer.peekTopology(m_cartographer);
            tmLog.debug("[@MigratePartitionLeader]\n" + vt.toFormattedString());
        }
        boolean transactionStarted = false;
        Long targetHSId = m_cartographer.getHSIDForPartitionHost(targetHostId, partitionId);
        if (targetHSId == null) {
            if (tmLog.isDebugEnabled()) {
                tmLog.debug(String.format("Partition %d is no longer on host %d", partitionId, targetHostId));
            }
            return;
        }
        try {
            SimpleClientResponseAdapter.SyncCallback cb = new SimpleClientResponseAdapter.SyncCallback();
            final String procedureName = "@MigratePartitionLeader";
            Config procedureConfig = SystemProcedureCatalog.listing.get(procedureName);
            StoredProcedureInvocation spi = new StoredProcedureInvocation();
            spi.setProcName(procedureName);
            spi.setClientHandle(m_executeTaskAdpater.registerCallback(cb));
            spi.setParams(partitionKey, partitionId, targetHostId);
            if (spi.getSerializedParams() == null) {
                spi = MiscUtils.roundTripForCL(spi);
            }
            // Info saved for the node failure handling
            MigratePartitionLeaderInfo spiInfo = new MigratePartitionLeaderInfo(m_cartographer.getHSIDForPartitionHost(hostId, partitionId), targetHSId, partitionId);
            VoltZK.createMigratePartitionLeaderInfo(m_zk, spiInfo);
            notifyPartitionMigrationStatus(partitionId, targetHSId, false);
            // Test hook: simulate a failure between status notification and transaction start.
            if (Boolean.getBoolean("TEST_MIGRATION_FAILURE")) {
                Thread.sleep(100);
                throw new IOException("failure simulation");
            }
            synchronized (m_executeTaskAdpater) {
                if (createTransaction(m_executeTaskAdpater.connectionId(), spi, procedureConfig.getReadonly(), procedureConfig.getSinglepartition(), procedureConfig.getEverysite(), partitionId, spi.getSerializedSize(), System.nanoTime()) != CreateTransactionResult.SUCCESS) {
                    tmLog.warn(String.format("Failed to start transaction for migration of partition %d to host %d", partitionId, targetHostId));
                    notifyPartitionMigrationStatus(partitionId, targetHSId, true);
                    return;
                }
            }
            transactionStarted = true;
            final long timeoutMS = 5 * 60 * 1000;
            ClientResponse resp = cb.getResponse(timeoutMS);
            if (resp != null && resp.getStatus() == ClientResponse.SUCCESS) {
                tmLog.info(String.format("The partition leader for %d has been moved to host %d.", partitionId, targetHostId));
            } else {
                // not necessary a failure.
                tmLog.warn(String.format("Fail to move the leader of partition %d to host %d. %s", partitionId, targetHostId, resp == null ? null : resp.getStatusString()));
                notifyPartitionMigrationStatus(partitionId, targetHSId, true);
            }
        } catch (Exception e) {
            tmLog.warn(String.format("errors in leader change for partition %d", partitionId), e);
            notifyPartitionMigrationStatus(partitionId, targetHSId, true);
        } finally {
            if (!transactionStarted) {
                return;
            }
            // wait for the Cartographer to see the new partition leader. The leader promotion process should happen instantly.
            // If the new leader does not show up in 5 min, the cluster may have experienced host-down events.
            long remainingWaitTime = TimeUnit.MINUTES.toMillis(5);
            final long waitingInterval = TimeUnit.SECONDS.toMillis(1);
            boolean anyFailedHosts = false;
            boolean migrationComplete = false;
            while (remainingWaitTime > 0) {
                try {
                    Thread.sleep(waitingInterval);
                } catch (InterruptedException ignoreIt) {
                }
                remainingWaitTime -= waitingInterval;
                if (CoreUtils.getHostIdFromHSId(m_cartographer.getHSIdForMaster(partitionId)) == targetHostId) {
                    migrationComplete = true;
                    break;
                }
                // some hosts may be down.
                if (!voltDB.isClusterComplete() && !prepareStopNode) {
                    anyFailedHosts = true;
                    // If the target host is still alive, migration is still going on.
                    if (!voltDB.getHostMessenger().getLiveHostIds().contains(targetHostId)) {
                        break;
                    }
                }
            }
            // if there are failed hosts, this blocker will be removed in RealVoltDB.handleHostsFailedForMigratePartitionLeader()
            if (!anyFailedHosts) {
                voltDB.scheduleWork(() -> removeMigrationZKNodes(), 5, 0, TimeUnit.SECONDS);
            }
            if (!migrationComplete) {
                notifyPartitionMigrationStatus(partitionId, targetHSId, true);
            }
        }
    }
}
public class CurseFilter { /** * Configure the words that will stop . */ protected void configureStopWords ( String stopWords ) { } }
List < String > patterns = Lists . newArrayList ( ) ; for ( StringTokenizer st = new StringTokenizer ( stopWords ) ; st . hasMoreTokens ( ) ; ) { patterns . add ( getStopWordRegexp ( st . nextToken ( ) ) ) ; } String pattern = patterns . isEmpty ( ) ? ".\\A" // matches nothing : "(" + Joiner . on ( '|' ) . join ( patterns ) + ")" ; setStopPattern ( pattern ) ;
public class Parser {
    /**
     * ***** INLINES *****
     * Grammar rule matching a run of inline elements: one leading inline (or
     * intermediate endline) wrapped into a SuperNode, any number of further
     * inlines added as children, and an optional trailing endline that is
     * dropped from the result.
     */
    public Rule Inlines() {
        return NodeSequence(
                InlineOrIntermediateEndline(),
                push(new SuperNode(popAsNode())),
                ZeroOrMore(InlineOrIntermediateEndline(), addAsChild()),
                Optional(Endline(), drop()));
    }
}
public class TileBoundingBoxUtils { /** * Get the tile grid * @ param totalBox * total bounding box * @ param matrixWidth * matrix width * @ param matrixHeight * matrix height * @ param boundingBox * bounding box * @ return tile grid */ public static TileGrid getTileGrid ( BoundingBox totalBox , long matrixWidth , long matrixHeight , BoundingBox boundingBox ) { } }
long minColumn = getTileColumn ( totalBox , matrixWidth , boundingBox . getMinLongitude ( ) ) ; long maxColumn = getTileColumn ( totalBox , matrixWidth , boundingBox . getMaxLongitude ( ) ) ; if ( minColumn < matrixWidth && maxColumn >= 0 ) { if ( minColumn < 0 ) { minColumn = 0 ; } if ( maxColumn >= matrixWidth ) { maxColumn = matrixWidth - 1 ; } } long maxRow = getTileRow ( totalBox , matrixHeight , boundingBox . getMinLatitude ( ) ) ; long minRow = getTileRow ( totalBox , matrixHeight , boundingBox . getMaxLatitude ( ) ) ; if ( minRow < matrixHeight && maxRow >= 0 ) { if ( minRow < 0 ) { minRow = 0 ; } if ( maxRow >= matrixHeight ) { maxRow = matrixHeight - 1 ; } } TileGrid tileGrid = new TileGrid ( minColumn , minRow , maxColumn , maxRow ) ; return tileGrid ;
public class POIUtils { /** * 任意の列のセルを全て取得する 。 * < p > { @ literal jxl . Seet . getColumn ( int col ) } < / p > * @ param sheet * @ param col 列番号 ( 0から始まる ) * @ return 列レコード ( 行の集合 ) 。 * ただし 、 シートの最大行数以下の場合 、 空のセルを補完する 。 * @ throws IllegalArgumentException { @ literal sheet = = null } */ public static Cell [ ] getColumn ( final Sheet sheet , final int col ) { } }
ArgUtils . notNull ( sheet , "sheet" ) ; int maxRow = getRows ( sheet ) ; Cell [ ] cells = new Cell [ maxRow ] ; for ( int i = 0 ; i < maxRow ; i ++ ) { Row rows = sheet . getRow ( i ) ; if ( rows == null ) { rows = sheet . createRow ( i ) ; } Cell cell = rows . getCell ( col ) ; if ( cell == null ) { cell = rows . createCell ( col , CellType . BLANK ) ; } cells [ i ] = cell ; } return cells ;
public class MergeableManifest2 { /** * Add the set with given bundles to the " Require - Bundle " main attribute . * @ param requiredBundles The set with all bundles to add . */ public void addRequiredBundles ( Set < String > requiredBundles ) { } }
addRequiredBundles ( requiredBundles . toArray ( new String [ requiredBundles . size ( ) ] ) ) ;
public class ModelsImpl {
    /**
     * Create an entity role for an entity in the application.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId The entity model ID.
     * @param createCustomPrebuiltEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the UUID object
     */
    public Observable<UUID> createCustomPrebuiltEntityRoleAsync(UUID appId, String versionId, UUID entityId, CreateCustomPrebuiltEntityRoleOptionalParameter createCustomPrebuiltEntityRoleOptionalParameter) {
        // Delegate to the ServiceResponse variant and unwrap just the body.
        return createCustomPrebuiltEntityRoleWithServiceResponseAsync(appId, versionId, entityId, createCustomPrebuiltEntityRoleOptionalParameter)
                .map(new Func1<ServiceResponse<UUID>, UUID>() {
                    @Override
                    public UUID call(ServiceResponse<UUID> response) {
                        return response.body();
                    }
                });
    }
}
public class ValidatorMap {
    /**
     * Returns a list of {@link XMLValidator}s applicable to the given
     * {@link ValidationContext}, as determined by the {@link Predicate} used to
     * build this map. The validators are returned in the order in which they have
     * been added to the builder.
     * <code>XMLValidator</code> instances are built dynamically, only when the
     * context satisfies the underlying predicate.
     *
     * @param context a validation context
     * @return the list of validators applicable to <code>context</code>
     */
    public List<XMLValidator> getValidators(final ValidationContext context) {
        return FluentIterable.from(validators.entrySet())
                .transform(new Function<Entry<XMLValidators, Predicate<? super ValidationContext>>, XMLValidator>() {
                    @Override
                    public XMLValidator apply(Entry<XMLValidators, Predicate<? super ValidationContext>> entry) {
                        // Instantiate a validator only when its predicate accepts the context;
                        // non-matching entries map to null and are filtered out below.
                        return entry.getValue().apply(context) ? entry.getKey().get() : null;
                    }
                })
                .filter(Predicates.notNull())
                .toList();
    }
}
public class TreeCache { /** * Return the current data for the given path . There are no guarantees of accuracy . This is * merely the most recent view of the data . If there is no node at the given path , * { @ code null } is returned . * @ param fullPath full path to the node to check * @ return data if the node is alive , or null */ public ChildData getCurrentData ( String fullPath ) { } }
TreeNode node = find ( fullPath ) ; if ( node == null || node . nodeState != NodeState . LIVE ) { return null ; } ChildData result = node . childData ; // Double - check liveness after retreiving data . return node . nodeState == NodeState . LIVE ? result : null ;
public class Menu {
    /**
     * Returns for given parameter <i>_id</i> the instance of class {@link Menu}.
     * Resolves through the generic user-interface-object cache, restricted to
     * the Menu admin type.
     *
     * @param _id id to search in the cache
     * @return instance of class {@link Menu}
     * @throws CacheReloadException on error
     */
    public static Menu get(final long _id) throws CacheReloadException {
        return AbstractUserInterfaceObject.<Menu>get(_id, Menu.class, CIAdminUserInterface.Menu.getType());
    }
}
public class MapComposedElement { /** * Check if point p is contained in this coordinates collections . * @ param point the point to compare with this coordinates * @ param groupIndex into look for * @ return true if p is already part of coordinates */ @ Pure public boolean containsPoint ( Point2D < ? , ? > point , int groupIndex ) { } }
final int grpCount = getGroupCount ( ) ; if ( groupIndex < 0 ) { throw new IndexOutOfBoundsException ( groupIndex + "<0" ) ; // $ NON - NLS - 1 $ } if ( groupIndex > grpCount ) { throw new IndexOutOfBoundsException ( groupIndex + ">" + grpCount ) ; // $ NON - NLS - 1 $ } if ( point == null ) { return false ; } for ( int i = 0 ; i < getPointCountInGroup ( groupIndex ) ; ++ i ) { final Point2d cur = getPointAt ( groupIndex , i ) ; if ( cur . epsilonEquals ( point , MapElementConstants . POINT_FUSION_DISTANCE ) ) { return true ; } } return false ;
public class FileOperations { /** * Copies the source file to the target file . * @ param aSourceFile * The source file to use . May not be < code > null < / code > . Needs to be an * existing file . * @ param aTargetFile * The destination files . May not be < code > null < / code > and may not be * an existing file . * @ return A non - < code > null < / code > error code . */ @ Nonnull public static FileIOError copyFile ( @ Nonnull final File aSourceFile , @ Nonnull final File aTargetFile ) { } }
ValueEnforcer . notNull ( aSourceFile , "SourceFile" ) ; ValueEnforcer . notNull ( aTargetFile , "TargetFile" ) ; // Does the source file exist ? if ( ! FileHelper . existsFile ( aSourceFile ) ) return EFileIOErrorCode . SOURCE_DOES_NOT_EXIST . getAsIOError ( EFileIOOperation . COPY_FILE , aSourceFile ) ; // Are source and target different ? if ( EqualsHelper . equals ( aSourceFile , aTargetFile ) ) return EFileIOErrorCode . SOURCE_EQUALS_TARGET . getAsIOError ( EFileIOOperation . COPY_FILE , aSourceFile ) ; // Does the target file already exist ? if ( aTargetFile . exists ( ) ) return EFileIOErrorCode . TARGET_ALREADY_EXISTS . getAsIOError ( EFileIOOperation . COPY_FILE , aTargetFile ) ; // Is the source file readable ? if ( ! aSourceFile . canRead ( ) ) return EFileIOErrorCode . SOURCE_NOT_READABLE . getAsIOError ( EFileIOOperation . COPY_FILE , aSourceFile ) ; // Is the target parent directory writable ? final File aTargetParentDir = aTargetFile . getParentFile ( ) ; if ( aTargetParentDir != null && aTargetParentDir . exists ( ) && ! aTargetParentDir . canWrite ( ) ) return EFileIOErrorCode . TARGET_PARENT_NOT_WRITABLE . getAsIOError ( EFileIOOperation . COPY_FILE , aTargetFile ) ; // Ensure the targets parent directory is present FileHelper . ensureParentDirectoryIsPresent ( aTargetFile ) ; ESuccess eSuccess ; if ( false ) { // Used FileChannel for better performance // But they make problems when using UNC paths eSuccess = _copyFileViaChannel ( aSourceFile , aTargetFile ) ; } else { // Streams are slower but more interoperable eSuccess = _copyFileViaStreams ( aSourceFile , aTargetFile ) ; } final EFileIOErrorCode eError = eSuccess . isSuccess ( ) ? EFileIOErrorCode . NO_ERROR : EFileIOErrorCode . OPERATION_FAILED ; return eError . getAsIOError ( EFileIOOperation . COPY_FILE , aSourceFile , aTargetFile ) ;
public class RLEDecoder { /** * even if this will make the output larger . */ public int decode ( final InputStream stream , final ByteBuffer buffer ) throws IOException { } }
while ( buffer . remaining ( ) >= 64 ) { int val = stream . read ( ) ; if ( val < 0 ) { break ; // EOF } if ( ( val & COMPRESSED_RUN_MASK ) == COMPRESSED_RUN_MASK ) { int count = val & ~ COMPRESSED_RUN_MASK ; int pixel = stream . read ( ) ; if ( pixel < 0 ) { break ; // EOF } for ( int i = 0 ; i < count ; i ++ ) { buffer . put ( ( byte ) pixel ) ; } } else { buffer . put ( ( byte ) val ) ; } } return buffer . position ( ) ;
public class AnalyzeJourneyByMethodAction { /** * { @ inheritDoc } */ @ Override public ActionCommand execute ( ActionMapping mapping , FormBean formBean , HttpServletRequest req , HttpServletResponse res ) throws APIException { } }
String journeyName = req . getParameter ( PARAM_JOURNEY_NAME ) ; req . setAttribute ( "journeyName" , journeyName ) ; // add totals to the journey list on top . AnalyzedJourneyAO analyzedJourney = getJourneyAPI ( ) . analyzeJourneyByMethod ( journeyName ) ; List < AnalyzedProducerCallsMapAO > callsInJourny = analyzedJourney . getCalls ( ) ; LinkedList < AnalyzedProducerCallsMapAO > newCalls = new LinkedList < > ( ) ; newCalls . add ( analyzedJourney . getTotalByProducerId ( ) ) ; newCalls . addAll ( callsInJourny ) ; analyzedJourney . setCalls ( newCalls ) ; req . setAttribute ( "analyzedJourney" , analyzedJourney ) ; // Preparing graph data Map < String , GraphDataBean > graphData = new HashMap < > ( ) ; for ( AnalyzedProducerCallsMapAO callsMap : analyzedJourney . getCalls ( ) ) { graphData = fillGraphDataMap ( graphData , callsMap ) ; } req . setAttribute ( "graphDatas" , graphData . values ( ) ) ; // prepare sort type String sortOrder = req . getParameter ( "pSortOrder" ) ; String sortBy = req . getParameter ( "pSortBy" ) ; if ( sortBy != null && sortBy . length ( ) > 0 ) { AnalyzedProducerCallsAOSortType st = AnalyzedProducerCallsAOSortType . fromStrings ( sortBy , sortOrder ) ; MoSKitoWebUIContext . getCallContext ( ) . setAnalyzeProducerCallsSortType ( st ) ; } return mapping . success ( ) ;
public class FilePublisher { /** * Notifies all file subscribers registered to { @ code reloadableFile } * @ param reloadableFile the ReloadableFile object for which to notify all pertaining file subscribers */ public synchronized void notifyFileSubscribers ( ReloadableFile reloadableFile ) { } }
notifyDefaultFileSubscribers ( ) ; IdentificationSet < FileSubscriber > subList = fileSubscribers . get ( reloadableFile ) ; if ( subList == null ) { return ; } for ( FileSubscriber sub : subList ) { CompletableFuture . runAsync ( sub :: update , main . getThreadPoolManager ( ) . getAddOnsThreadPool ( ) ) ; }
public class Strings {
    /**
     * Copies the given Enumeration into a String array. The Enumeration must
     * contain String elements only.
     *
     * @param enumeration the Enumeration to copy
     * @return the String array (<code>null</code> if the passed-in
     *         Enumeration was <code>null</code>)
     */
    public static String[] toStringArray(Enumeration<String> enumeration) {
        if (enumeration == null) {
            return null;
        }
        // Drain the enumeration into a list, then copy into a sized array.
        final List<String> elements = java.util.Collections.list(enumeration);
        final String[] result = new String[elements.size()];
        return elements.toArray(result);
    }
}
public class ImportDialog { /** * < / editor - fold > / / GEN - END : initComponents */ private void btCancelActionPerformed ( java . awt . event . ActionEvent evt ) // GEN - FIRST : event _ btCancelActionPerformed { } }
// GEN - HEADEREND : event _ btCancelActionPerformed if ( isImporting ) { worker . cancel ( true ) ; statementController . cancelStatements ( ) ; } setVisible ( false ) ;
public class DisksInner { /** * Revokes access to a disk . * @ param resourceGroupName The name of the resource group . * @ param diskName The name of the managed disk that is being created . The name can ' t be changed after the disk is created . Supported characters for the name are a - z , A - Z , 0-9 and _ . The maximum name length is 80 characters . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < OperationStatusResponseInner > revokeAccessAsync ( String resourceGroupName , String diskName ) { } }
return revokeAccessWithServiceResponseAsync ( resourceGroupName , diskName ) . map ( new Func1 < ServiceResponse < OperationStatusResponseInner > , OperationStatusResponseInner > ( ) { @ Override public OperationStatusResponseInner call ( ServiceResponse < OperationStatusResponseInner > response ) { return response . body ( ) ; } } ) ;
public class KeyVaultClientBaseImpl { /** * Deletes the creation operation for a specific certificate . * Deletes the creation operation for a specified certificate that is in the process of being created . The certificate is no longer created . This operation requires the certificates / update permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param certificateName The name of the certificate . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws KeyVaultErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the CertificateOperation object if successful . */ public CertificateOperation deleteCertificateOperation ( String vaultBaseUrl , String certificateName ) { } }
return deleteCertificateOperationWithServiceResponseAsync ( vaultBaseUrl , certificateName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class BasePanel { /** * Get the command string that will restore this screen . * Override in Screen , GridScreen , MenuScreen . * @ param strOldURL The URL to add this param to . * @ param strParam The new param . * @ param strData The new data for this param . * @ return The new URL . * @ see Utility . addURLParam . */ public String addURLParam ( String strOldURL , String strParam , String strData ) { } }
return Utility . addURLParam ( strOldURL , strParam , strData ) ;
public class WebServiceCommunication { /** * Issues HTTP POST request , returns response body as string . * @ param endpoint endpoint of request url * @ param json request body * @ return response body * @ throws IOException in case of any IO related issue */ public String postJson ( String endpoint , JSONObject json ) throws IOException { } }
return this . postJson ( endpoint , "" , json ) ;
public class ExtensionHistory { /** * This method initializes resendDialog * @ return org . parosproxy . paros . extension . history . ResendDialog */ public ManualRequestEditorDialog getResendDialog ( ) { } }
if ( resendDialog == null ) { resendDialog = new ManualHttpRequestEditorDialog ( true , "resend" , "ui.dialogs.manreq" ) ; resendDialog . setTitle ( Constant . messages . getString ( "manReq.dialog.title" ) ) ; // ZAP : i18n } return resendDialog ;
public class TemplCommand { /** * Invoke the command execution method given at object creation time . * This method is automatically called by the TANGO core classes when the * associated command is requested by a client . * @ param dev The device on which the command must be executed * @ param in _ any The incoming data still packed in a CORBA Any object . For * command created with this TemplCommand class , this Any object does not * contain usefull data * @ return The CORBA Any object returned to the client . For command created with * this TemplCommand class , this any object does not contain data . * @ exception DevFailed If the execution method failed * Click < a href = " . . / . . / tango _ basic / idl _ html / Tango . html # DevFailed " > here < / a > to read * < b > DevFailed < / b > exception specification */ public Any execute ( DeviceImpl dev , Any in_any ) throws DevFailed { } }
// Execute the command associated method try { java . lang . Object [ ] meth_param = new java . lang . Object [ 0 ] ; exe_method . invoke ( dev , meth_param ) ; } catch ( InvocationTargetException e ) { throw ( DevFailed ) ( e . getTargetException ( ) ) ; } catch ( IllegalArgumentException e ) { StringBuffer mess = new StringBuffer ( "Argument error when trying to invoke method " ) ; mess . append ( exe_method ) ; Except . throw_exception ( "API_MethodArgument" , mess . toString ( ) , "TemplCommand.execute()" ) ; } catch ( IllegalAccessException e ) { StringBuffer mess = new StringBuffer ( "Argument error when trying to invoke method " ) ; mess . append ( exe_method ) ; Except . throw_exception ( "API_MethodArgument" , mess . toString ( ) , "TemplCommand.execute()" ) ; } // Return an empty Any return insert ( ) ;
public class CoverageUtilities { /** * Creates a { @ link WritableRandomIter } . * < p > It is important to use this method since it supports also * large GRASS rasters . * < p > If the size would throw an integer overflow , a { @ link GrassLegacyRandomIter } * will be proposed to try to save the saveable . * @ param raster the coverage on which to wrap a { @ link WritableRandomIter } . * @ return the iterator . */ public static WritableRandomIter getWritableRandomIterator ( int width , int height ) { } }
if ( doesOverFlow ( width , height ) ) { GrassLegacyRandomIter iter = new GrassLegacyRandomIter ( new double [ height ] [ width ] ) ; return iter ; } WritableRaster pitRaster = CoverageUtilities . createWritableRaster ( width , height , null , null , null ) ; WritableRandomIter iter = RandomIterFactory . createWritable ( pitRaster , null ) ; return iter ;
public class GDeferredRequest { @ Override public void resolve ( final Response < T > response ) { } }
if ( ! isPending ( ) ) { throw new IllegalStateException ( "Deferred object already finished, cannot resolve again" ) ; } state = State . RESOLVED ; resolveResult = response . getPayload ( ) ; try { triggerDone ( response ) ; } finally { triggerAlways ( resolveResult , null ) ; }
public class AbstractWComponent { /** * { @ inheritDoc } */ @ Override public void setIdName ( final String idName ) { } }
// Not allow empty or null if ( Util . empty ( idName ) ) { throw new IllegalArgumentException ( "idName cannot be null or empty" ) ; } // Must start with a letter and followed by letters , digits and or underscores Matcher matcher = ID_PATTERN . matcher ( idName ) ; if ( ! matcher . matches ( ) ) { throw new IllegalArgumentException ( "idName " + idName + " must start with a letter and followed by letters, digits and or underscores." ) ; } String currIdName = getIdName ( ) ; if ( ! Objects . equals ( idName , currIdName ) ) { ComponentModel model = getOrCreateComponentModel ( ) ; model . setIdName ( idName ) ; }
public class Resources { /** * Retrieve a char from bundle . * @ param key the key of resource * @ return the resource char * @ throws MissingResourceException if the requested key is unknown */ public char getChar ( String key ) throws MissingResourceException { } }
ResourceBundle bundle = getBundle ( ) ; String value = bundle . getString ( key ) ; if ( 1 == value . length ( ) ) { return value . charAt ( 0 ) ; } else { throw new MissingResourceException ( "Expecting a char value but got " + value , "java.lang.String" , key ) ; }
public class Datamodel { /** * Creates a { @ link TimeValue } for a given date . The precision is * automatically set to { @ link TimeValue # PREC _ DAY } . * @ param year * a year number , where 0 refers to 1BCE * @ param month * a month number between 1 and 12 * @ param day * a day number between 1 and 31 * @ param calendarModel * the IRI of the calendar model preferred when displaying the * date ; usually { @ link TimeValue # CM _ GREGORIAN _ PRO } or * { @ link TimeValue # CM _ JULIAN _ PRO } * @ return a { @ link TimeValue } corresponding to the input */ public static TimeValue makeTimeValue ( long year , byte month , byte day , String calendarModel ) { } }
return factory . getTimeValue ( year , month , day , ( byte ) 0 , ( byte ) 0 , ( byte ) 0 , TimeValue . PREC_DAY , 0 , 0 , 0 , calendarModel ) ;
public class HikariCPDataSources { /** * Gets the set of data sources ( name - > data source ) . * It contains the available data sources only . * @ return the map of name - > data source , empty if none . */ @ Override public Map < String , DataSource > getDataSources ( ) { } }
HashMap < String , DataSource > map = new HashMap < > ( ) ; for ( Map . Entry < String , HikariDataSource > entry : sources . entrySet ( ) ) { if ( entry . getValue ( ) != null ) { map . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } return map ;
public class FnInterval { /** * It converts the input { @ link String } elements into an { @ link Interval } . * The target { @ link String } elements represent the start and end of the { @ link Interval } . * The accepted input String [ ] are : * < ul > * < li > year , month , day , year , month , day < / li > * < li > year , month , day , hour , minute , year , month , day , hour , minute < / li > * < li > year , month , day , hour , minute , second , year , month , day , hour , minute , second < / li > * < li > year , month , day , hour , minute , second , millisecond , year , month , day , hour , minute , second , millisecond < / li > * < / ul > * @ param pattern string with the format of the input String * @ param dateTimeZone the the time zone ( { @ link DateTimeZone } ) to be used * @ return the { @ link Interval } created from the input and arguments */ public static final Function < String [ ] , Interval > strFieldArrayToInterval ( String pattern , DateTimeZone dateTimeZone ) { } }
return new StringFieldArrayToInterval ( pattern , dateTimeZone ) ;
public class JRDF { /** * Tells whether the given resources are equivalent . * Two resources are equivalent if their URIs match . * @ param u1 * first resource . * @ param u2 * second resource . * @ return true if equivalent , false otherwise . */ public static boolean sameResource ( URIReference u1 , URIReference u2 ) { } }
return sameResource ( u1 , u2 . getURI ( ) . toString ( ) ) ;
public class AWSRDSDataClient { /** * Executes any SQL statement on the target database synchronously * @ param executeSqlRequest * Execute SQL Request * @ return Result of the ExecuteSql operation returned by the service . * @ throws BadRequestException * Invalid Request exception * @ throws ForbiddenException * Access denied exception * @ throws InternalServerErrorException * Internal service error * @ throws ServiceUnavailableErrorException * Internal service unavailable error * @ sample AWSRDSData . ExecuteSql * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / rds - data - 2018-08-01 / ExecuteSql " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ExecuteSqlResult executeSql ( ExecuteSqlRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeExecuteSql ( request ) ;
public class Stream { /** * Returns { @ code Stream } with distinct elements ( as determined by { @ code hashCode } * and { @ code equals } methods ) according to the given classifier function . * < p > This is a stateful intermediate operation . * < p > Example : * < pre > * classifier : ( str ) - & gt ; str . length ( ) * stream : [ " a " , " bc " , " d " , " ef " , " ghij " ] * result : [ " a " , " bc " , " ghij " ] * < / pre > * @ param < K > the type of the result of classifier function * @ param classifier the classifier function * @ return the new stream * @ since 1.1.8 */ @ NotNull public < K > Stream < T > distinctBy ( @ NotNull Function < ? super T , ? extends K > classifier ) { } }
return new Stream < T > ( params , new ObjDistinctBy < T , K > ( iterator , classifier ) ) ;
public class EntryViewBase { /** * Makes the entry view " bounce " by applying a scale transition . This is a * good way to make an entry stand out , e . g . when it receives the keyboard * focus . */ public final void bounce ( ) { } }
ScaleTransition transition = new ScaleTransition ( Duration . millis ( 200 ) , this ) ; setCache ( true ) ; setCacheHint ( CacheHint . SCALE ) ; transition . setAutoReverse ( true ) ; transition . setFromX ( 1 ) ; transition . setToX ( .8 ) ; transition . setFromY ( 1 ) ; transition . setToY ( .8 ) ; transition . setCycleCount ( 2 ) ; transition . setOnFinished ( evt -> setCache ( false ) ) ; transition . play ( ) ;
public class ProblemTypeValidator { /** * Validate the parameter . * @ param name The name of the parameter ( e . g . " - host " ) . * @ param value The value of the parameter that we need to validate * @ throws ParameterException Thrown if the value of the parameter is invalid . */ @ Override public void validate ( String name , String value ) throws ParameterException { } }
if ( ! value . equals ( ArbiterCliGenerator . REGRESSION ) || value . equals ( ArbiterCliGenerator . CLASSIFICIATION ) ) { throw new ParameterException ( "Problem type can only be " + ArbiterCliGenerator . REGRESSION + " or " + ArbiterCliGenerator . CLASSIFICIATION ) ; }
public class CascadeDeleteRepositoryDecorator { /** * Guarantee that referenced entities for attributes with cascade delete are loaded , e . g . in case * entity is a lazy or partial entity . */ private void prepareCascadeDeletes ( Entity entity ) { } }
getCascadeDeleteAttributes ( ) . forEach ( attr -> entity . get ( attr . getName ( ) ) ) ;
public class Workdiary { /** * Get Workdiary * @ param company Company ID * @ param date Date * @ param params ( Optional ) Parameters * @ throwsJSONException If error occurred * @ return { @ link JSONObject } */ public JSONObject get ( String company , String date , HashMap < String , String > params ) throws JSONException { } }
return oClient . get ( "/team/v3/workdiaries/companies/" + company + "/" + date , params ) ;
public class HistoryServerStaticFileServerHandler { @ Override public void channelRead0 ( ChannelHandlerContext ctx , RoutedRequest routedRequest ) throws Exception { } }
String requestPath = routedRequest . getPath ( ) ; respondWithFile ( ctx , routedRequest . getRequest ( ) , requestPath ) ;
public class ServerStopEndpoint { /** * Stops the server . * @ return The message */ @ Write ( consumes = { } }
} ) public Object stop ( ) { try { return message ; } finally { Thread thread = new Thread ( this :: stopServer ) ; thread . setContextClassLoader ( getClass ( ) . getClassLoader ( ) ) ; thread . start ( ) ; }
public class MemoryData { /** * todo default to root ? */ public MemoryFileAttributes addDir ( EightyPath dir , Principals principals ) { } }
if ( content . get ( dir ) . isPresent ( ) ) { throw new IllegalArgumentException ( "path exists already" ) ; } if ( ! dir . isAbsolute ( ) || dir . getParent ( ) == null ) { throw new IllegalArgumentException ( "path not absolute or without parent" ) ; } PathContent parentContent = content . getOrThrow ( childGetParent ( dir ) , ( ) -> new IllegalArgumentException ( "parent does not exist" ) ) ; content . put ( dir , PathContent . newDir ( principals ) ) ; parentContent . kids . add ( dir ) ; parentContent . attis . setLastModifiedTime ( ) ; parentContent . attis . setLastAccessTime ( ) ; return parentContent . attis ;
public class AxisDeserializer { /** * 设置json , typeOfT , context值 * @ param json * @ param typeOfT * @ param context */ @ Override public Axis deserialize ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) throws JsonParseException { } }
final JsonObject jsonObject = json . getAsJsonObject ( ) ; String _type = jsonObject . get ( "type" ) . getAsString ( ) ; AxisType type = AxisType . valueOf ( _type ) ; Axis axis = null ; switch ( type ) { case category : axis = context . deserialize ( jsonObject , CategoryAxis . class ) ; break ; case value : axis = context . deserialize ( jsonObject , ValueAxis . class ) ; break ; case time : axis = context . deserialize ( jsonObject , TimeAxis . class ) ; break ; } return axis ;
public class SeLionBuildInfo { /** * Returns values for build time info * @ param property * The { @ link SeLionBuildProperty } of interest * @ return The build time value . < / br > < / br > The fall back value which can be obtained via * { @ link SeLionBuildProperty # getFallBackValue ( ) } if the build time property is not defined . */ public static String getBuildValue ( SeLionBuildProperty property ) { } }
return getInfo ( ) . getProperty ( property . getPropertyValue ( ) , property . getFallBackValue ( ) ) ;
public class Database { /** * Adds the default account ( used by Firefox ) . * @ throws Exception on exception */ public void addDefaultAccount ( ) throws Exception { } }
Account defaultAccount = new Account ( ) ; defaultAccount . setName ( "Defaults" ) ; defaultAccount . setId ( Account . DEFAULT_ACCOUNT_URI ) ; defaultAccount . setDesc ( "Default settings for URLs not elsewhere in this list" ) ; defaultAccount . setUrlComponents ( EnumSet . of ( UrlComponents . Domain ) ) ; addAccount ( rootAccount , defaultAccount ) ;
public class JCalendarDualField { /** * Handle action listener ( button press ) . * If they press the button , display the calendar popup . * @ param e The actionevent . */ public void actionPerformed ( ActionEvent e ) { } }
if ( ( m_button != null ) && ( e . getSource ( ) == m_button ) ) { JCalendarPopup popup = JCalendarPopup . createCalendarPopup ( ( Date ) this . getControlValue ( ) , m_button ) ; popup . addPropertyChangeListener ( this ) ; } else if ( ( m_buttonTime != null ) && ( e . getSource ( ) == m_buttonTime ) ) { JTimePopup popup = JTimePopup . createTimePopup ( ( Date ) this . getControlValue ( ) , m_buttonTime ) ; popup . addPropertyChangeListener ( this ) ; }
public class MiscUtil {
    /**
     * Tells whether the given column name ends with one of the patterns given
     * in parameter. Not case sensitive. An exact (case-insensitive) match also
     * counts as "ends with".
     *
     * @param name the column name to test
     * @param patterns the candidate suffixes
     * @return true when {@code name} ends with any pattern, ignoring case
     */
    public static boolean endsWithIgnoreCase(String name, Iterable<String> patterns) {
        // Hoisted: upper-case the name once instead of once per pattern.
        final String nameUpper = name.toUpperCase();
        for (String pattern : patterns) {
            // endsWith() already returns true for an exact match, so the
            // former separate equals() test was redundant and is dropped.
            if (nameUpper.endsWith(pattern.toUpperCase())) {
                return true;
            }
        }
        return false;
    }
}
public class ClassReader { /** * Creates a label without the Label . DEBUG flag set , for the given offset . * The label is created with a call to { @ link # readLabel } and its * Label . DEBUG flag is cleared . * @ param offset * a bytecode offset in a method . * @ param labels * the already created labels , indexed by their offset . * @ return a Label without the Label . DEBUG flag set . */ private Label createLabel ( int offset , Label [ ] labels ) { } }
Label label = readLabel ( offset , labels ) ; label . status &= ~ Label . DEBUG ; return label ;
public class CleverTapInstanceConfig { /** * convenience to construct the internal only default config */ @ SuppressWarnings ( { } }
"unused" , "WeakerAccess" } ) protected static CleverTapInstanceConfig createDefaultInstance ( Context context , @ NonNull String accountId , @ NonNull String accountToken , String accountRegion ) { return new CleverTapInstanceConfig ( context , accountId , accountToken , accountRegion , true ) ;
public class PollTcpManagerNio { /** * Returns a jni select manager . */ public static PollTcpManagerNio create ( ) { } }
synchronized ( _nioSelectManager ) { if ( _nioSelectManager . get ( ) == null ) { PollTcpManagerNio selectManager = new PollTcpManagerNio ( ) ; if ( selectManager . start ( ) ) { _nioSelectManager . set ( selectManager ) ; } } return _nioSelectManager . get ( ) ; }
public class PicketBoxSecurityContext { /** * { @ inheritDoc } */ public String [ ] getRoles ( ) { } }
String [ ] roles = null ; org . jboss . security . identity . RoleGroup pbRoles = delegator . getUtil ( ) . getRoles ( ) ; if ( pbRoles != null ) { List < String > l = new ArrayList < String > ( pbRoles . getRoles ( ) . size ( ) ) ; for ( org . jboss . security . identity . Role role : pbRoles . getRoles ( ) ) { l . add ( role . getRoleName ( ) ) ; } roles = l . toArray ( new String [ l . size ( ) ] ) ; } return roles ;