signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RDBMEntityGroupStore { /** * Find the groups that this group member belongs to . * @ param gm the group member in question * @ return java . util . Iterator */ @ Override public Iterator findParentGroups ( IGroupMember gm ) throws GroupsException { } }
if ( gm . isGroup ( ) ) { IEntityGroup group = ( IEntityGroup ) gm ; return findParentGroups ( group ) ; } else { IEntity ent = ( IEntity ) gm ; return findParentGroups ( ent ) ; }
public class SimpleSolrPersistentProperty { /** * ( non - Javadoc ) * @ see org . springframework . data . solr . core . mapping . SolrPersistentProperty # isSearchable ( ) */ @ Override public boolean isSearchable ( ) { } }
if ( isIdProperty ( ) ) { return true ; } Indexed indexedAnnotation = getIndexAnnotation ( ) ; return indexedAnnotation != null && indexedAnnotation . searchable ( ) ;
public class KeyVaultClientBaseImpl {
    /**
     * Permanently deletes the specified secret.
     * The purge deleted secret operation removes the secret permanently, without
     * the possibility of recovery. This operation can only be enabled on a
     * soft-delete enabled vault and requires the secrets/purge permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param secretName The name of the secret.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> purgeDeletedSecretAsync(String vaultBaseUrl, String secretName, final ServiceCallback<Void> serviceCallback) {
        // Bridge the service-response overload into a ServiceFuture that
        // notifies the supplied callback.
        return ServiceFuture.fromResponse(purgeDeletedSecretWithServiceResponseAsync(vaultBaseUrl, secretName), serviceCallback);
    }
}
public class FnBigDecimal {
    /**
     * Determines whether the target object is null or not.
     *
     * @return a Function yielding true if the target object is null, false if not.
     */
    public static final Function<BigDecimal, Boolean> isNull() {
        // The generic null-check from FnObject works for any type; the double
        // cast (raw, then re-parameterized) narrows it to BigDecimal without
        // creating a new function instance.
        return (Function<BigDecimal, Boolean>) ((Function) FnObject.isNull());
    }
}
public class JsonResponseFuture { /** * Sets the JSON response of this promise . * @ param response the RPC response */ public void setResponse ( JsonRpcResponse response ) { } }
if ( response . isError ( ) ) { setException ( response . error ( ) ) ; return ; } try { set ( ( V ) Messages . fromJson ( method . outputBuilder ( ) , response . result ( ) ) ) ; } catch ( Exception e ) { setException ( e ) ; }
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the IfcConstraintEnum EEnum, lazily resolving and caching it from
     * the registered package's classifier list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcConstraintEnum() {
        // Lazily cache the enum; classifier index 799 is fixed by the
        // generated package metamodel — do not change it by hand.
        if (ifcConstraintEnumEEnum == null) {
            ifcConstraintEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(799);
        }
        return ifcConstraintEnumEEnum;
    }
}
public class DescribeDBClusterSnapshotsResult { /** * Provides a list of DB cluster snapshots for the user . * @ param dBClusterSnapshots * Provides a list of DB cluster snapshots for the user . */ public void setDBClusterSnapshots ( java . util . Collection < DBClusterSnapshot > dBClusterSnapshots ) { } }
if ( dBClusterSnapshots == null ) { this . dBClusterSnapshots = null ; return ; } this . dBClusterSnapshots = new java . util . ArrayList < DBClusterSnapshot > ( dBClusterSnapshots ) ;
public class Record { /** * Gets the fields at the given positions into an array . * If at any position a field is null , then this method returns false . * All fields that have been successfully read until the failing read are correctly contained in the record . * All other fields are not set . * @ param positions The positions of the fields to get . * @ param targets The values into which the content of the fields is put . * @ return True if all fields were successfully read , false if some read failed . */ public boolean getFieldsInto ( int [ ] positions , Value [ ] targets ) { } }
for ( int i = 0 ; i < positions . length ; i ++ ) { if ( ! getFieldInto ( positions [ i ] , targets [ i ] ) ) { return false ; } } return true ;
public class UIContextHolder {
    /**
     * A utility function to iterate to the primary (top most) context and return it.
     *
     * @param uic the UIContext to retrieve the primary context for.
     * @return the primary context for the given context, or null when uic is null.
     */
    public static UIContext getPrimaryUIContext(final UIContext uic) {
        if (uic == null) {
            return null;
        }
        UIContext primary = null;
        UIContext current = uic;
        // Walk down the delegate chain until a non-delegate context (or a
        // delegate with no backing) is reached; that context is the primary.
        while (primary == null) {
            if (current instanceof UIContextDelegate) {
                UIContext backing = ((UIContextDelegate) current).getBacking();
                if (backing != null) {
                    current = backing;
                } else {
                    // This case should probably never happen.
                    primary = current;
                    LOG.warn("UIContextDelegate found without a backing context");
                }
            } else {
                primary = current;
            }
        }
        return primary;
    }
}
public class JDBCStatement {
    /**
     * Executes the given SQL statement, which may return multiple results.
     * In some (uncommon) situations, a single SQL statement may return multiple
     * result sets and/or update counts. The <code>execute</code> method executes
     * an SQL statement and indicates the form of the first result. Use
     * <code>getResultSet</code> or <code>getUpdateCount</code> to retrieve the
     * result, and <code>getMoreResults</code> to move to any subsequent result(s).
     *
     * @param sql any SQL statement
     * @return <code>true</code> if the first result is a <code>ResultSet</code>
     *         object; <code>false</code> if it is an update count or there are
     *         no results
     * @exception SQLException if a database access error occurs or this method
     *            is called on a closed <code>Statement</code>
     * @see #getResultSet
     * @see #getUpdateCount
     * @see #getMoreResults
     */
    public synchronized boolean execute(String sql) throws SQLException {
        // Execute allowing any result form, with no generated-key retrieval.
        fetchResult(sql, StatementTypes.RETURN_ANY, JDBCStatementBase.NO_GENERATED_KEYS, null, null);
        // True when the first result is row data rather than an update count.
        return resultIn.isData();
    }
}
public class Input {
    /**
     * Reads a 1-9 byte variable-length encoded long.
     *
     * @param optimizePositive if false, the result is zig-zag decoded so small
     *        negative values also use few bytes
     * @see #canReadLong()
     */
    public long readVarLong(boolean optimizePositive) throws KryoException {
        // Fast path needs all 9 potential bytes buffered; otherwise fall back
        // to the bounds-checked slow path.
        if (require(1) < 9) return readVarLong_slow(optimizePositive);
        int p = position;
        int b = buffer[p++];
        long result = b & 0x7F;
        if ((b & 0x80) != 0) { // high bit set => another byte follows
            byte[] buffer = this.buffer; // local alias for faster access
            b = buffer[p++];
            result |= (b & 0x7F) << 7;
            if ((b & 0x80) != 0) {
                b = buffer[p++];
                result |= (b & 0x7F) << 14;
                if ((b & 0x80) != 0) {
                    b = buffer[p++];
                    result |= (b & 0x7F) << 21;
                    if ((b & 0x80) != 0) {
                        // From shift 28 on, widen to long before shifting so
                        // bits are not lost in 32-bit arithmetic.
                        b = buffer[p++];
                        result |= (long) (b & 0x7F) << 28;
                        if ((b & 0x80) != 0) {
                            b = buffer[p++];
                            result |= (long) (b & 0x7F) << 35;
                            if ((b & 0x80) != 0) {
                                b = buffer[p++];
                                result |= (long) (b & 0x7F) << 42;
                                if ((b & 0x80) != 0) {
                                    b = buffer[p++];
                                    result |= (long) (b & 0x7F) << 49;
                                    if ((b & 0x80) != 0) {
                                        b = buffer[p++];
                                        // Ninth byte contributes all 8 bits.
                                        result |= (long) b << 56;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        position = p;
        // Zig-zag decode unless the caller optimized for positive values.
        return optimizePositive ? result : ((result >>> 1) ^ -(result & 1));
    }
}
public class NFRule {
    /**
     * This function is used by parse() to match the text being parsed against a
     * possible prefix string. If they match, pp is advanced past the matched
     * prefix and the unparsed remainder is returned. If they don't match, the
     * whole string is returned and pp is left unchanged.
     *
     * @param text The string being parsed
     * @param prefix The text to match against
     * @param pp On entry, ignored and assumed to be 0. On exit, points to the
     *        first unmatched character (assuming the whole prefix matched), or
     *        is unchanged (if the whole prefix didn't match).
     * @return If things match, this is the unparsed part of "text"; if they
     *         didn't match, this is "text".
     */
    private String stripPrefix(String text, String prefix, ParsePosition pp) {
        // if the prefix text is empty, dump out without doing anything
        if (prefix.length() == 0) {
            return text;
        } else {
            // otherwise, use prefixLength() to match the beginning of
            // "text" against "prefix". This function returns the
            // number of characters from "text" that matched (or 0 if
            // we didn't match the whole prefix)
            int pfl = prefixLength(text, prefix);
            if (pfl != 0) {
                // if we got a successful match, update the parse position
                // and strip the prefix off of "text"
                pp.setIndex(pp.getIndex() + pfl);
                return text.substring(pfl);
                // if we didn't get a successful match, leave everything alone
            } else {
                return text;
            }
        }
    }
}
public class FileUtil { /** * Gets the contents of the given file , as a byte array . * @ param pFile the file to get content from * @ return the content of the file as a byte array . * @ throws IOException if the read operation fails */ public static byte [ ] read ( File pFile ) throws IOException { } }
// Custom implementation , as we know the size of a file if ( ! pFile . exists ( ) ) { throw new FileNotFoundException ( pFile . toString ( ) ) ; } byte [ ] bytes = new byte [ ( int ) pFile . length ( ) ] ; InputStream in = null ; try { // Use buffer size two times byte array , to avoid i / o bottleneck in = new BufferedInputStream ( new FileInputStream ( pFile ) , BUF_SIZE * 2 ) ; int off = 0 ; int len ; while ( ( len = in . read ( bytes , off , in . available ( ) ) ) != - 1 && ( off < bytes . length ) ) { off += len ; // System . out . println ( " read : " + len ) ; } } // Just pass any IOException on up the stack finally { close ( in ) ; } return bytes ;
public class TaskInfo { /** * Initialization for a fixed - rate or fixed - delay task . * @ param isFixedRate indicates if this is a fixed - rate or fixed - delay task * @ param initialDelay milliseconds before first execution * @ param interval milliseconds between task executions */ @ Trivial void initForRepeatingTask ( boolean isFixedRate , long initialDelay , long interval ) { } }
this . initialDelay = initialDelay ; this . interval = interval ; this . isFixedRate = isFixedRate ;
public class CPDefinitionInventoryUtil {
    /**
     * Returns the cp definition inventories before and after the current cp
     * definition inventory in the ordered set where uuid = &#63; and companyId = &#63;.
     *
     * @param CPDefinitionInventoryId the primary key of the current cp definition inventory
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next cp definition inventory
     * @throws NoSuchCPDefinitionInventoryException if a cp definition inventory with the primary key could not be found
     */
    public static CPDefinitionInventory[] findByUuid_C_PrevAndNext(long CPDefinitionInventoryId, String uuid, long companyId, OrderByComparator<CPDefinitionInventory> orderByComparator) throws com.liferay.commerce.exception.NoSuchCPDefinitionInventoryException {
        // Static facade: forwards directly to the persistence implementation.
        return getPersistence().findByUuid_C_PrevAndNext(CPDefinitionInventoryId, uuid, companyId, orderByComparator);
    }
}
public class UpdateGeoMatchSetRequest { /** * An array of < code > GeoMatchSetUpdate < / code > objects that you want to insert into or delete from an * < a > GeoMatchSet < / a > . For more information , see the applicable data types : * < ul > * < li > * < a > GeoMatchSetUpdate < / a > : Contains < code > Action < / code > and < code > GeoMatchConstraint < / code > * < / li > * < li > * < a > GeoMatchConstraint < / a > : Contains < code > Type < / code > and < code > Value < / code > * You can have only one < code > Type < / code > and < code > Value < / code > per < code > GeoMatchConstraint < / code > . To add * multiple countries , include multiple < code > GeoMatchSetUpdate < / code > objects in your request . * < / li > * < / ul > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUpdates ( java . util . Collection ) } or { @ link # withUpdates ( java . util . Collection ) } if you want to override * the existing values . * @ param updates * An array of < code > GeoMatchSetUpdate < / code > objects that you want to insert into or delete from an * < a > GeoMatchSet < / a > . For more information , see the applicable data types : < / p > * < ul > * < li > * < a > GeoMatchSetUpdate < / a > : Contains < code > Action < / code > and < code > GeoMatchConstraint < / code > * < / li > * < li > * < a > GeoMatchConstraint < / a > : Contains < code > Type < / code > and < code > Value < / code > * You can have only one < code > Type < / code > and < code > Value < / code > per < code > GeoMatchConstraint < / code > . To add * multiple countries , include multiple < code > GeoMatchSetUpdate < / code > objects in your request . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateGeoMatchSetRequest withUpdates ( GeoMatchSetUpdate ... updates ) { } }
if ( this . updates == null ) { setUpdates ( new java . util . ArrayList < GeoMatchSetUpdate > ( updates . length ) ) ; } for ( GeoMatchSetUpdate ele : updates ) { this . updates . add ( ele ) ; } return this ;
public class JarExtensionHandler {
    /**
     * Indicate whether the passed extension is a webjar.
     *
     * @param extension the extension to test
     * @return true if the passed extension is a webjar, false otherwise
     * @since 9.0RC1
     */
    public static boolean isWebjar(Extension extension) {
        // Ideally webjar extensions carry the "webjar" type outright.
        return extension.getType().equals(WEBJAR)
            // But that's not the case for webjar.org releases (i.e. most of the
            // webjars): assume an "org.webjars:*" / "org.webjars.*" id means webjar.
            || StringUtils.startsWithAny(extension.getId().getId(), "org.webjars:", "org.webjars.")
            // Nor for contrib extensions supporting XWiki older than 9.0RC1,
            // which use a custom property ignored by those older versions.
            || JarExtensionHandler.WEBJAR.equals(extension.getProperty(JarExtensionHandler.PROPERTY_TYPE));
    }
}
public class StreamSegments {
    /**
     * Checks that replacementSegments provided are consistent with the segments
     * that are currently being used.
     *
     * @param replacedSegment The segment on which EOS was reached
     * @param replacementSegments The StreamSegmentsWithPredecessors to verify
     */
    private void verifyReplacementRange(SegmentWithRange replacedSegment, StreamSegmentsWithPredecessors replacementSegments) {
        log.debug("Verification of replacement segments {} with the current segments {}", replacementSegments, segments);
        Map<Long, List<SegmentWithRange>> replacementRanges = replacementSegments.getReplacementRanges();
        List<SegmentWithRange> replacements = replacementRanges.get(replacedSegment.getSegment().getSegmentId());
        Preconditions.checkArgument(replacements != null, "Replacement segments did not contain replacements for segment being replaced");
        if (replacementRanges.size() == 1) {
            // Simple split: the replacements must cover exactly the replaced
            // segment's key-space range.
            Preconditions.checkArgument(replacedSegment.getHigh() == getUpperBound(replacements));
            Preconditions.checkArgument(replacedSegment.getLow() == getLowerBound(replacements));
        } else {
            // Multiple predecessor ranges (presumably a merge): the replacements
            // may span more than the replaced range, but must contain it.
            Preconditions.checkArgument(replacedSegment.getHigh() <= getUpperBound(replacements));
            Preconditions.checkArgument(replacedSegment.getLow() >= getLowerBound(replacements));
        }
        // Every replacement range must overlap segments currently tracked.
        for (Entry<Long, List<SegmentWithRange>> ranges : replacementRanges.entrySet()) {
            Entry<Double, SegmentWithRange> upperReplacedSegment = segments.floorEntry(getUpperBound(ranges.getValue()));
            Entry<Double, SegmentWithRange> lowerReplacedSegment = segments.higherEntry(getLowerBound(ranges.getValue()));
            Preconditions.checkArgument(upperReplacedSegment != null, "Missing replaced replacement segments %s", replacementSegments);
            Preconditions.checkArgument(lowerReplacedSegment != null, "Missing replaced replacement segments %s", replacementSegments);
        }
    }
}
public class MeteredBalancingPolicy { /** * Determines the current load on this instance when smart rebalancing is enabled . * This load is determined by the sum of all of this node ' s meters ' one minute rate . */ public double myLoad ( ) { } }
double load = 0d ; /* LOG . debug ( cluster . loadMap . toString ) ; LOG . debug ( cluster . myWorkUnits . toString ) ; */ for ( String wu : cluster . myWorkUnits ) { Double d = cluster . getWorkUnitLoad ( wu ) ; if ( d != null ) { load += d ; } } return load ;
public class RobotExtensions { /** * Type the given string with the given robot . * @ param robot * the robot * @ param input * the input * @ throws NoSuchFieldException * the no such field exception * @ throws IllegalAccessException * the illegal access exception */ public static void typeString ( final Robot robot , final String input ) throws NoSuchFieldException , IllegalAccessException { } }
if ( input != null && ! input . isEmpty ( ) ) { for ( final char character : input . toCharArray ( ) ) { typeCharacter ( robot , character ) ; } }
public class DateTimeUtil {
    /**
     * Turn Date into "yyyy-MM-ddTHH:mm:ss".
     *
     * @param val date
     * @return String "yyyy-MM-ddTHH:mm:ss"
     */
    public static String rfcDateTime(final Date val) {
        // SimpleDateFormat is not thread-safe, so all access to the shared
        // formatter (including the time-zone mutation) is serialized on it.
        synchronized (rfcDateTimeFormat) {
            try {
                rfcDateTimeFormat.setTimeZone(Timezones.getDefaultTz());
            } catch (TimezonesException tze) {
                // No sensible recovery if the default tz cannot be resolved.
                throw new RuntimeException(tze);
            }
            return rfcDateTimeFormat.format(val);
        }
    }
}
public class Request {
    /**
     * Returns the current session associated with this request,
     * or if the request does not have a session, creates one.
     *
     * @return the session associated with this request
     */
    public Session session() {
        // Lazily (re)create the wrapper when none exists yet or the cached
        // one has been marked invalid.
        if (session == null || !validSession) {
            validSession(true);
            session = new Session(servletRequest.getSession(), this);
        }
        return session;
    }
}
public class EncodingXmlWriter { /** * Method that will try to output the content as specified . If * the content passed in has embedded " - - " in it , it will either * add an intervening space between consequtive hyphens ( if content * fixing is enabled ) , or return the offset of the first hyphen in * multi - hyphen sequence . */ @ Override public int writeComment ( String data ) throws IOException { } }
writeAscii ( "<!--" ) ; int ix = writeCommentContent ( data ) ; if ( ix >= 0 ) { // unfixable ' - - ' ? return ix ; } writeAscii ( "-->" ) ; return - 1 ;
public class JexlScriptExecutor { /** * < pre > * 1 . 接受JexlScriptContext上下文 * 2 . script针对对应name下的script脚本 * < / pre > */ public Object evaluate ( ScriptContext context , String script ) { } }
Expression expr = engine . createExpression ( script ) ; return expr . evaluate ( ( JexlContext ) context ) ;
public class CommerceWarehouseUtil {
    /**
     * Returns the last commerce warehouse in the ordered set where
     * groupId = &#63; and commerceCountryId = &#63;.
     *
     * @param groupId the group ID
     * @param commerceCountryId the commerce country ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce warehouse, or <code>null</code> if a matching commerce warehouse could not be found
     */
    public static CommerceWarehouse fetchByG_C_Last(long groupId, long commerceCountryId, OrderByComparator<CommerceWarehouse> orderByComparator) {
        // Static facade: forwards directly to the persistence implementation.
        return getPersistence().fetchByG_C_Last(groupId, commerceCountryId, orderByComparator);
    }
}
public class FloatArray { /** * Inserts values into the array at the specified index . * @ param index * The index at which to insert the new values . * @ param items * A < code > FloatArray < / code > containing the values to insert . * @ return A value indicating if the array has changed . * @ throws IndexOutOfBoundsException * if < code > index & lt ; 0 | | index & gt ; size ( ) < / code > . */ public boolean addAll ( int index , FloatArray items ) { } }
if ( index < 0 || index > size ) { throw new IndexOutOfBoundsException ( ) ; } ensureCapacity ( size + items . size ) ; if ( index < size ) { for ( int i = size - 1 ; i >= index ; i -- ) { elements [ i + items . size ] = elements [ i ] ; } } for ( int i = 0 ; i < items . size ; i ++ ) { elements [ index ++ ] = items . elements [ i ] ; } size += items . size ; return items . size > 0 ;
public class NonBlockingByteArrayOutputStream {
    /**
     * Converts the buffer's contents into a string by decoding the bytes using
     * the specified {@link java.nio.charset.Charset}. The length of the new
     * <tt>String</tt> is a function of the charset, and hence may not be equal
     * to the length of the byte array. Malformed-input and unmappable-character
     * sequences are replaced with the charset's default replacement string; use
     * {@link java.nio.charset.CharsetDecoder} for more control over decoding.
     *
     * @param aCharset the charset to be used. May not be <code>null</code>.
     * @return String decoded from the buffer's contents.
     */
    @Nonnull
    public String getAsString(@Nonnull final Charset aCharset) {
        // Decode only the written portion [0, m_nCount) of the backing buffer.
        return new String(m_aBuf, 0, m_nCount, aCharset);
    }
}
public class Bus {
    /**
     * Adds a listener.
     *
     * @param listener The listener.
     * @param instance The instance associated with this listener.
     * @param <T> The listener event type.
     */
    public <T> void addListener(final Listener<T> listener, final Object instance) {
        final ListenerInstancePair<T> pair = new ListenerInstancePair<T>(listener, instance);
        this.listeners.add(pair);
        // Resolve the event type this listener observes from its generic
        // Listener<T> declaration.
        final Type observableType = GenericsUtil.getEntityGenericType(listener.getClass(), 0, Listener.class);
        final Class<?> observable = GenericsUtil.guessClazz(observableType);
        // Ensure a cache bucket exists for the listener's own event type.
        if (!this.cachedListeners.containsKey(observable)) {
            this.cachedListeners.put(observable, new LinkedList<ListenerInstancePair<?>>());
        }
        // Register the pair under every cached event type assignable to the
        // observed type, so events of subtypes also reach this listener.
        for (final Map.Entry<Class<?>, List<ListenerInstancePair<?>>> entry : this.cachedListeners.entrySet()) {
            final Class<?> type = entry.getKey();
            if (observable.isAssignableFrom(type)) {
                entry.getValue().add(pair);
            }
        }
    }
}
public class FieldList { /** * The removePropertyChangeListener method was generated to support the propertyChange field . * @ param listener The propery change listener to remove from my listeners . */ public synchronized void removePropertyChangeListener ( java . beans . PropertyChangeListener listener ) { } }
if ( propertyChange != null ) propertyChange . removePropertyChangeListener ( listener ) ;
public class BytecodeLoader { /** * Compile ( load from ) an archive , if it contains any . class files . */ @ Override public boolean shouldCompile ( ScriptArchive archive ) { } }
Set < String > entries = archive . getArchiveEntryNames ( ) ; boolean shouldCompile = false ; for ( String entry : entries ) { if ( entry . endsWith ( ".class" ) ) { shouldCompile = true ; } } return shouldCompile ;
public class RequestDiagnosticsJsonCommandHandler { /** * { @ inheritDoc } */ @ Override public void handle ( String chargingStationId , JsonObject commandObject , IdentityContext identityContext ) throws UserIdentityUnauthorizedException { } }
ChargingStationId csId = new ChargingStationId ( chargingStationId ) ; if ( ! commandAuthorizationService . isAuthorized ( csId , identityContext . getUserIdentity ( ) , RequestDiagnosticsCommand . class ) ) { throw new UserIdentityUnauthorizedException ( chargingStationId , identityContext . getUserIdentity ( ) , RequestDiagnosticsCommand . class ) ; } try { ChargingStation chargingStation = repository . findOne ( chargingStationId ) ; if ( chargingStation != null && chargingStation . communicationAllowed ( ) ) { RequestDiagnosticsApiCommand command = gson . fromJson ( commandObject , RequestDiagnosticsApiCommand . class ) ; commandGateway . send ( new RequestDiagnosticsCommand ( csId , new DiagnosticsUploadSettings ( command . getTargetLocation ( ) ) , identityContext ) , new CorrelationToken ( ) ) ; } } catch ( JsonSyntaxException ex ) { throw new IllegalArgumentException ( "Data transfer command not able to parse the payload, is your json correctly formatted?" , ex ) ; }
public class MethodBuilder { /** * Add proxy method to set hashcode to uniqueu id of binder */ private void addHashCode ( TypeSpec . Builder classBuilder ) { } }
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( "hashCode" ) . addModifiers ( Modifier . PUBLIC ) . returns ( int . class ) . addAnnotation ( Override . class ) . addStatement ( "return _binderID" ) ; classBuilder . addMethod ( methodBuilder . build ( ) ) ;
public class SFImpl {
    /**
     * Sets the raw data and notifies registered adapters of the change.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setRawData(byte[] newRawData) {
        byte[] oldRawData = rawData;
        rawData = newRawData;
        // Fire a SET notification so EMF adapters observe the old/new values.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BasePackage.SF__RAW_DATA, oldRawData, rawData));
    }
}
public class ComponentsInner {
    /**
     * Updates an existing component's tags. To update other fields use the
     * CreateOrUpdate method.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the Application Insights component resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ApplicationInsightsComponentInner object if successful.
     */
    public ApplicationInsightsComponentInner updateTags(String resourceGroupName, String resourceName) {
        // Blocking wrapper around the async overload: waits for the single
        // response and unwraps its body.
        return updateTagsWithServiceResponseAsync(resourceGroupName, resourceName).toBlocking().single().body();
    }
}
public class AndCondition {
    /**
     * Optimize and condition.
     *
     * @return and condition
     */
    public AndCondition optimize() {
        AndCondition result = new AndCondition();
        // Keep only instances whose runtime class is exactly Condition;
        // subclasses are deliberately dropped — presumably they cannot be
        // optimized here. NOTE(review): confirm against callers.
        for (Condition each : conditions) {
            if (Condition.class.equals(each.getClass())) {
                result.getConditions().add(each);
            }
        }
        // Guarantee a non-empty condition list for downstream consumers.
        if (result.getConditions().isEmpty()) {
            result.getConditions().add(new NullCondition());
        }
        return result;
    }
}
public class AWSShieldClient { /** * Authorizes the DDoS Response team ( DRT ) to access the specified Amazon S3 bucket containing your flow logs . You * can associate up to 10 Amazon S3 buckets with your subscription . * To use the services of the DRT and make an < code > AssociateDRTLogBucket < / code > request , you must be subscribed to * the < a href = " https : / / aws . amazon . com / premiumsupport / business - support / " > Business Support plan < / a > or the < a * href = " https : / / aws . amazon . com / premiumsupport / enterprise - support / " > Enterprise Support plan < / a > . * @ param associateDRTLogBucketRequest * @ return Result of the AssociateDRTLogBucket operation returned by the service . * @ throws InternalErrorException * Exception that indicates that a problem occurred with the service infrastructure . You can retry the * request . * @ throws InvalidOperationException * Exception that indicates that the operation would not cause any change to occur . * @ throws NoAssociatedRoleException * The ARN of the role that you specifed does not exist . * @ throws LimitsExceededException * Exception that indicates that the operation would exceed a limit . < / p > * < code > Type < / code > is the type of limit that would be exceeded . * < code > Limit < / code > is the threshold that would be exceeded . * @ throws InvalidParameterException * Exception that indicates that the parameters passed to the API are invalid . * @ throws AccessDeniedForDependencyException * In order to grant the necessary access to the DDoS Response Team , the user submitting * < code > AssociateDRTRole < / code > must have the < code > iam : PassRole < / code > permission . This error indicates * the user did not have the appropriate permissions . For more information , see < a * href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / id _ roles _ use _ passrole . html " > Granting a User * Permissions to Pass a Role to an AWS Service < / a > . 
* @ throws OptimisticLockException * Exception that indicates that the protection state has been modified by another client . You can retry the * request . * @ throws ResourceNotFoundException * Exception indicating the specified resource does not exist . * @ sample AWSShield . AssociateDRTLogBucket * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / shield - 2016-06-02 / AssociateDRTLogBucket " target = " _ top " > AWS * API Documentation < / a > */ @ Override public AssociateDRTLogBucketResult associateDRTLogBucket ( AssociateDRTLogBucketRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAssociateDRTLogBucket ( request ) ;
public class JMessageClient {
    /**
     * Set cross app no disturb.
     * https://docs.jiguang.cn/jmessage/server/rest_api_im/#api_1
     *
     * @param username Necessary
     * @param array CrossNoDisturb array
     * @return No content
     * @throws APIConnectionException connect exception
     * @throws APIRequestException request exception
     */
    public ResponseWrapper setCrossNoDisturb(String username, CrossNoDisturb[] array) throws APIConnectionException, APIRequestException {
        // Delegate to the cross-app client implementation.
        return _crossAppClient.setCrossNoDisturb(username, array);
    }
}
public class CacheElement {
    /**
     * Populates this cache element with the given authenticated state
     * and named values, then puts it in the valid state.
     * Note: Prior to the call, the element must be in the invalid state.
     * TODO: The predicates parameter is deprecated and should be removed.
     * For now, callers can avoid a warning by giving it as null.
     */
    public final void populate(Boolean authenticated, Set<?> predicates, Map<String, Set<?>> map, String errorMessage) {
        String m = m_cacheabbrev + " populate() ";
        logger.debug("{}>", m);
        try {
            if (predicates != null) {
                logger.warn(m + " predicates are deprecated; will be ignored");
            }
            // The element must currently be invalid before being populated.
            assertInvalid();
            if (errorMessage != null) {
                // A supplied error message means population failed upstream;
                // throwing routes control to the catch below, which leaves the
                // element invalidated with that message.
                logger.error(m + "errorMessage==" + errorMessage);
                throw new Exception(errorMessage);
            } else {
                validate(authenticated, map);
                // can't set expiration here -- don't have cache reference
                // can't set pwd here, don't have it
            }
        } catch (Throwable t) {
            // Any failure (including assertion errors) leaves the element
            // invalid rather than half-populated.
            logger.error(m + "invalidating to be sure");
            this.invalidate(errorMessage);
        } finally {
            logger.debug("{}<", m);
        }
    }
}
public class LogRecorder {
    /**
     * Gets a view of log records per agent matching this recorder.
     *
     * @return a map (sorted by display name) from computer to (nonempty) list of log records
     * @since 1.519
     */
    public Map<Computer, List<LogRecord>> getSlaveLogRecords() {
        // Sort computers by display name using a locale-aware collator.
        Map<Computer, List<LogRecord>> result = new TreeMap<Computer, List<LogRecord>>(new Comparator<Computer>() {
            final Collator COLL = Collator.getInstance();

            public int compare(Computer c1, Computer c2) {
                return COLL.compare(c1.getDisplayName(), c2.getDisplayName());
            }
        });
        for (Computer c : Jenkins.getInstance().getComputers()) {
            if (c.getName().length() == 0) {
                continue; // master
            }
            // Keep only records matched by at least one of this recorder's
            // targets; break after the first matching target so each record
            // is added once.
            List<LogRecord> recs = new ArrayList<LogRecord>();
            try {
                for (LogRecord rec : c.getLogRecords()) {
                    for (Target t : targets) {
                        if (t.includes(rec)) {
                            recs.add(rec);
                            break;
                        }
                    }
                }
            } catch (IOException x) {
                continue; // records unavailable for this computer; skip it
            } catch (InterruptedException x) {
                continue; // interrupted while fetching; skip this computer
            }
            if (!recs.isEmpty()) {
                result.put(c, recs);
            }
        }
        return result;
    }
}
public class ChronicleMapBuilder {
    /**
     * {@inheritDoc}
     *
     * <p>Example: if keys in your map(s) are English words in {@link String} form, average
     * English word length is 5.1, configure average key size of 6:
     * <pre>{@code
     * ChronicleMap<String, LongValue> wordFrequencies = ChronicleMapBuilder
     *     .of(String.class, LongValue.class)
     *     .entries(50000)
     *     .averageKeySize(6)
     *     .create();}</pre>
     * (Note that 6 is chosen as average key size in bytes despite strings in Java being
     * UTF-16 encoded (each character takes 2 bytes on-heap), because the default off-heap
     * {@link String} encoding is UTF-8 in {@code ChronicleMap}.)
     *
     * @param averageKeySize the average size of the key
     * @throws IllegalStateException {@inheritDoc}
     * @throws IllegalArgumentException {@inheritDoc}
     * @see #averageKey(Object)
     * @see #constantKeySizeBySample(Object)
     * @see #averageValueSize(double)
     * @see #actualChunkSize(int)
     */
    @Override
    public ChronicleMapBuilder<K, V> averageKeySize(double averageKeySize) {
        // reject the call if the key type already has a statically known size
        checkSizeIsStaticallyKnown(keyBuilder, "Key");
        // validate the value (positive, finite, ...)
        checkAverageSize(averageKeySize, "key");
        this.averageKeySize = averageKeySize;
        // an explicit average size supersedes any previously supplied sample/average key
        averageKey = null;
        sampleKey = null;
        return this;
    }
}
public class ListResourceServersRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * <p>Generated marshaller: the order and set of field bindings mirror the service's
     * wire contract and should not be edited by hand.
     *
     * @param listResourceServersRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller receiving the field bindings
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(ListResourceServersRequest listResourceServersRequest, ProtocolMarshaller protocolMarshaller) {
        if (listResourceServersRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listResourceServersRequest.getUserPoolId(), USERPOOLID_BINDING);
            protocolMarshaller.marshall(listResourceServersRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listResourceServersRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure in the SDK's client exception, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Json { /** * Converts netscaler resources to Json string . * @ param resources nitro resources . * @ param id sessionId . * @ param option options class object . * @ return returns a String */ public String resource_to_string ( base_resource resources [ ] , String id , options option , Boolean warning , String onerror ) { } }
String objecttype = resources [ 0 ] . get_object_type ( ) ; String request = "{" ; if ( ( option != null && option . get_action ( ) != null ) || ( warning != null && warning == true ) || ( ! onerror . equals ( "" ) ) ) { request = request + "\"params\":{" ; if ( option != null ) { if ( option . get_action ( ) != null ) { request = request + "\"action\":\"" + option . get_action ( ) + "\"," ; } } if ( warning != null && warning == true ) { request = request + "\"warning\":\"yes\"," ; } if ( ( ! onerror . equals ( "" ) ) ) { request = request + "\"onerror\":\"" + onerror + "\"" ; } request = request + "}," ; } String sessionid = id ; if ( sessionid != null ) request = request + "\"sessionid\":\"" + sessionid + "\"," ; request = request + "\"" + objecttype + "\":[" ; for ( int i = 0 ; i < resources . length ; i ++ ) { String str = this . resource_to_string ( resources [ i ] ) ; request = request + str + "," ; } request = request + "]}" ; return request ;
public class RelationUtil { /** * Get the vector field type information from a relation . * @ param relation relation * @ param < V > Vector type * @ return Vector field type information */ public static < V extends FeatureVector < ? > > VectorFieldTypeInformation < V > assumeVectorField ( Relation < V > relation ) { } }
try { return ( ( VectorFieldTypeInformation < V > ) relation . getDataTypeInformation ( ) ) ; } catch ( Exception e ) { throw new UnsupportedOperationException ( "Expected a vector field, got type information: " + relation . getDataTypeInformation ( ) . toString ( ) , e ) ; }
public class SwipeRecyclerView { /** * open menu . * @ param position position . * @ param direction use { @ link # LEFT _ DIRECTION } , { @ link # RIGHT _ DIRECTION } . * @ param duration time millis . */ public void smoothOpenMenu ( int position , @ DirectionMode int direction , int duration ) { } }
if ( mOldSwipedLayout != null ) { if ( mOldSwipedLayout . isMenuOpen ( ) ) { mOldSwipedLayout . smoothCloseMenu ( ) ; } } position += getHeaderCount ( ) ; ViewHolder vh = findViewHolderForAdapterPosition ( position ) ; if ( vh != null ) { View itemView = getSwipeMenuView ( vh . itemView ) ; if ( itemView instanceof SwipeMenuLayout ) { mOldSwipedLayout = ( SwipeMenuLayout ) itemView ; if ( direction == RIGHT_DIRECTION ) { mOldTouchedPosition = position ; mOldSwipedLayout . smoothOpenRightMenu ( duration ) ; } else if ( direction == LEFT_DIRECTION ) { mOldTouchedPosition = position ; mOldSwipedLayout . smoothOpenLeftMenu ( duration ) ; } } }
public class Configuration { /** * Returns the default configuration provided by { @ link # defaults ( ) } . * @ return Non - null string */ protected String defaultConfiguration ( ) { } }
final StringBuilder bldr = new StringBuilder ( ) ; for ( final Entry < String , String > entry : defaults ( ) . entrySet ( ) ) { final String name = entry . getKey ( ) ; final String value = entry . getValue ( ) ; bldr . append ( name ) ; bldr . append ( " " ) ; bldr . append ( NAME_VALUE_DELIMITER ) ; bldr . append ( " " ) ; bldr . append ( value ) ; bldr . append ( "\n" ) ; } return bldr . toString ( ) ;
public class AssociationBuilder { /** * Retrieves associated entities from secondary index . There are two * alternatives here : * 1 . Via running Lucene query into Lucene powered secondary index . 2. * Searching into a secondary index by custom secondary index class provided * by user . * @ see PersistenceProperties # KUNDERA _ INDEX _ HOME _ DIR * @ see PersistenceProperties # KUNDERA _ INDEXER _ CLASS * TODO : Which secondary index to use should be transparent . All we * should bother about is indexer . index ( ) , indexer . search ( ) etc . */ List getAssociatedEntitiesFromIndex ( Class owningClazz , Object entityId , Class < ? > childClass , Client childClient ) { } }
List associatedEntities ; IndexManager indexManager = childClient . getIndexManager ( ) ; Map < String , Object > results = indexManager != null ? indexManager . search ( owningClazz , childClass , entityId ) : new HashMap < String , Object > ( ) ; Set rsSet = results != null ? new HashSet ( results . values ( ) ) : new HashSet ( ) ; if ( childClass . equals ( owningClazz ) ) { associatedEntities = ( List < Object > ) childClient . findAll ( childClass , null , rsSet . toArray ( new Object [ ] { } ) ) ; } else { associatedEntities = ( List < Object > ) childClient . findAll ( childClass , null , rsSet . toArray ( new Object [ ] { } ) ) ; } return associatedEntities ;
public class GraknSparkComputer { /** * When using a persistent context the running Context ' s configuration will override a passed * in configuration . Spark allows us to override these inherited properties via * SparkContext . setLocalProperty */ private static void updateLocalConfiguration ( final JavaSparkContext sparkContext , final Configuration configuration ) { } }
/* * While we could enumerate over the entire SparkConfiguration and copy into the Thread * Local properties of the Spark Context this could cause adverse effects with future * versions of Spark . Since the api for setting multiple local properties at once is * restricted as private , we will only set those properties we know can effect SparkGraphComputer * Execution rather than applying the entire configuration . */ final String [ ] validPropertyNames = { "spark.job.description" , "spark.jobGroup.id" , "spark.job.interruptOnCancel" , "spark.scheduler.pool" } ; for ( String propertyName : validPropertyNames ) { String propertyValue = configuration . get ( propertyName ) ; if ( propertyValue != null ) { LOGGER . info ( "Setting Thread Local SparkContext Property - " + propertyName + " : " + propertyValue ) ; sparkContext . setLocalProperty ( propertyName , configuration . get ( propertyName ) ) ; } }
public class SnapshotSiteProcessor {
    /**
     * Create an output buffer for each task.
     *
     * <p>Reserves one buffer per task from the shared pool counter atomically
     * (all-or-nothing): either every task gets a buffer or none are taken.
     *
     * @param tableTasks tasks needing one output buffer each
     * @param noSchedule passed through to buffer creation; semantics defined by
     *                   {@code createNewBuffer} — TODO confirm
     * @return null if there aren't enough buffers left in the pool.
     */
    private List<BBContainer> getOutputBuffers(Collection<SnapshotTableTask> tableTasks, boolean noSchedule) {
        final int desired = tableTasks.size();
        // CAS loop: atomically claim 'desired' buffers from the available count,
        // retrying if another thread changed the count concurrently.
        while (true) {
            int available = m_availableSnapshotBuffers.get();
            // Limit the number of buffers used concurrently
            if (desired > available) {
                return null;
            }
            if (m_availableSnapshotBuffers.compareAndSet(available, available - desired)) break;
        }
        List<BBContainer> outputBuffers = new ArrayList<BBContainer>(tableTasks.size());
        for (int ii = 0; ii < tableTasks.size(); ii++) {
            // allocate a pooled direct byte buffer for each task
            final BBContainer origin = DBBPool.allocateDirectAndPool(m_snapshotBufferLength);
            outputBuffers.add(createNewBuffer(origin, noSchedule));
        }
        return outputBuffers;
    }
}
public class UpdateNFSFileShareRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * <p>Generated marshaller: the set and order of field bindings mirror the service's
     * wire contract and should not be edited by hand.
     *
     * @param updateNFSFileShareRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller receiving the field bindings
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(UpdateNFSFileShareRequest updateNFSFileShareRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateNFSFileShareRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateNFSFileShareRequest.getFileShareARN(), FILESHAREARN_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getKMSEncrypted(), KMSENCRYPTED_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getKMSKey(), KMSKEY_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getNFSFileShareDefaults(), NFSFILESHAREDEFAULTS_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getDefaultStorageClass(), DEFAULTSTORAGECLASS_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getObjectACL(), OBJECTACL_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getClientList(), CLIENTLIST_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getSquash(), SQUASH_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getReadOnly(), READONLY_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getGuessMIMETypeEnabled(), GUESSMIMETYPEENABLED_BINDING);
            protocolMarshaller.marshall(updateNFSFileShareRequest.getRequesterPays(), REQUESTERPAYS_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure in the SDK's client exception, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CukedoctorMojo {
    /**
     * Mojo entry point: finds cucumber JSON feature files, renders an AsciiDoc living
     * documentation file, and converts it to the configured output format(s).
     *
     * <p>Pipeline: resolve search directory → export plugin options as system properties
     * (read by cukedoctor) → parse features → build document attributes → render .adoc
     * → convert with Asciidoctor (for {@code Format.all}, renders html5 then pdf).
     *
     * @throws MojoExecutionException on execution failure
     * @throws MojoFailureException on build failure
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        if (skip) {
            getLog().info("Skipping cukedoctor-maven-plugin");
            return;
        }
        // Resolve the directory to scan: explicit featuresDir, else the build output
        // directory, else (last resort) the project base directory.
        String startDir = null;
        if (featuresDir != null) {
            startDir = featuresDir;
        }
        if (startDir == null || !new File(startDir).exists()) {
            startDir = project.getBuild().getDirectory() != null ? new File(project.getBuild().getDirectory()).getAbsolutePath() : null;
            if (startDir == null || !new File(startDir).exists()) {
                // last resource use project dir
                startDir = project.getBasedir().getAbsolutePath();
            }
        }
        // Export the optional plugin parameters as system properties; presumably these are
        // read by the cukedoctor library at render time — TODO confirm.
        if (introChapterDir != null) {
            System.setProperty("INTRO_CHAPTER_DIR", introChapterDir);
        }
        if (hideFeaturesSection != null) {
            System.setProperty("HIDE_FEATURES_SECTION", Boolean.toString(hideFeaturesSection));
        }
        if (hideSummarySection != null) {
            System.setProperty("HIDE_SUMMARY_SECTION", Boolean.toString(hideSummarySection));
        }
        if (hideScenarioKeyword != null) {
            System.setProperty("HIDE_SCENARIO_KEYWORD", Boolean.toString(hideScenarioKeyword));
        }
        if (hideStepTime != null) {
            System.setProperty("HIDE_STEP_TIME", Boolean.toString(hideStepTime));
        }
        if (hideTags != null) {
            System.setProperty("HIDE_TAGS", Boolean.toString(hideTags));
        }
        getLog().info("Searching cucumber features in path: " + startDir);
        List<Feature> featuresFound = FeatureParser.findAndParse(startDir);
        if (featuresFound == null || featuresFound.isEmpty()) {
            getLog().warn("No cucumber json files found in " + startDir);
            return;
        } else {
            getLog().info("Generating living documentation for " + featuresFound.size() + " feature(s)...");
        }
        // Default labels when not configured.
        if (chapterLabel == null) {
            chapterLabel = "Chapter";
        }
        if (versionLabel == null) {
            versionLabel = "Version";
        }
        configExtensions();
        // Build the document attributes for the primary (configured) backend.
        DocumentAttributes documentAttributes = GlobalConfig.newInstance().getDocumentAttributes().backend(format.name().toLowerCase()).toc(toc.name().toLowerCase()).revNumber(docVersion).hardBreaks(hardBreaks).numbered(numbered).chapterLabel(chapterLabel).versionLabel(versionLabel);
        if (documentTitle == null) {
            documentTitle = "Living Documentation";
        }
        if (sourceHighlighter != null) {
            documentAttributes.sourceHighlighter(sourceHighlighter);
        }
        documentAttributes.docTitle(documentTitle);
        CukedoctorConverter converter = Cukedoctor.instance(featuresFound, documentAttributes);
        // Derive the .adoc target name from the configured output file name.
        String targetFile = "";
        if (outputFileName.contains(".")) {
            targetFile = outputFileName.substring(0, outputFileName.lastIndexOf(".")) + ".adoc";
        } else {
            targetFile = outputFileName + ".adoc";
        }
        String targetDir = getDocumentationDir();
        String pathToSave = targetDir + targetFile;
        converter.setFilename(pathToSave); // needed by docinfo, pdf-theme
        generatedFile = converter.renderDocumentation();
        File adocFile = FileUtil.saveFile(pathToSave, generatedFile);
        Asciidoctor asciidoctor = Asciidoctor.Factory.create();
        if (format.equals(Format.all)) {
            // "all" renders twice: once with the html5 backend, once with pdf.
            documentAttributes.backend(Format.html5.name().toLowerCase());
            generateDocumentation(documentAttributes, adocFile, asciidoctor);
            // pdf backend: rebuilt from scratch (not GlobalConfig) and re-rendered,
            // overwriting the same .adoc path before conversion.
            documentAttributes = new DocumentAttributes().backend(Format.pdf.name()).toc(toc.name().toLowerCase()).revNumber(docVersion).hardBreaks(hardBreaks).numbered(numbered).chapterLabel(chapterLabel).versionLabel(versionLabel);
            documentAttributes.docTitle(documentTitle);
            converter = Cukedoctor.instance(featuresFound, documentAttributes);
            converter.setFilename(pathToSave); // needed by docinfo, pdf-theme
            generatedFile = converter.renderDocumentation();
            adocFile = FileUtil.saveFile(pathToSave, generatedFile);
            generateDocumentation(documentAttributes, adocFile, asciidoctor);
        } else {
            generateDocumentation(documentAttributes, adocFile, asciidoctor);
        }
        asciidoctor.shutdown();
    }
}
public class UserPreferences { /** * Gets the hashtable . * @ param name the name * @ return the hashtable */ public static Hashtable < String , String > getHashtable ( final String name ) { } }
final Hashtable < String , String > hash = new Hashtable < String , String > ( ) ; try { final String configStr = UserPreferences . get ( fixKey ( name ) , "" ) ; if ( ! configStr . equals ( "" ) ) { final String [ ] rows = configStr . split ( ";" ) ; for ( final String row : rows ) { final String [ ] split = row . split ( ":" ) ; if ( split . length == 2 ) { final String key = split [ 0 ] ; final String value = split [ 1 ] ; hash . put ( key , value ) ; } } } } catch ( final Exception e ) { // just eat the exception to avoid any system crash on system issues } return hash ;
public class MFPPush {
    /**
     * Set the listener class to receive the notification status changes.
     *
     * <p>Any status changes that arrived before a listener was registered are queued in
     * {@code pendingStatus}; they are flushed to the new listener here and the queue is
     * cleared, all under the {@code pendingStatus} lock so concurrent producers cannot
     * interleave.
     *
     * @param statusListener Mandatory listener class. When the notification status changes,
     *        {@link MFPPushNotificationStatusListener}.onStatusChange is called.
     */
    public void setNotificationStatusListener(MFPPushNotificationStatusListener statusListener) {
        this.statusListener = statusListener;
        synchronized (pendingStatus) {
            if (!pendingStatus.isEmpty()) {
                // replay every status change that was buffered while no listener was set
                for (Map.Entry<String, MFPPushNotificationStatus> entry : pendingStatus.entrySet()) {
                    changeStatus(entry.getKey(), entry.getValue());
                }
                pendingStatus.clear();
            }
        }
    }
}
public class AbstractBlueprintBeanDefinitionParser { /** * < p > addPropertyValueFromElement . < / p > * @ param id a { @ link java . lang . String } object . * @ param node a { @ link org . w3c . dom . Element } object . * @ param context a { @ link org . apache . aries . blueprint . ParserContext } object . * @ param beanMetadata a { @ link org . apache . aries . blueprint . mutable . MutableBeanMetadata } object . */ protected void addPropertyValueFromElement ( String id , Element node , ParserContext context , MutableBeanMetadata beanMetadata ) { } }
String attribute = node . getAttribute ( id ) ; beanMetadata . addProperty ( id , createStringValue ( context , attribute ) ) ;
public class AmazonGuardDutyClient {
    /**
     * Unarchives Amazon GuardDuty findings specified by the list of finding IDs.
     *
     * @param request UnarchiveFindings request body.
     * @return Result of the UnarchiveFindings operation returned by the service.
     * @throws BadRequestException 400 response
     * @throws InternalServerErrorException 500 response
     * @sample AmazonGuardDuty.UnarchiveFindings
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/guardduty-2017-11-28/UnarchiveFindings"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public UnarchiveFindingsResult unarchiveFindings(UnarchiveFindingsRequest request) {
        // apply client-side request handlers/validation before dispatching
        request = beforeClientExecution(request);
        return executeUnarchiveFindings(request);
    }
}
public class CompareFileExtensions {
    /**
     * Compare files with a fixed set of comparison options.
     *
     * <p>NOTE(review): delegates to an overload taking six boolean flags
     * (true, false, false, true, false, true). The flag meanings are not visible from
     * here — presumably they select which file attributes/content aspects to compare;
     * confirm against the overload's signature and consider documenting each flag.
     *
     * @param sourceFile the source file
     * @param fileToCompare the file to compare
     * @return the comparison result bean
     */
    public static IFileContentResultBean compareFiles(final File sourceFile, final File fileToCompare) {
        return compareFiles(sourceFile, fileToCompare, true, false, false, true, false, true);
    }
}
public class Sign {
    /**
     * Initializes this signer: creates the {@link Signature} instance for the given
     * algorithm, then delegates key setup to the superclass.
     *
     * @param algorithm  signature algorithm name (e.g. "SHA256withRSA")
     * @param privateKey private key used for signing, may be null if only verifying
     * @param publicKey  public key used for verification, may be null if only signing
     * @return this, for chaining
     * @throws CryptoException if the algorithm is not available
     */
    @Override
    public Sign init(String algorithm, PrivateKey privateKey, PublicKey publicKey) {
        try {
            signature = Signature.getInstance(algorithm);
        } catch (NoSuchAlgorithmException e) {
            // wrap the checked JCA exception in the library's unchecked exception
            throw new CryptoException(e);
        }
        super.init(algorithm, privateKey, publicKey);
        return this;
    }
}
public class TopologyUtils { /** * TODO ( nbhagat ) : libs is dependent on pants for building . Instead take classpath as argument . */ public static String makeClassPath ( TopologyAPI . Topology topology , String originalPackageFile ) { } }
String originalPackage = new File ( originalPackageFile ) . getName ( ) ; StringBuilder classPathBuilder = new StringBuilder ( ) ; // TODO ( nbhagat ) : Take type of package as argument . if ( originalPackage . endsWith ( ".jar" ) ) { // Bundled jar classPathBuilder . append ( originalPackage ) ; } else { // Bundled tar String topologyJar = originalPackage . replace ( ".tar.gz" , "" ) . replace ( ".tar" , "" ) + ".jar" ; classPathBuilder . append ( String . format ( "libs/*:%s" , topologyJar ) ) ; } String additionalClasspath = TopologyUtils . getAdditionalClassPath ( topology ) ; if ( ! additionalClasspath . isEmpty ( ) ) { classPathBuilder . append ( ":" ) ; classPathBuilder . append ( TopologyUtils . getAdditionalClassPath ( topology ) ) ; } return classPathBuilder . toString ( ) ;
public class Dklu_analyze_given {
    /**
     * Order the matrix with BTF (or not), then use natural or given ordering P and Q
     * on the blocks. P and Q are interpreted as identity if null.
     *
     * @param n A is n-by-n
     * @param Ap size n+1, column pointers
     * @param Ai size nz, row indices
     * @param Puser size n, user's row permutation (may be null)
     * @param Quser size n, user's column permutation (may be null)
     * @param Common common control/status object; status is set on failure
     * @return the symbolic analysis object, or null on invalid input / out of memory
     */
    public static KLU_symbolic klu_analyze_given(int n, int[] Ap, int[] Ai, int[] Puser, int[] Quser, KLU_common Common) {
        KLU_symbolic Symbolic;
        double[] Lnz;
        int nblocks, nz, block, maxblock, nzoff, p, pend, do_btf, k;
        int[] P, Q, R;
        /* determine if input matrix is valid, and get # of nonzeros */
        Symbolic = klu_alloc_symbolic(n, Ap, Ai, Common);
        if (Symbolic == null) {
            return (null);
        }
        P = Symbolic.P;
        Q = Symbolic.Q;
        R = Symbolic.R;
        Lnz = Symbolic.Lnz;
        nz = Symbolic.nz;
        /* Q = Quser, or identity if Quser is null */
        if (Quser == null) {
            for (k = 0; k < n; k++) {
                Q[k] = k;
            }
        } else {
            for (k = 0; k < n; k++) {
                Q[k] = Quser[k];
            }
        }
        /* get the control parameters for BTF and ordering method */
        do_btf = Common.btf;
        do_btf = (do_btf != 0) ? TRUE : FALSE;
        Symbolic.ordering = 2; // ordering 2 == "given" (user-supplied P/Q)
        Symbolic.do_btf = do_btf;
        /* find the block triangular form, if requested */
        if (do_btf != 0) {
            /* get workspace for BTF_strongcomp */
            int[] Pinv, Work, Bi;
            int k1, k2, nk, oldcol;
            Work = klu_malloc_int(4 * n, Common);
            Pinv = klu_malloc_int(n, Common);
            // Bi holds row indices permuted by Puser; alias Ai when no permutation given
            if (Puser != null) {
                Bi = klu_malloc_int(nz + 1, Common);
            } else {
                Bi = Ai;
            }
            if (Common.status < KLU_OK) {
                /* out of memory */
                // klu_free(Work, 4*n, sizeof(int), Common);
                Work = null;
                // klu_free(Pinv, n, sizeof(int), Common);
                Pinv = null;
                if (Puser != null) {
                    // klu_free(Bi, nz+1, sizeof(int), Common);
                    Bi = null;
                }
                // klu_free_symbolic(Symbolic, Common);
                Symbolic = null;
                Common.status = KLU_OUT_OF_MEMORY;
                return (null);
            }
            /* B = Puser * A */
            if (Puser != null) {
                for (k = 0; k < n; k++) {
                    Pinv[Puser[k]] = k;
                }
                for (p = 0; p < nz; p++) {
                    Bi[p] = Pinv[Ai[p]];
                }
            }
            /* find the strongly-connected components */
            /* modifies Q, and determines P and R */
            nblocks = btf_strongcomp(n, Ap, Bi, Q, P, R);
            /* P = P * Puser */
            if (Puser != null) {
                for (k = 0; k < n; k++) {
                    Work[k] = Puser[P[k]];
                }
                for (k = 0; k < n; k++) {
                    P[k] = Work[k];
                }
            }
            /* Pinv = inverse of P */
            for (k = 0; k < n; k++) {
                Pinv[P[k]] = k;
            }
            /* analyze each block */
            nzoff = 0; /* nz in off-diagonal part */
            maxblock = 1; /* size of the largest block */
            for (block = 0; block < nblocks; block++) {
                /* the block is from rows/columns k1 to k2-1 */
                k1 = R[block];
                k2 = R[block + 1];
                nk = k2 - k1;
                PRINTF("BLOCK %d, k1 %d k2-1 %d nk %d\n", block, k1, k2 - 1, nk);
                maxblock = MAX(maxblock, nk);
                /* scan the kth block, C */
                for (k = k1; k < k2; k++) {
                    oldcol = Q[k];
                    pend = Ap[oldcol + 1];
                    for (p = Ap[oldcol]; p < pend; p++) {
                        // entries whose permuted row index falls above the block are off-diagonal
                        if (Pinv[Ai[p]] < k1) {
                            nzoff++;
                        }
                    }
                }
                /* fill-in not estimated */
                Lnz[block] = EMPTY;
            }
            /* free all workspace */
            // klu_free(Work, 4*n, sizeof(int), Common);
            Work = null;
            // klu_free(Pinv, n, sizeof(int), Common);
            Pinv = null;
            if (Puser != null) {
                // klu_free(Bi, nz+1, sizeof(int), Common);
                Bi = null;
            }
        } else {
            /* BTF not requested: treat the whole matrix as one block */
            nzoff = 0;
            nblocks = 1;
            maxblock = n;
            R[0] = 0;
            R[1] = n;
            Lnz[0] = EMPTY;
            /* P = Puser, or identity if Puser is null */
            for (k = 0; k < n; k++) {
                P[k] = (Puser == null) ? k : Puser[k];
            }
        }
        /* return the symbolic object */
        Symbolic.nblocks = nblocks;
        Symbolic.maxblock = maxblock;
        Symbolic.lnz = EMPTY;
        Symbolic.unz = EMPTY;
        Symbolic.nzoff = nzoff;
        return (Symbolic);
    }
}
public class PresentsSession {
    /**
     * Derived client classes can override this member to populate the bootstrap data with
     * additional information. They should be sure to call <code>super.populateBootstrapData</code>
     * before doing their own populating, however.
     *
     * <p><em>Note:</em> This function will be called on the dobjmgr thread which means that
     * object manipulations are OK, but client instance manipulations must be done carefully.
     *
     * @param data the bootstrap record to fill in
     */
    protected void populateBootstrapData(BootstrapData data) {
        // give them the connection id
        Connection conn = getConnection();
        if (conn != null) {
            data.connectionId = conn.getConnectionId();
        } else {
            log.warning("Connection disappeared before we could send bootstrap response.", "client", this);
            return; // stop here as we're just going to throw away this bootstrap
        }
        // and the client object id
        data.clientOid = _clobj.getOid();
        // fill in the list of bootstrap services
        if (_areq.getBootGroups() == null) {
            log.warning("Client provided no invocation service boot groups? " + this);
            data.services = Lists.newArrayList();
        } else {
            data.services = _invmgr.getBootstrapServices(_areq.getBootGroups());
        }
    }
}
public class MimeMessageHelper { /** * Fills the { @ link Message } instance with reply - to address . * @ param email The message in which the recipients are defined . * @ param message The javax message that needs to be filled with reply - to address . * @ throws UnsupportedEncodingException See { @ link InternetAddress # InternetAddress ( String , String ) } . * @ throws MessagingException See { @ link Message # setReplyTo ( Address [ ] ) } */ static void setReplyTo ( final Email email , final Message message ) throws UnsupportedEncodingException , MessagingException { } }
final Recipient replyToRecipient = email . getReplyToRecipient ( ) ; if ( replyToRecipient != null ) { final InternetAddress replyToAddress = new InternetAddress ( replyToRecipient . getAddress ( ) , replyToRecipient . getName ( ) , CHARACTER_ENCODING ) ; message . setReplyTo ( new Address [ ] { replyToAddress } ) ; }
public class Fingerprint {
    /**
     * This is for the remote API: lists usage ranges, filtered to the items the current
     * user is allowed to discover (administrators see everything).
     *
     * <p>NOTE(review): {@code Jenkins.getInstance()} can return null during shutdown in
     * some Jenkins versions, which would NPE below — confirm against the core version
     * this targets.
     *
     * @return per-item usage ranges visible to the calling user
     */
    @Exported(name = "usage")
    public @Nonnull List<RangeItem> _getUsages() {
        List<RangeItem> r = new ArrayList<>();
        final Jenkins instance = Jenkins.getInstance();
        for (Entry<String, RangeSet> e : usages.entrySet()) {
            final String itemName = e.getKey();
            // admins see all usages; others only items they can discover
            if (instance.hasPermission(Jenkins.ADMINISTER) || canDiscoverItem(itemName)) {
                r.add(new RangeItem(itemName, e.getValue()));
            }
        }
        return r;
    }
}
public class ClientBehaviorBase { /** * { @ inheritDoc } */ public void decode ( FacesContext context , UIComponent component ) { } }
if ( context == null ) { throw new NullPointerException ( "context" ) ; } if ( component == null ) { throw new NullPointerException ( "component" ) ; } // If a BehaviorRenderer is available for the specified behavior renderer type , this method delegates // to the BehaviorRenderer ' s decode ( ) method . Otherwise , no decoding is performed . ClientBehaviorRenderer renderer = getRenderer ( context ) ; if ( renderer != null ) { renderer . decode ( context , component , this ) ; }
public class Log {
    /**
     * What a Terrible Failure: report a condition that should never happen. The error will
     * always be logged at level ASSERT with the call stack, using {@link SUBSYSTEM#MAIN}
     * as the default subsystem.
     *
     * @param tag used to identify the source of a log message; usually the class or
     *            activity where the log call occurs
     * @param msg the message you would like logged
     * @return the result of the underlying {@code wtf(SUBSYSTEM, String, String)} call
     */
    public static int wtf(String tag, String msg) {
        return wtf(SUBSYSTEM.MAIN, tag, msg);
    }
}
public class Interpreter {
    /**
     * Get the prompt string defined by the getBshPrompt() method in the global namespace.
     * This may be from the getBshPrompt() command or may be defined by the user as with
     * any other method.
     *
     * <p>The result is cached in the {@code prompt} field; defaults to "bsh % " if the
     * method is not defined or there is an error (including a non-String return, which
     * surfaces as a ClassCastException caught here).
     *
     * @return the prompt string, never null
     */
    private String getBshPrompt() {
        // return the cached prompt if we've already resolved it
        if (null != prompt)
            return prompt;
        try {
            prompt = (String) eval("getBshPrompt()");
        } catch (Exception e) {
            // deliberate catch-all: any script error falls back to the default prompt
            prompt = "bsh % ";
        }
        return prompt;
    }
}
public class NettyChannelBuilder {
    /**
     * SSL/TLS context to use instead of the system default. It must have been configured
     * with {@link GrpcSslContexts}, but options could have been overridden.
     *
     * @param sslContext the client SSL context, or null to revert to the default
     * @return this, for chaining
     * @throws IllegalArgumentException if a server-side context is supplied, or ALPN/H2
     *         support is missing from the context's negotiator
     */
    public NettyChannelBuilder sslContext(SslContext sslContext) {
        if (sslContext != null) {
            // a channel is a client; reject server contexts up front
            checkArgument(sslContext.isClient(), "Server SSL context can not be used for client channel");
            // HTTP/2 over TLS requires ALPN with h2 advertised
            GrpcSslContexts.ensureAlpnAndH2Enabled(sslContext.applicationProtocolNegotiator());
        }
        this.sslContext = sslContext;
        return this;
    }
}
public class AbstractScriptEngine { /** * Same as < code > eval ( Reader , Bindings ) < / code > except that the abstract * < code > eval ( String , ScriptContext ) < / code > is used . * @ param script A < code > String < / code > containing the source of the script . * @ param bindings A < code > Bindings < / code > to use as the < code > ENGINE _ SCOPE < / code > while the script * executes . * @ return The return value from < code > eval ( String , ScriptContext ) < / code > * @ throws ScriptException if an error occurs in script . * @ throws NullPointerException if any of the parameters is null . */ public Object eval ( String script , Bindings bindings ) throws ScriptException { } }
ScriptContext ctxt = getScriptContext ( bindings ) ; return eval ( script , ctxt ) ;
public class Period {
    /**
     * Returns a new instance with each element in this period multiplied by the specified
     * scalar.
     *
     * <p>This returns a period with each of the years, months and days units individually
     * multiplied. For example, a period of "2 years, -3 months and 4 days" multiplied by 3
     * will return "6 years, -9 months and 12 days". No normalization is performed.
     *
     * @param scalar the scalar to multiply by, not null
     * @return a {@code Period} based on this period with the amounts multiplied by the
     *         scalar, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public Period multipliedBy(int scalar) {
        // identity cases: multiplying zero or by one cannot change the value
        if (this == ZERO || scalar == 1) {
            return this;
        }
        // multiplyExact throws ArithmeticException on int overflow, per the contract above
        return create(Math.multiplyExact(years, scalar), Math.multiplyExact(months, scalar), Math.multiplyExact(days, scalar));
    }
}
public class DescribeEmergencyContactSettingsRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * <p>Generated marshaller: this request type has no members, hence the intentionally
     * empty try block — only the null check applies.
     *
     * @param describeEmergencyContactSettingsRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller (unused for this memberless request)
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeEmergencyContactSettingsRequest describeEmergencyContactSettingsRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeEmergencyContactSettingsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // no members to marshall for this request type
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ZipUtil {
    /**
     * Compresses the given file into a ZIP file.
     *
     * <p>The ZIP file must not be a directory and its parent directory must exist.
     * Delegates to the overload with the identity name mapper, so the entry keeps the
     * original file name.
     *
     * @param fileToPack file that needs to be zipped
     * @param destZipFile ZIP file that will be created or overwritten
     */
    public static void packEntry(File fileToPack, File destZipFile) {
        packEntry(fileToPack, destZipFile, IdentityNameMapper.INSTANCE);
    }
}
public class MirrorTable { /** * Read the record that matches this record ' s current key . < p > * @ exception DBException File exception . */ public boolean seek ( String strSeekSign ) throws DBException { } }
boolean bSuccess = super . seek ( strSeekSign ) ; Iterator < BaseTable > iterator = this . getTables ( ) ; while ( iterator . hasNext ( ) ) { BaseTable table = iterator . next ( ) ; if ( ( table != null ) && ( table != this . getNextTable ( ) ) ) this . syncTables ( table , this . getRecord ( ) ) ; } return bSuccess ;
public class SolrIndexer {
    /**
     * Indexes a single annotation payload into the 'anotar' Solr core.
     *
     * @param object the annotation's object
     * @param payload the annotation payload
     * @throws IndexerException if there were errors during indexing
     */
    private void annotate(DigitalObject object, Payload payload) throws IndexerException {
        String pid = payload.getId();
        // The object's own properties payload is not an annotation -- skip it.
        if (propertiesId.equals(pid)) {
            return;
        }
        try {
            Properties props = new Properties();
            props.setProperty("metaPid", pid);
            // Build the Solr document by running the configured rules script.
            String doc = indexByPythonScript(object, payload, null, ANOTAR_RULES_OID, props);
            if (doc != null) {
                // Wrap in <add> so the raw XML can be posted directly to /update.
                doc = "<add>" + doc + "</add>";
                anotar.request(new DirectXmlRequest("/update", doc));
                if (anotarAutoCommit) {
                    anotar.commit();
                }
            }
        } catch (Exception e) {
            // Best-effort: a failed annotation index is logged, not rethrown.
            log.error("Indexing failed!\n-----\n", e);
        }
    }
}
public class JMXClient {
    /**
     * Removes the listener both as an MBean notification listener and as a
     * connection notification listener.
     *
     * @param mbeanName object name of the MBean the listener was attached to
     * @param listener the listener to remove
     * @return true if successful, false if this client is not connected
     * @throws java.lang.Exception on JMX errors (e.g. listener not registered)
     */
    public boolean removeNotificationListener(String mbeanName, NotificationListener listener) throws Exception {
        if (isConnected()) {
            ObjectName objectName = new ObjectName(mbeanName);
            // Null filter and handback select the matching registration.
            mbsc.removeNotificationListener(objectName, listener, null, null);
            jmxc.removeConnectionNotificationListener(listener);
            return true;
        } else {
            return false;
        }
    }
}
public class Recycler { /** * Acquire on object instance of type T , either by reusing a previously recycled instance if possible , or if * there are no currently - unused instances , by allocating a new instance . * @ return Either a new or a recycled object instance . * @ throws E * if { @ link # newInstance ( ) } threw an exception of type E . * @ throws NullPointerException * if { @ link # newInstance ( ) } returned null . */ public T acquire ( ) throws E { } }
final T instance ; final T recycledInstance = unusedInstances . poll ( ) ; if ( recycledInstance == null ) { // Allocate a new instance - - may throw an exception of type E final T newInstance = newInstance ( ) ; if ( newInstance == null ) { throw new NullPointerException ( "Failed to allocate a new recyclable instance" ) ; } instance = newInstance ; } else { // Reuse an unused instance instance = recycledInstance ; } usedInstances . add ( instance ) ; return instance ;
public class Stylesheet {
    /**
     * Records an "xsl:strip-space" declaration for this stylesheet.
     *
     * @see <a href="http://www.w3.org/TR/xslt#strip">strip in XSLT Specification</a>
     * @param wsi WhiteSpaceInfo element to add to the list
     */
    public void setStripSpaces(WhiteSpaceInfo wsi) {
        // Lazily create the list: many stylesheets declare no strip-space elements.
        if (null == m_whitespaceStrippingElements) {
            m_whitespaceStrippingElements = new Vector();
        }
        m_whitespaceStrippingElements.addElement(wsi);
    }
}
public class CmsSetupStep04Modules {
    /**
     * Moves to the next setup step: validates that every selected component's
     * dependencies are also selected, resolves the selected components to
     * concrete module names, stores the module list in the setup bean and
     * advances the wizard. Shows an error dialog on an unfulfilled dependency.
     */
    private void forward() {
        // Collect the ids of all components whose checkbox is ticked.
        Set<String> selected = new HashSet<>();
        for (CheckBox checkbox : m_componentCheckboxes) {
            CmsSetupComponent component = (CmsSetupComponent)(checkbox.getData());
            if (checkbox.getValue().booleanValue()) {
                selected.add(component.getId());
            }
        }
        // Verify each selected component has all of its dependencies selected too.
        String error = null;
        for (String compId : selected) {
            CmsSetupComponent component = m_componentMap.get(compId);
            for (String dep : component.getDependencies()) {
                if (!selected.contains(dep)) {
                    error = "Unfulfilled dependency: The component "
                        + component.getName()
                        + " can not be installed because its dependency "
                        + m_componentMap.get(dep).getName()
                        + " is not selected";
                    break;
                }
            }
        }
        if (error == null) {
            // Map the selected components to the modules whose names they match.
            Set<String> modules = new HashSet<>();
            for (CmsSetupComponent component : m_componentMap.values()) {
                if (selected.contains(component.getId())) {
                    for (CmsModule module : m_context.getSetupBean().getAvailableModules().values()) {
                        if (component.match(module.getName())) {
                            modules.add(module.getName());
                        }
                    }
                }
            }
            List<String> moduleList = new ArrayList<>(modules);
            // The setup bean expects a '|'-separated module list.
            m_context.getSetupBean().setInstallModules(CmsStringUtil.listAsString(moduleList, "|"));
            m_context.stepForward();
        } else {
            CmsSetupErrorDialog.showErrorDialog(error, error);
        }
    }
}
public class MappingUtils { /** * Invokes class function using Reflection * @ param clazz Class which function would be invoked * @ param functionName function name * @ param parameters function parameters ( array of Class ) * @ param values function values ( array of Object ) * @ return function return * @ throws org . midao . jdbc . core . exception . MjdbcException in case function doesn ' t exists */ public static Object invokeStaticFunction ( Class clazz , String functionName , Class [ ] parameters , Object [ ] values ) throws MjdbcException { } }
Object result = null ; try { Method method = clazz . getMethod ( functionName , parameters ) ; method . setAccessible ( true ) ; result = method . invoke ( null , values ) ; } catch ( Exception ex ) { throw new MjdbcException ( ex ) ; } return result ;
public class DynamicReportBuilder { /** * Defines the text to show when the data source is empty . < br > * @ param text * @ param style : the style of the text * @ param showTitle : if true , the title is shown * @ param showColumnHeader : if true , the column headers are shown * @ return A Dynamic Report Builder */ public DynamicReportBuilder setWhenNoData ( String text , Style style , boolean showTitle , boolean showColumnHeader ) { } }
this . report . setWhenNoDataStyle ( style ) ; this . report . setWhenNoDataText ( text ) ; this . report . setWhenNoDataType ( DJConstants . WHEN_NO_DATA_TYPE_NO_DATA_SECTION ) ; this . report . setWhenNoDataShowColumnHeader ( showColumnHeader ) ; this . report . setWhenNoDataShowTitle ( showTitle ) ; return this ;
public class VariableNumMap { /** * Get the discrete variables in this map , ordered by variable * index . */ public final List < DiscreteVariable > getDiscreteVariables ( ) { } }
List < DiscreteVariable > discreteVars = new ArrayList < DiscreteVariable > ( ) ; for ( int i = 0 ; i < vars . length ; i ++ ) { if ( vars [ i ] instanceof DiscreteVariable ) { discreteVars . add ( ( DiscreteVariable ) vars [ i ] ) ; } } return discreteVars ;
public class CustomManualTaskActivity {
    /**
     * Creates a task instance unless the INSTANCE_ID_VAR attribute points to a
     * pre-existing instance. If the attribute is populated but the variable
     * value is null, the variable will be set to the newly-created instanceId.
     *
     * @throws ActivityException wrapping any failure during task creation,
     *         event registration or resume
     */
    @Override
    public void execute() throws ActivityException {
        Long instanceId = null; // pre-existing instanceId
        String instanceIdSpec = getInstanceIdVariable();
        if (instanceIdSpec != null) {
            Object value = getValue(instanceIdSpec);
            // Accept either a Long or anything whose string form parses as one.
            if (value instanceof Long)
                instanceId = (Long) value;
            else if (value != null)
                instanceId = Long.parseLong(value.toString());
        }
        try {
            if (instanceId == null) {
                // No pre-existing instance: create one, and publish its id back
                // into the variable when one was specified.
                TaskInstance taskInstance = createTaskInstance();
                instanceId = taskInstance.getTaskInstanceId();
                if (instanceIdSpec != null)
                    setValue(instanceIdSpec, instanceId);
            } else {
                // update secondary owner
                updateOwningTransition(instanceId);
            }
            if (needSuspend()) {
                // Park this activity until the matching task action event arrives.
                getEngine().createEventWaitInstance(getActivityInstanceId(), "TaskAction-" + getActivityInstanceId(), null, true, true);
                EventWaitInstance received = registerWaitEvents(false, true);
                // An event may already be waiting; resume immediately if so.
                if (received != null)
                    resume(getExternalEventInstanceDetails(received.getMessageDocumentId()), received.getCompletionCode());
            }
        } catch (Exception ex) {
            logger.severeException(ex.getMessage(), ex);
            throw new ActivityException(-1, ex.getMessage(), ex);
        }
    }
}
public class AbstractEvaluatorToPartitionStrategy {
    /**
     * Initializes the locations of the splits where we'd like to be loaded
     * into, marking every split as unallocated.
     *
     * @param splitsPerPartition a map containing the input splits per data partition
     */
    private void init(final Map<DistributedDataSetPartition, InputSplit[]> splitsPerPartition) {
        // Flatten the per-partition map into two parallel arrays.
        final Pair<InputSplit[], DistributedDataSetPartition[]> splitsAndPartitions = getSplitsAndPartitions(splitsPerPartition);
        final InputSplit[] splits = splitsAndPartitions.getFirst();
        final DistributedDataSetPartition[] partitions = splitsAndPartitions.getSecond();
        Validate.isTrue(splits.length == partitions.length);
        for (int splitNum = 0; splitNum < splits.length; splitNum++) {
            LOG.log(Level.FINE, "Processing split: " + splitNum);
            final InputSplit split = splits[splitNum];
            // Pair the split with its ordinal and owning partition, queue it as
            // unallocated, and record its preferred locations.
            final NumberedSplit<InputSplit> numberedSplit = new NumberedSplit<>(split, splitNum, partitions[splitNum]);
            unallocatedSplits.add(numberedSplit);
            updateLocations(numberedSplit);
        }
        if (LOG.isLoggable(Level.FINE)) {
            // Dump the computed location -> splits mapping for debugging.
            for (final Map.Entry<String, BlockingQueue<NumberedSplit<InputSplit>>> locSplit : locationToSplits.entrySet()) {
                LOG.log(Level.FINE, locSplit.getKey() + ": " + locSplit.getValue().toString());
            }
        }
    }
}
public class DemuxingIoHandler {
    /**
     * Deregisters the {@link MessageHandler} that handles sent messages of the
     * specified <code>type</code>.
     *
     * @return the removed handler if successfully removed, <tt>null</tt> otherwise
     */
    @SuppressWarnings("unchecked")
    public <E> MessageHandler<? super E> removeSentMessageHandler(Class<E> type) {
        // Invalidate the whole lookup cache: cached entries may resolve other
        // types to the handler being removed (via supertype lookups).
        sentMessageHandlerCache.clear();
        return (MessageHandler<? super E>) sentMessageHandlers.remove(type);
    }
}
public class BatchUtil {
    /**
     * Decides whether the element at index {@code i} of an ordered array of
     * {@code length} elements falls within the last {@code maxRetention}
     * entries and should therefore be retained.
     *
     * @param i zero-based index of the element in the ordered array
     * @param maxRetention total number of trailing elements to retain
     * @param length length of the ordered array
     * @return whether this element should be retained or not
     */
    public static boolean shouldRetain(int i, int maxRetention, int length) {
        // Elements at or beyond the cutoff lie within the trailing
        // maxRetention window; a non-positive cutoff retains everything.
        // (Replaces the redundant "(cond) ? true : false" of the original.)
        return i >= length - maxRetention;
    }
}
public class TQRootBean {
    /**
     * Adds a Text "common terms" expression (document store only). This
     * automatically makes the query a document store query.
     *
     * @param query the search text
     * @param options common-terms options applied to the expression
     * @return the root query bean, for fluent chaining
     */
    public R textCommonTerms(String query, TextCommonTerms options) {
        // Append to the current expression list; root is returned for fluency.
        peekExprList().textCommonTerms(query, options);
        return root;
    }
}
public class HtmlInput {
    /**
     * Captures events that occur anywhere on the page. Event values will be
     * relative to the page (not the rootElement); see getRelativeX(NativeEvent,
     * Element) and getRelativeY(NativeEvent, Element).
     *
     * @param name the DOM event name to listen for
     * @param handler the handler invoked for each event
     * @return a registration that removes the listener when no longer needed
     */
    static HandlerRegistration capturePageEvent(String name, EventHandler handler) {
        // Listener is attached to the whole document; the final 'true' flag is
        // presumably the capture-phase switch -- confirm against addEventListener.
        return addEventListener(Document.get(), name, handler, true);
    }
}
public class ZipFileSliceReader {
    /**
     * Gets a string of {@code lenBytes} bytes starting at the given offset
     * within the zipfile slice.
     *
     * @param off the offset to start reading from
     * @param lenBytes the length of the string in bytes
     * @return the decoded string
     * @throws IOException if an I/O exception occurs
     * @throws InterruptedException if the thread was interrupted
     */
    String getString(final long off, final int lenBytes) throws IOException, InterruptedException {
        // Reject reads that start before the slice or would run past its end.
        if (off < 0 || off > zipFileSlice.len - lenBytes) {
            throw new IndexOutOfBoundsException();
        }
        // Reuse the shared scratch buffer when it is large enough, to avoid
        // a per-call allocation.
        final byte[] scratchToUse = lenBytes <= scratch.length ? scratch : new byte[lenBytes];
        if (read(off, scratchToUse, 0, lenBytes) < lenBytes) {
            throw new EOFException("Unexpected EOF");
        }
        // Assume the entry names are encoded in UTF-8 (should be the case for all jars; the only other
        // valid zipfile charset is CP437, which is the same as ASCII for printable high-bit-clear chars)
        return new String(scratchToUse, 0, lenBytes, StandardCharsets.UTF_8);
    }
}
public class HiveMetastoreClientPool {
    /**
     * Gets a {@link HiveMetastoreClientPool} for the requested metastore URI,
     * sharing pools across different classes in the code base. Note that if a
     * pool already exists for that metastore, the max number of objects
     * available is unchanged and may be lower than requested by this method.
     *
     * @param properties {@link Properties} used to generate the pool
     * @param metastoreURI URI of the Hive metastore; if absent, use the default metastore
     * @return a shared {@link HiveMetastoreClientPool}
     * @throws IOException if pool creation fails
     */
    public static HiveMetastoreClientPool get(final Properties properties, final Optional<String> metastoreURI) throws IOException {
        // Lazily create the shared cache; synchronized so only one thread builds it.
        synchronized (HiveMetastoreClientPool.class) {
            if (poolCache == null) {
                poolCache = createPoolCache(properties);
            }
        }
        try {
            // The cache guarantees at most one pool is created per metastore URI.
            return poolCache.get(metastoreURI, new Callable<HiveMetastoreClientPool>() {
                @Override
                public HiveMetastoreClientPool call() throws Exception {
                    return new HiveMetastoreClientPool(properties, metastoreURI);
                }
            });
        } catch (ExecutionException ee) {
            // Unwrap to report the underlying cause of the failed pool creation.
            throw new IOException("Failed to get " + HiveMetastoreClientPool.class.getSimpleName(), ee.getCause());
        }
    }
}
public class OpenCmsCore { /** * Gets a string containing all keys and variations currently in the flex cache , for debug purposes . < p > * @ return a debug information string with the flex cache data */ public String getFlexCacheKeyDump ( ) { } }
if ( m_flexCache != null ) { StringBuffer buffer = new StringBuffer ( ) ; m_flexCache . dumpKeys ( buffer ) ; return buffer . toString ( ) ; } else { return null ; }
public class BigramExtractor {
    /**
     * Updates the statistics for the bigram formed from the provided left and
     * right token.
     *
     * @param left the left token in the bigram
     * @param right the right token in the bigram
     */
    private void processBigram(String left, String right) {
        TokenStats leftStats = getStatsFor(left);
        TokenStats rightStats = getStatsFor(right);
        // mark that both appeared
        leftStats.count++;
        rightStats.count++;
        // Mark the respective positions of each
        leftStats.leftCount++;
        rightStats.rightCount++;
        // Increase the number of bigrams seen
        numBigramsInCorpus++;
        // Update the bigram statistics: pack the two tokens' indices into a
        // single long (left in the high 32 bits, right in the low 32 bits).
        // NOTE(review): if an index could ever be negative, the OR would
        // sign-extend and corrupt the high bits -- assumes indices >= 0; confirm.
        long bigram = (((long) leftStats.index) << 32) | rightStats.index;
        Number curBigramCount = bigramCounts.get(bigram);
        int i = (curBigramCount == null) ? 1 : 1 + curBigramCount.intValue();
        // Compact the count into the smallest numeric type that can represent
        // it. This hopefully results in some space savings.
        Number val = null;
        if (i < Byte.MAX_VALUE)
            val = Byte.valueOf((byte) i);
        else if (i < Short.MAX_VALUE)
            val = Short.valueOf((short) i);
        else
            val = Integer.valueOf(i);
        bigramCounts.put(bigram, val);
    }
}
public class InJvmContainerExecutor {
    /**
     * Extracts {@link LocalResource}s from the {@link Container} by reflectively
     * reading its private 'localizedResources' field. As a side effect, copies
     * Tez plan/config files into the current working directory, which Tez
     * requires at runtime.
     *
     * @param container the container whose localized resources are read
     * @return the paths of all localized resources
     */
    @SuppressWarnings("unchecked")
    private Set<Path> extractUserProvidedClassPathEntries(Container container) {
        Map<Path, List<String>> localizedResources;
        try {
            // The field is private with no public accessor, hence reflection.
            Field lf = container.getClass().getDeclaredField("localizedResources");
            lf.setAccessible(true);
            localizedResources = (Map<Path, List<String>>) lf.get(container);
            Set<Path> paths = localizedResources.keySet();
            // Needed for Tez
            for (Path path : paths) {
                if (path.toString().endsWith("tez-conf.pb") || path.toString().endsWith("tez-dag.pb")) {
                    File sourceFile = new File(path.toUri());
                    File targetFile = new File(System.getenv(Environment.PWD.name()) + "/" + sourceFile.getName());
                    FileUtils.copyFile(sourceFile, targetFile);
                }
            }
            return paths;
        } catch (Exception e) {
            // Reflection failures are unrecoverable here; surface them unchecked.
            throw new RuntimeException(e);
        }
    }
}
class HarmonicSum {
    /**
     * Computes the harmonic sum H(n) = 1 + 1/2 + ... + 1/n for the given n.
     * Inputs below 2 (including non-positive values) return 1, matching the
     * original recursive base case.
     *
     * @param number the n to compute the harmonic sum for
     * @return the harmonic sum H(number)
     */
    public static double calculateHarmonicSum(int number) {
        // Iterative form avoids the StackOverflowError the original recursion
        // hit for large inputs. Summing k = 2..number reproduces the
        // recursion's innermost-first addition order (1/2 + 1 first), so
        // results are bit-for-bit identical for valid inputs.
        double sum = 1.0;
        for (int k = 2; k <= number; k++) {
            sum = (1.0 / (double) k) + sum;
        }
        return sum;
    }
}
public class ZooKeeperMasterModel {
    /**
     * Given a jobId, returns the N most recent events in its history in the
     * cluster.
     *
     * @param jobId the job whose history is fetched
     * @return the task status events for the job
     * @throws JobDoesNotExistException if the job is unknown
     */
    @Override
    public List<TaskStatusEvent> getJobHistory(final JobId jobId) throws JobDoesNotExistException {
        // Delegate with a null second argument -- presumably "no host filter";
        // confirm against the two-argument overload.
        return getJobHistory(jobId, null);
    }
}
public class StreamingResponseBandwidthBenchmark {
    /**
     * Standalone entry point: sets up the benchmark, lets it run for 30
     * seconds, then tears it down and exits. Useful for triggering a subset of
     * the benchmark in a profiler.
     */
    public static void main(String[] argv) throws Exception {
        StreamingResponseBandwidthBenchmark bench = new StreamingResponseBandwidthBenchmark();
        bench.setup();
        // Run long enough to attach a profiler and collect samples.
        Thread.sleep(30000);
        bench.teardown();
        // Force exit in case non-daemon benchmark threads are still alive.
        System.exit(0);
    }
}
public class Sort { /** * bucket sort algorithm * @ param arr an int array * @ param m the large - most one for all the Integers in arr */ public static void bucketSort ( int [ ] arr , int m ) { } }
int [ ] count = new int [ m ] ; int j , i = 0 ; // System . out . println ( count [ 0 ] = = 0 ? " true " : " false " ) ; for ( j = 0 ; j < arr . length ; j ++ ) { count [ arr [ j ] ] ++ ; } // loop and filter the elements for ( j = 0 ; j < m ; j ++ ) { if ( count [ j ] > 0 ) { while ( count [ j ] -- > 0 ) { arr [ i ++ ] = j ; } } }
public class NodeStack {
    /**
     * Pushes a node and its associated state onto the stack, recording the
     * identity of the caller for diagnostics.
     *
     * @param state the state associated with the node
     * @param node the node to push
     * @param pusher caller identification, kept for debugging
     */
    public void push(int state, GBSNode node, String pusher) {
        // Remember who performed the most recent push, then delegate.
        _lastStacker = pusher;
        push(state, node);
    }
}
public class WebResourceCollection {
    /**
     * Gets the match for the resource name.
     * <pre>
     * For each URL pattern, determine whether the resource matches it and
     * construct the match object. Exact match has first priority, the longest
     * path match second priority, and extension match last priority.
     * </pre>
     *
     * @param resourceName the resource (URI) to match against the patterns
     * @return the best match, or null if no pattern matches
     */
    public CollectionMatch performUrlMatch(String resourceName) {
        CollectionMatch match = null;
        String longestUrlPattern = null;
        for (String urlPattern : urlPatterns) {
            if (URLMatchingUtils.isExactMatch(resourceName, urlPattern)) {
                // An exact match wins immediately -- no need to keep scanning.
                return new CollectionMatch(resourceName, MatchType.EXACT_MATCH);
            } else if (URLMatchingUtils.isPathNameMatch(resourceName, urlPattern)) {
                // Track only the longest matching path prefix.
                longestUrlPattern = URLMatchingUtils.getLongestUrlPattern(longestUrlPattern, urlPattern);
            } else if (URLMatchingUtils.isExtensionMatch(resourceName, urlPattern)) {
                match = new CollectionMatch(urlPattern, MatchType.EXTENSION_MATCH);
            }
        }
        // A path match (if any) overrides an extension match, per the priority order.
        if (longestUrlPattern != null) {
            match = new CollectionMatch(longestUrlPattern, MatchType.PATH_MATCH);
        }
        return match;
    }
}
public class JCRAssert {
    /**
     * Asserts that a STRING property of a node equals an expected value.
     *
     * @param node the node containing the property to be verified
     * @param propertyName the property name to be verified
     * @param actualValue the value the property is expected to hold
     *        (NOTE(review): despite its name, this is passed as the 'expected'
     *        argument of assertEquals -- consider renaming)
     * @throws RepositoryException on repository access errors
     */
    public static void assertStringPropertyEquals(final Node node, final String propertyName, final String actualValue) throws RepositoryException {
        // Fail with a targeted message when the property is missing entirely.
        assertTrue("Node " + node.getPath() + " has no property " + propertyName, node.hasProperty(propertyName));
        final Property prop = node.getProperty(propertyName);
        // Fail clearly if the property exists but is not of STRING type.
        assertEquals("Property type is not STRING ", PropertyType.STRING, prop.getType());
        assertEquals(actualValue, prop.getString());
    }
}
public class CmsScrollBar {
    /**
     * Calculates and applies the scroll knob height.<p>
     *
     * @param outerHeight the height of the scrollable element
     * @param innerHeight the height of the scroll content
     */
    private void adjustKnobHeight(int outerHeight, int innerHeight) {
        // Knob height proportional to the visible fraction of the content:
        // outerHeight * (outerHeight / innerHeight).
        int result = (int) ((1.0 * outerHeight * outerHeight) / innerHeight);
        // NOTE(review): this clamp looks inverted -- a knob taller than
        // (outerHeight - 5) is forced DOWN to 5px while short knobs are raised
        // to 8px; confirm this is the intended behavior before changing it.
        result = result > (outerHeight - 5) ? 5 : (result < 8 ? 8 : result);
        // Ratio translating knob movement into content scroll offset.
        m_positionValueRatio = (1.0 * (outerHeight - result)) / (innerHeight - outerHeight);
        // Shrink by the knob offsets, then enforce the minimum knob height.
        m_knobHeight = result - (2 * SCROLL_KNOB_OFFSET);
        m_knobHeight = m_knobHeight < SCROLL_KNOB_MIN_HEIGHT ? SCROLL_KNOB_MIN_HEIGHT : m_knobHeight;
        m_knob.getStyle().setHeight(m_knobHeight, Unit.PX);
    }
}
public class Rcli {
    /**
     * Performs a body-less PUT (update with a VOID payload) against the given
     * path.
     *
     * @param pathinfo the path, optionally carrying an inline '?query' suffix
     * @return a Future for the (empty) response
     * @throws APIException on API-level failures
     * @throws CadiException on transport/security failures
     */
    public <T> Future<Void> update(String pathinfo) throws APIException, CadiException {
        // Split an inline query string off the path; otherwise fall back to
        // any previously configured queryParams.
        final int idx = pathinfo.indexOf('?');
        final String qp;
        if (idx >= 0) {
            qp = pathinfo.substring(idx + 1);
            pathinfo = pathinfo.substring(0, idx);
        } else {
            qp = queryParams;
        }
        EClient<CT> client = client();
        client.setMethod(PUT);
        client.addHeader(CONTENT_TYPE, typeString(Void.class));
        client.setQueryParams(qp);
        client.setFragment(fragment);
        client.setPathInfo(pathinfo);
        // No payload is set: this PUT intentionally has an empty body.
        client.send();
        // queryParams/fragment are one-shot state; reset them for the next call.
        queryParams = fragment = null;
        return client.future(null);
    }
}
public class CreateIndexCallable {
    /**
     * Generates the virtual table create SQL for the specified index.
     * Note: any column that contains an '=' will cause the statement to fail
     * because it triggers SQLite to expect that a parameter/value is being
     * passed in.
     *
     * @param indexName the index name used to derive the SQLite virtual table name
     * @param columns the columns in the table
     * @param indexSettings the special settings to apply to the virtual table
     *        (only 'tokenize' is currently supported)
     * @return the SQL to create the SQLite FTS4 virtual table
     */
    private String createVirtualTableStatementForIndex(String indexName, List<String> columns, List<String> indexSettings) {
        // Quote the derived table name; Locale.ENGLISH keeps formatting stable
        // regardless of the device's default locale.
        String tableName = String.format(Locale.ENGLISH, "\"%s\"", QueryImpl.tableNameForIndex(indexName));
        String cols = Misc.join(",", columns);
        String settings = Misc.join(",", indexSettings);
        return String.format("CREATE VIRTUAL TABLE %s USING FTS4 ( %s, %s )", tableName, cols, settings);
    }
}
public class JoinRecordReader {
    /**
     * Emits the next set of key/value pairs as defined by the child
     * RecordReaders and the operation associated with this composite RR.
     *
     * @param key output key, overwritten with the join key on success
     * @param value output tuple, filled by the join collector
     * @return true if a tuple was emitted, false when all readers are exhausted
     * @throws IOException on underlying reader errors
     */
    public boolean next(K key, TupleWritable value) throws IOException {
        // First try to drain a tuple already buffered in the join collector.
        if (jc.flush(value)) {
            WritableUtils.cloneInto(key, jc.key());
            return true;
        }
        jc.clear();
        // Refill the collector from the child reader queue until it yields a
        // tuple or every reader is exhausted.
        K iterkey = createKey();
        final PriorityQueue<ComposableRecordReader<K, ?>> q = getRecordReaderQueue();
        while (!q.isEmpty()) {
            fillJoinCollector(iterkey);
            jc.reset(iterkey);
            if (jc.flush(value)) {
                WritableUtils.cloneInto(key, jc.key());
                return true;
            }
            jc.clear();
        }
        return false;
    }
}