signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class VirtualNetworkRulesInner { /** * Updates the specified virtual network rule . * @ param resourceGroupName The name of the Azure resource group . * @ param accountName The name of the Data Lake Store account . * @ param virtualNetworkRuleName The name of the virtual network rule to update . * @ param parameters Parameters supplied to update the virtual network rule . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VirtualNetworkRuleInner object */ public Observable < VirtualNetworkRuleInner > updateAsync ( String resourceGroupName , String accountName , String virtualNetworkRuleName , UpdateVirtualNetworkRuleParameters parameters ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , accountName , virtualNetworkRuleName , parameters ) . map ( new Func1 < ServiceResponse < VirtualNetworkRuleInner > , VirtualNetworkRuleInner > ( ) { @ Override public VirtualNetworkRuleInner call ( ServiceResponse < VirtualNetworkRuleInner > response ) { return response . body ( ) ; } } ) ;
public class DefaultHttpHeaderMatcherBuilder { /** * { @ inheritDoc } */ @ Override public HttpFilterBuilder containsHeader ( final String name ) { } }
return addFilter ( new RequestFilter < HttpRequest > ( ) { @ Override public boolean matches ( HttpRequest request ) { return request . containsHeader ( name ) ; } @ Override public String toString ( ) { return String . format ( "containsHeader('%s')" , name ) ; } } ) ;
public class LsCommand {
    /**
     * Displays information for all directories and files directly under the path specified in args.
     *
     * @param path the {@link AlluxioURI} path as the input of the command
     * @param recursive whether to list the path recursively
     * @param forceLoadMetadata whether to force loading metadata (LoadMetadataPType.ALWAYS) during listing
     * @param dirAsFile list the directory status as a plain file
     * @param hSize print human-readable format sizes
     * @param pinnedOnly only print entries that are pinned
     * @param sortField sort the result by this field
     * @param reverse whether to reverse the sort order
     * @throws AlluxioException if an Alluxio-level error occurs while querying status
     * @throws IOException if an I/O error occurs while querying status
     */
    private void ls(AlluxioURI path, boolean recursive, boolean forceLoadMetadata, boolean dirAsFile, boolean hSize, boolean pinnedOnly, String sortField, boolean reverse) throws AlluxioException, IOException {
        URIStatus pathStatus = mFileSystem.getStatus(path);
        if (dirAsFile) {
            // Print the path itself as a single entry instead of listing its children.
            if (pinnedOnly && !pathStatus.isPinned()) {
                return;
            }
            printLsString(pathStatus, hSize);
            return;
        }
        ListStatusPOptions.Builder optionsBuilder = ListStatusPOptions.newBuilder();
        if (forceLoadMetadata) {
            optionsBuilder.setLoadMetadataType(LoadMetadataPType.ALWAYS);
        }
        optionsBuilder.setRecursive(recursive);
        // If list status takes too long (over 10s), print a progress message so the
        // user knows the command is still working.
        Timer timer = new Timer();
        if (pathStatus.isFolder()) {
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    System.out.printf("Getting directory status of %s files or sub-directories " + "may take a while.", pathStatus.getLength());
                }
            }, 10000);
        }
        List<URIStatus> statuses = mFileSystem.listStatus(path, optionsBuilder.build());
        // Listing finished; make sure the warning never fires after this point.
        timer.cancel();
        List<URIStatus> sorted = sortByFieldAndOrder(statuses, sortField, reverse);
        for (URIStatus status : sorted) {
            if (!pinnedOnly || status.isPinned()) {
                printLsString(status, hSize);
            }
        }
    }
}
public class HostKeyHelper { /** * Persists an SSH key to disk for the requested host . This effectively marks * the requested key as trusted for all future connections to the host , until * any future save attempt replaces this key . * @ param host the host the key is being saved for * @ param hostKey the key to be saved as the trusted key for this host * @ throws IOException on failure saving the key for the host */ public void saveHostKey ( Computer host , HostKey hostKey ) throws IOException { } }
XmlFile xmlHostKeyFile = new XmlFile ( getSshHostKeyFile ( host . getNode ( ) ) ) ; xmlHostKeyFile . write ( hostKey ) ; cache . put ( host , hostKey ) ;
public class ListApplicationsResult { /** * An array of application summaries . * @ param applications * An array of application summaries . */ public void setApplications ( java . util . Collection < ApplicationSummary > applications ) { } }
if ( applications == null ) { this . applications = null ; return ; } this . applications = new java . util . ArrayList < ApplicationSummary > ( applications ) ;
public class ComputerVisionImpl {
    /**
     * This interface is used for getting text operation result. The URL to this interface should be
     * retrieved from 'Operation-Location' field returned from Recognize Text interface.
     *
     * @param operationId Id of the text operation returned in the response of the 'Recognize Text'
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the TextOperationResult object
     */
    public Observable<ServiceResponse<TextOperationResult>> getTextOperationResultWithServiceResponseAsync(String operationId) {
        // Validate required client state and arguments before issuing the call.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (operationId == null) {
            throw new IllegalArgumentException("Parameter operationId is required and cannot be null.");
        }
        // Substitute the {Endpoint} placeholder of the parameterized host template
        // with the configured endpoint (AutoRest-generated pattern).
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        return service.getTextOperationResult(operationId, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TextOperationResult>>>() {
                @Override
                public Observable<ServiceResponse<TextOperationResult>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into a typed ServiceResponse.
                        ServiceResponse<TextOperationResult> clientResponse = getTextOperationResultDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface any parsing/validation failure through the observable chain.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class WeeklyAutoScalingSchedule { /** * The schedule for Sunday . * @ return The schedule for Sunday . */ public java . util . Map < String , String > getSunday ( ) { } }
if ( sunday == null ) { sunday = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return sunday ;
public class PutVoiceConnectorTerminationCredentialsRequest { /** * The termination SIP credentials . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCredentials ( java . util . Collection ) } or { @ link # withCredentials ( java . util . Collection ) } if you want to * override the existing values . * @ param credentials * The termination SIP credentials . * @ return Returns a reference to this object so that method calls can be chained together . */ public PutVoiceConnectorTerminationCredentialsRequest withCredentials ( Credential ... credentials ) { } }
if ( this . credentials == null ) { setCredentials ( new java . util . ArrayList < Credential > ( credentials . length ) ) ; } for ( Credential ele : credentials ) { this . credentials . add ( ele ) ; } return this ;
public class AbstractRemoteSupport {
    /**
     * Method closeRemoteConsumers
     * <p>
     * Close the remote consumers for a given remote messaging engine (ME).
     *
     * @param remoteMEId the UUID of the remote ME whose consumers should be closed
     */
    public void closeRemoteConsumers(SIBUuid8 remoteMEId) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "closeRemoteConsumers", remoteMEId);
        // Walk every anycast input handler and close those localised on the given ME.
        // NOTE(review): if closeRemoteConsumer(key) removes entries from
        // _anycastInputHandlers, this key-set iteration could throw
        // ConcurrentModificationException — confirm against its implementation.
        Iterator aihs = _anycastInputHandlers.keySet().iterator();
        while (aihs.hasNext()) {
            String key = (String) aihs.next();
            AnycastInputHandler aih = _anycastInputHandlers.get(key);
            if (aih.getLocalisationUuid().equals(remoteMEId))
                closeRemoteConsumer(key);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "closeRemoteConsumers");
        return;
    }
}
public class UtilAbstractAction { /** * Get a multi - valued request parameter stripped of white space . * Return null for zero length . * @ param req * @ param name name of parameter * @ return Collection < String > or null * @ throws Throwable */ protected Collection < String > getReqPars ( final HttpServletRequest req , final String name ) throws Throwable { } }
String [ ] s = req . getParameterValues ( name ) ; ArrayList < String > res = null ; if ( ( s == null ) || ( s . length == 0 ) ) { return null ; } for ( String par : s ) { par = Util . checkNull ( par ) ; if ( par != null ) { if ( res == null ) { res = new ArrayList < String > ( ) ; } res . add ( par ) ; } } return res ;
public class CmsUserDriver { /** * Updates the user additional information map . < p > * @ param dbc the current database context * @ param userId the id of the user to update * @ param additionalInfo the info to write * @ throws CmsDataAccessException if user data could not be written */ protected void internalWriteUserInfos ( CmsDbContext dbc , CmsUUID userId , Map < String , Object > additionalInfo ) throws CmsDataAccessException { } }
Lock lock = USER_INFO_LOCKS . get ( userId ) ; try { lock . lock ( ) ; // get the map of existing additional infos to compare it new additional infos Map < String , Object > existingInfos = readUserInfos ( dbc , userId ) ; // loop over all entries of the existing additional infos Iterator < Entry < String , Object > > itEntries = existingInfos . entrySet ( ) . iterator ( ) ; while ( itEntries . hasNext ( ) ) { Entry < String , Object > entry = itEntries . next ( ) ; if ( ( entry . getKey ( ) != null ) && ( entry . getValue ( ) != null ) ) { // entry does not exist in new additional infos - > delete it if ( ! additionalInfo . containsKey ( entry . getKey ( ) ) ) { dbc . setAttribute ( ATTRIBUTE_USERADDINFO , ATTRIBUTE_USERADDINFO_VALUE_DELETE ) ; writeUserInfo ( dbc , userId , entry . getKey ( ) , entry . getValue ( ) ) ; } else { Object newValue = additionalInfo . get ( entry . getKey ( ) ) ; // entry does exist but has different value - > update it if ( ( newValue != null ) && ! newValue . equals ( entry . getValue ( ) ) ) { dbc . setAttribute ( ATTRIBUTE_USERADDINFO , ATTRIBUTE_USERADDINFO_VALUE_UPDATE ) ; writeUserInfo ( dbc , userId , entry . getKey ( ) , newValue ) ; } } } } // loop over all entries of the new additional infos Iterator < Entry < String , Object > > itNewEntries = additionalInfo . entrySet ( ) . iterator ( ) ; while ( itNewEntries . hasNext ( ) ) { Entry < String , Object > entry = itNewEntries . next ( ) ; if ( ( entry . getKey ( ) != null ) && ( entry . getValue ( ) != null ) ) { // entry doews not exist in the existing additional infos - > create a new one if ( ! existingInfos . containsKey ( entry . getKey ( ) ) ) { dbc . setAttribute ( ATTRIBUTE_USERADDINFO , ATTRIBUTE_USERADDINFO_VALUE_INSERT ) ; writeUserInfo ( dbc , userId , entry . getKey ( ) , entry . getValue ( ) ) ; } } } } finally { lock . unlock ( ) ; }
public class UIViewAction {
    /**
     * <p class="changed_added_2_2">Enable the method invocation specified by this
     * component instance to return a value that performs navigation, similar in
     * spirit to {@link UICommand#broadcast}.</p>
     *
     * <div class="changed_added_2_2">
     * <p>Take no action and return immediately if any of the following conditions
     * are true:</p>
     * <ul>
     * <li><p>The response has already been marked as complete.</p></li>
     * <li><p>The current <code>UIViewRoot</code> is different from the event's
     * source's <code>UIViewRoot</code>.</p></li>
     * </ul>
     * <p>Save a local reference to the viewId of the current
     * <code>UIViewRoot</code> (<em>viewIdBeforeAction</em>). Obtain the
     * {@link ActionListener} from the {@link javax.faces.application.Application}.
     * Wrap the current {@link FacesContext} in a
     * {@link javax.faces.context.FacesContextWrapper} that turns
     * {@link FacesContext#renderResponse} into a no-op, and make
     * {@link #isProcessingBroadcast} on the current context return
     * <code>true</code> (the {@link javax.faces.application.NavigationHandler}
     * consults this to detect navigation caused by a <code>UIViewAction</code>).
     * Invoke {@link ActionListener#processAction}. In a <code>finally</code>
     * block, restore the original <code>FacesContext</code> and the
     * <code>false</code> broadcast flag.</p>
     * <p>If the response has been marked complete during
     * <code>processAction()</code>, return. Otherwise compare
     * <em>viewIdBeforeAction</em> with the viewId after the invocation. If the
     * two are the same and no more <code>UIViewAction</code> events are queued
     * (the count was noted in {@link #decode}), call
     * {@link FacesContext#renderResponse} and return; otherwise the lifecycle
     * executes on the new <code>UIViewRoot</code>.</p>
     * </div>
     *
     * @param event {@link FacesEvent} to be broadcast
     * @throws AbortProcessingException Signal the JavaServer Faces implementation
     * that no further processing on the current event should be performed
     * @throws IllegalArgumentException if the implementation class of this
     * {@link FacesEvent} is not supported by this component
     * @throws NullPointerException if <code>event</code> is <code>null</code>
     * @since 2.2
     */
    @Override
    public void broadcast(final FacesEvent event) throws AbortProcessingException {
        super.broadcast(event);
        FacesContext context = getFacesContext();
        if (!(event instanceof ActionEvent)) {
            throw new IllegalArgumentException();
        }
        // OPEN QUESTION: should we consider a navigation to the same view as a
        // no-op navigation?
        // only proceed if the response has not been marked complete and
        // navigation to another view has not occurred
        if (!context.getResponseComplete() && (context.getViewRoot() == getViewRootOf(event))) {
            ActionListener listener = context.getApplication().getActionListener();
            if (listener != null) {
                boolean hasMoreViewActionEvents = false;
                UIViewRoot viewRootBefore = context.getViewRoot();
                assert (null != viewRootBefore);
                InstrumentedFacesContext instrumentedContext = null;
                try {
                    instrumentedContext = new InstrumentedFacesContext(context);
                    setIsProcessingUIViewActionBroadcast(context, true);
                    // defer the call to renderResponse() that happens in
                    // ActionListener#processAction(ActionEvent)
                    instrumentedContext.disableRenderResponseControl().set();
                    listener.processAction((ActionEvent) event);
                    hasMoreViewActionEvents = !decrementEventCountAndReturnTrueIfZero(context);
                } finally {
                    // always restore the real FacesContext and clear the broadcast flag
                    setIsProcessingUIViewActionBroadcast(context, false);
                    if (null != instrumentedContext) {
                        instrumentedContext.restore();
                    }
                }
                // if the response is marked complete, the story is over
                if (!context.getResponseComplete()) {
                    UIViewRoot viewRootAfter = context.getViewRoot();
                    assert (null != viewRootAfter);
                    // if the view id changed as a result of navigation,
                    // then execute the JSF lifecycle for the new view id
                    String viewIdBefore = viewRootBefore.getViewId();
                    String viewIdAfter = viewRootAfter.getViewId();
                    assert (null != viewIdBefore && null != viewIdAfter);
                    boolean viewIdsSame = viewIdBefore.equals(viewIdAfter);
                    if (viewIdsSame && !hasMoreViewActionEvents) {
                        // apply the deferred call (relevant when immediate is true)
                        context.renderResponse();
                    }
                }
            }
        }
    }
}
public class MarketEquilibrium {
    /**
     * Will calculate the risk aversion factor that is the best fit for an observed pair of market
     * portfolio weights and equilibrium/historical excess returns.
     *
     * @param assetWeights the observed market portfolio weights
     * @param assetReturns the equilibrium/historical excess returns
     * @return the implied risk aversion factor
     */
    Scalar<?> calculateImpliedRiskAversion(final PrimitiveMatrix assetWeights, final PrimitiveMatrix assetReturns) {
        // Solve (covariances * weights) x = returns and take the single solution element.
        Scalar<?> retVal = myCovariances.multiply(assetWeights).solve(assetReturns).toScalar(0, 0);
        if (retVal.isSmall(PrimitiveMath.ONE)) {
            // Near-zero solution: fall back to a factor of exactly one.
            retVal = BigScalar.ONE;
        } else if (!retVal.isAbsolute()) {
            // NOTE(review): non-absolute (negative) solutions are flipped to positive —
            // presumably the risk aversion factor is required to be positive; confirm
            // against the model's contract.
            retVal = retVal.negate();
        }
        return retVal;
    }
}
public class CmsResourceUtil { /** * Returns the site of the current resources , * taking into account the set site mode . < p > * @ return the site path */ public String getSite ( ) { } }
String site = null ; if ( ( m_siteMode == SITE_MODE_MATCHING ) || ( m_cms == null ) ) { site = OpenCms . getSiteManager ( ) . getSiteRoot ( m_resource . getRootPath ( ) ) ; } else if ( m_siteMode == SITE_MODE_CURRENT ) { site = m_cms . getRequestContext ( ) . getSiteRoot ( ) ; } else if ( m_siteMode == SITE_MODE_ROOT ) { site = "" ; } return ( site == null ? "" : site ) ;
public class ReportDownloadResponse { /** * Writes the contents of the response to the specified File . * @ param outputFile the output file to write to * @ throws FileNotFoundException if unable to write to { @ code outputFile } * @ throws IOException if unable to read the response contents */ public void saveToFile ( String outputFile ) throws FileNotFoundException , IOException { } }
Streams . copy ( getInputStream ( ) , new BufferedOutputStream ( new FileOutputStream ( outputFile ) ) ) ;
public class CommerceAccountPersistenceImpl {
    /**
     * Returns the number of commerce accounts where userId = &#63; and type = &#63;.
     *
     * @param userId the user ID
     * @param type the type
     * @return the number of matching commerce accounts
     */
    @Override
    public int countByU_T(long userId, int type) {
        FinderPath finderPath = FINDER_PATH_COUNT_BY_U_T;
        Object[] finderArgs = new Object[] { userId, type };
        // Check the finder cache first; only hit the database on a miss.
        Long count = (Long) finderCache.getResult(finderPath, finderArgs, this);
        if (count == null) {
            // Build: SELECT COUNT(...) WHERE userId = ? AND type = ?
            StringBundler query = new StringBundler(3);
            query.append(_SQL_COUNT_COMMERCEACCOUNT_WHERE);
            query.append(_FINDER_COLUMN_U_T_USERID_2);
            query.append(_FINDER_COLUMN_U_T_TYPE_2);
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession();
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                // Bind the positional parameters in WHERE-clause order.
                qPos.add(userId);
                qPos.add(type);
                count = (Long) q.uniqueResult();
                // Cache the result for subsequent calls with the same arguments.
                finderCache.putResult(finderPath, finderArgs, count);
            } catch (Exception e) {
                // Invalidate the cache entry so a failed query is not cached.
                finderCache.removeResult(finderPath, finderArgs);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        return count.intValue();
    }
}
public class ArrayUtils { /** * Tests two int [ ] [ ] arrays for having equal contents . * @ return true iff for each i , < code > equalContents ( xs [ i ] , ys [ i ] ) < / code > is true */ public static boolean equalContents ( int [ ] [ ] xs , int [ ] [ ] ys ) { } }
if ( xs == null ) return ys == null ; if ( ys == null ) return false ; if ( xs . length != ys . length ) return false ; for ( int i = xs . length - 1 ; i >= 0 ; i -- ) { if ( ! equalContents ( xs [ i ] , ys [ i ] ) ) return false ; } return true ;
public class TypeDecoder { /** * Static array length cannot be passed as a type . */ @ SuppressWarnings ( "unchecked" ) static < T extends Type > T decodeStaticArray ( String input , int offset , TypeReference < T > typeReference , int length ) { } }
BiFunction < List < T > , String , T > function = ( elements , typeName ) -> { if ( elements . isEmpty ( ) ) { throw new UnsupportedOperationException ( "Zero length fixed array is invalid type" ) ; } else { return instantiateStaticArray ( typeReference , elements , length ) ; } } ; return decodeArrayElements ( input , offset , typeReference , length , function ) ;
public class TransformerFactoryImpl { /** * Allows the user to set specific attributes on the underlying * implementation . * @ param name The name of the attribute . * @ param value The value of the attribute ; Boolean or String = " true " | " false " * @ throws IllegalArgumentException thrown if the underlying * implementation doesn ' t recognize the attribute . */ public void setAttribute ( String name , Object value ) throws IllegalArgumentException { } }
if ( name . equals ( FEATURE_INCREMENTAL ) ) { if ( value instanceof Boolean ) { // Accept a Boolean object . . m_incremental = ( ( Boolean ) value ) . booleanValue ( ) ; } else if ( value instanceof String ) { // . . or a String object m_incremental = ( new Boolean ( ( String ) value ) ) . booleanValue ( ) ; } else { // Give a more meaningful error message throw new IllegalArgumentException ( XSLMessages . createMessage ( XSLTErrorResources . ER_BAD_VALUE , new Object [ ] { name , value } ) ) ; // name + " bad value " + value ) ; } } else if ( name . equals ( FEATURE_OPTIMIZE ) ) { if ( value instanceof Boolean ) { // Accept a Boolean object . . m_optimize = ( ( Boolean ) value ) . booleanValue ( ) ; } else if ( value instanceof String ) { // . . or a String object m_optimize = ( new Boolean ( ( String ) value ) ) . booleanValue ( ) ; } else { // Give a more meaningful error message throw new IllegalArgumentException ( XSLMessages . createMessage ( XSLTErrorResources . ER_BAD_VALUE , new Object [ ] { name , value } ) ) ; // name + " bad value " + value ) ; } } // Custom Xalan feature : annotate DTM with SAX source locator fields . // This gets used during SAX2DTM instantiation . // % REVIEW % Should the name of this field really be in XalanProperties ? // % REVIEW % I hate that it ' s a global static , but didn ' t want to change APIs yet . else if ( name . equals ( FEATURE_SOURCE_LOCATION ) ) { if ( value instanceof Boolean ) { // Accept a Boolean object . . m_source_location = ( ( Boolean ) value ) . booleanValue ( ) ; } else if ( value instanceof String ) { // . . or a String object m_source_location = ( new Boolean ( ( String ) value ) ) . booleanValue ( ) ; } else { // Give a more meaningful error message throw new IllegalArgumentException ( XSLMessages . createMessage ( XSLTErrorResources . ER_BAD_VALUE , new Object [ ] { name , value } ) ) ; // name + " bad value " + value ) ; } } else { throw new IllegalArgumentException ( XSLMessages . 
createMessage ( XSLTErrorResources . ER_NOT_SUPPORTED , new Object [ ] { name } ) ) ; // name + " not supported " ) ; }
public class DescribeInstancesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeInstancesRequest describeInstancesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeInstancesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeInstancesRequest . getFleetId ( ) , FLEETID_BINDING ) ; protocolMarshaller . marshall ( describeInstancesRequest . getInstanceId ( ) , INSTANCEID_BINDING ) ; protocolMarshaller . marshall ( describeInstancesRequest . getLimit ( ) , LIMIT_BINDING ) ; protocolMarshaller . marshall ( describeInstancesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RegularPactTask {
    /**
     * Creates all the serializers and comparators.
     *
     * Populates the task's {@code inputSerializers}, {@code inputComparators} and
     * {@code inputIterators} arrays, one slot per driver input.
     *
     * @param numInputs the number of driver inputs to initialize
     * @throws Exception if a serializer/comparator factory cannot be obtained or instantiated
     */
    protected void initInputsSerializersAndComparators(int numInputs) throws Exception {
        this.inputSerializers = new TypeSerializerFactory<?>[numInputs];
        // Comparators are only materialized when the driver actually requires them.
        this.inputComparators = this.driver.requiresComparatorOnInput() ? new TypeComparator[numInputs] : null;
        this.inputIterators = new MutableObjectIterator[numInputs];
        for (int i = 0; i < numInputs; i++) {
            // ----- create the serializer first -----
            final TypeSerializerFactory<?> serializerFactory = this.config.getInputSerializer(i, this.userCodeClassLoader);
            this.inputSerializers[i] = serializerFactory;
            // ----- create the driver's comparator -----
            if (this.inputComparators != null) {
                final TypeComparatorFactory<?> comparatorFactory = this.config.getDriverComparator(i, this.userCodeClassLoader);
                this.inputComparators[i] = comparatorFactory.createComparator();
            }
            // Wrap the raw reader in an iterator that deserializes records for this input.
            this.inputIterators[i] = createInputIterator(this.inputReaders[i], this.inputSerializers[i]);
        }
    }
}
public class MetricReportReporter {
    /**
     * Serializes metrics and pushes the byte arrays to Kafka. Uses the serialize
     * methods in {@link MetricReportReporter}.
     *
     * @param gauges map of {@link com.codahale.metrics.Gauge} to report and their name.
     * @param counters map of {@link com.codahale.metrics.Counter} to report and their name.
     * @param histograms map of {@link com.codahale.metrics.Histogram} to report and their name.
     * @param meters map of {@link com.codahale.metrics.Meter} to report and their name.
     * @param timers map of {@link com.codahale.metrics.Timer} to report and their name.
     * @param tags additional tags to attach to the emitted report.
     */
    @Override
    protected void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers, Map<String, Object> tags) {
        // Flatten every registry metric into a single list of serialized Metric entries.
        List<Metric> metrics = Lists.newArrayList();
        for (Map.Entry<String, Gauge> gauge : gauges.entrySet()) {
            metrics.addAll(serializeGauge(gauge.getKey(), gauge.getValue()));
        }
        for (Map.Entry<String, Counter> counter : counters.entrySet()) {
            metrics.addAll(serializeCounter(counter.getKey(), counter.getValue()));
        }
        for (Map.Entry<String, Histogram> histogram : histograms.entrySet()) {
            // Histograms contribute both their distribution snapshot and their count.
            metrics.addAll(serializeSnapshot(histogram.getKey(), histogram.getValue().getSnapshot()));
            metrics.addAll(serializeCounter(histogram.getKey(), histogram.getValue()));
        }
        for (Map.Entry<String, Meter> meter : meters.entrySet()) {
            metrics.addAll(serializeMetered(meter.getKey(), meter.getValue()));
        }
        for (Map.Entry<String, Timer> timer : timers.entrySet()) {
            // Timers contribute snapshot, rate data, and the raw event count.
            metrics.addAll(serializeSnapshot(timer.getKey(), timer.getValue().getSnapshot()));
            metrics.addAll(serializeMetered(timer.getKey(), timer.getValue()));
            metrics.addAll(serializeSingleValue(timer.getKey(), timer.getValue().getCount(), Measurements.COUNT.getName()));
        }
        // Merge call-supplied tags with the reporter's own tags (reporter tags win on collision).
        Map<String, Object> allTags = Maps.newHashMap();
        allTags.putAll(tags);
        allTags.putAll(this.tags);
        // NOTE(review): input.toString() would NPE on a null tag value — presumably
        // tag values are never null here; confirm with callers.
        Map<String, String> allTagsString = Maps.transformValues(allTags, new Function<Object, String>() {
            @Nullable
            @Override
            public String apply(Object input) {
                return input.toString();
            }
        });
        MetricReport report = new MetricReport(allTagsString, System.currentTimeMillis(), metrics);
        emitReport(report);
    }
}
public class DoublesMergeImpl {
    /**
     * Merges a source sketch into a target sketch with a smaller K, down-sampling
     * the source levels by the K ratio. Also used by DoublesSketch, DoublesUnionImpl
     * and HeapDoublesSketchTest.
     *
     * @param src the source sketch; its K must be target K times a power of two
     * @param tgt the target sketch that receives the merged data
     */
    static void downSamplingMergeInto(final DoublesSketch src, final UpdateDoublesSketch tgt) {
        final int srcK = src.getK();
        final int tgtK = tgt.getK();
        final long tgtN = tgt.getN();
        // The down-sampling factor srcK/tgtK must be a power of two.
        if ((srcK % tgtK) != 0) {
            throw new SketchesArgumentException("source.getK() must equal target.getK() * 2^(nonnegative integer).");
        }
        final int downFactor = srcK / tgtK;
        checkIfPowerOf2(downFactor, "source.getK()/target.getK() ratio");
        final int lgDownFactor = Integer.numberOfTrailingZeros(downFactor);
        if (src.isEmpty()) {
            return;
        }
        final DoublesSketchAccessor srcSketchBuf = DoublesSketchAccessor.wrap(src);
        final long nFinal = tgtN + src.getN();
        for (int i = 0; i < srcSketchBuf.numItems(); i++) { // update only the base buffer
            tgt.update(srcSketchBuf.get(i));
        }
        // Grow the target's combined buffer if the merged item count needs more room.
        final int spaceNeeded = DoublesUpdateImpl.getRequiredItemCapacity(tgtK, nFinal);
        final int curCombBufCap = tgt.getCombinedBufferItemCapacity();
        if (spaceNeeded > curCombBufCap) { // copies base buffer plus current levels
            tgt.growCombinedBuffer(curCombBufCap, spaceNeeded);
        }
        // working scratch buffers
        final DoublesArrayAccessor scratch2KAcc = DoublesArrayAccessor.initialize(2 * tgtK);
        final DoublesArrayAccessor downScratchKAcc = DoublesArrayAccessor.initialize(tgtK);
        final DoublesSketchAccessor tgtSketchBuf = DoublesSketchAccessor.wrap(tgt, true);
        long srcBitPattern = src.getBitPattern();
        long newTgtBitPattern = tgt.getBitPattern();
        // Walk each populated source level (one bit per level), down-sample it with a
        // stride zip, and propagate the carry into the target's level structure.
        for (int srcLvl = 0; srcBitPattern != 0L; srcLvl++, srcBitPattern >>>= 1) {
            if ((srcBitPattern & 1L) > 0L) {
                justZipWithStride(srcSketchBuf.setLevel(srcLvl), downScratchKAcc, tgtK, downFactor);
                newTgtBitPattern = DoublesUpdateImpl.inPlacePropagateCarry(
                    srcLvl + lgDownFactor, // starting level
                    downScratchKAcc,       // optSrcKBuf
                    scratch2KAcc,          // size2KBuf
                    false,                 // do mergeInto version
                    tgtK,
                    tgtSketchBuf,
                    newTgtBitPattern);
                tgt.putBitPattern(newTgtBitPattern); // off-heap is a no-op
            }
        }
        // For a direct (off-heap) target, clear the EMPTY flag in backing memory.
        if (tgt.isDirect() && (nFinal > 0)) {
            final WritableMemory mem = tgt.getMemory();
            mem.clearBits(FLAGS_BYTE, (byte) EMPTY_FLAG_MASK);
        }
        tgt.putN(nFinal);
        assert (tgt.getN() / (2L * tgtK)) == newTgtBitPattern; // internal consistency check
        // Propagate min/max, treating NaN as "no value seen" via infinite sentinels.
        double srcMax = src.getMaxValue();
        srcMax = Double.isNaN(srcMax) ? Double.NEGATIVE_INFINITY : srcMax;
        double srcMin = src.getMinValue();
        srcMin = Double.isNaN(srcMin) ? Double.POSITIVE_INFINITY : srcMin;
        double tgtMax = tgt.getMaxValue();
        tgtMax = Double.isNaN(tgtMax) ? Double.NEGATIVE_INFINITY : tgtMax;
        double tgtMin = tgt.getMinValue();
        tgtMin = Double.isNaN(tgtMin) ? Double.POSITIVE_INFINITY : tgtMin;
        if (srcMax > tgtMax) {
            tgt.putMaxValue(srcMax);
        }
        if (srcMin < tgtMin) {
            tgt.putMinValue(srcMin);
        }
    }
}
public class DeviceProxy { private int getTangoVersionFromZmqEventSubscriptionChange ( ) throws DevFailed { } }
try { DeviceData argIn = new DeviceData ( ) ; argIn . insert ( new String [ ] { "info" } ) ; DeviceData argOut = get_adm_dev ( ) . command_inout ( "ZmqEventSubscriptionChange" , argIn ) ; DevVarLongStringArray lsa = argOut . extractLongStringArray ( ) ; if ( lsa . lvalue . length == 0 ) return - 1 ; else return lsa . lvalue [ 0 ] ; } catch ( DevFailed e ) { if ( e . errors [ 0 ] . reason . equals ( "API_CommandNotFound" ) ) return - 1 ; else throw e ; }
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcSanitaryTerminalType ( ) { } }
if ( ifcSanitaryTerminalTypeEClass == null ) { ifcSanitaryTerminalTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 502 ) ; } return ifcSanitaryTerminalTypeEClass ;
public class PrimaveraReader {
    /**
     * Process tasks: builds MPXJ tasks from Primavera WBS rows and activity rows,
     * wires up the WBS hierarchy, resolves unique-ID clashes between WBS entries and
     * activities, then sorts activities and recalculates structure, dates and work.
     *
     * @param wbs WBS task data (supplied in hierarchy order)
     * @param tasks activity task data
     */
    public void processTasks(List<Row> wbs, List<Row> tasks) {
        ProjectProperties projectProperties = m_project.getProjectProperties();
        String projectName = projectProperties.getName();
        Set<Integer> uniqueIDs = new HashSet<Integer>();
        Set<Task> wbsTasks = new HashSet<Task>();
        // We set the project name when we read the project properties, but that's just
        // the short name. The full project name lives on the first WBS item. Rather than
        // querying twice, we'll just set it here where we have access to the WBS items.
        // I haven't changed what's in the project name attribute as that's the value
        // MPXJ users are used to receiving in that attribute, so we'll use the title
        // attribute instead.
        if (!wbs.isEmpty()) {
            projectProperties.setProjectTitle(wbs.get(0).getString("wbs_name"));
        }
        // Read WBS entries and create tasks.
        // Note that the wbs list is supplied to us in the correct order.
        for (Row row : wbs) {
            Task task = m_project.addTask();
            task.setProject(projectName); // P6 task always belongs to project
            task.setSummary(true);
            processFields(m_wbsFields, row, task);
            populateUserDefinedFieldValues("PROJWBS", FieldTypeClass.TASK, task, task.getUniqueID());
            uniqueIDs.add(task.getUniqueID());
            wbsTasks.add(task);
            m_eventManager.fireTaskReadEvent(task);
        }
        // Create hierarchical structure: re-parent each WBS task under its parent WBS id,
        // extending the dotted WBS code as we descend.
        FieldType activityIDField = getActivityIDField(m_wbsFields);
        m_project.getChildTasks().clear();
        for (Row row : wbs) {
            Task task = m_project.getTaskByUniqueID(row.getInteger("wbs_id"));
            Task parentTask = m_project.getTaskByUniqueID(row.getInteger("parent_wbs_id"));
            if (parentTask == null) {
                m_project.getChildTasks().add(task);
            } else {
                m_project.getChildTasks().remove(task);
                parentTask.getChildTasks().add(task);
                task.setWBS(parentTask.getWBS() + "." + task.getWBS());
                if (activityIDField != null) {
                    task.set(activityIDField, task.getWBS());
                }
            }
        }
        // Read Task entries and create tasks
        int nextID = 1;
        m_clashMap.clear();
        for (Row row : tasks) {
            Task task;
            Integer parentTaskID = row.getInteger("wbs_id");
            Task parentTask = m_project.getTaskByUniqueID(parentTaskID);
            if (parentTask == null) {
                task = m_project.addTask();
            } else {
                task = parentTask.addTask();
            }
            task.setProject(projectName); // P6 task always belongs to project
            processFields(m_taskFields, row, task);
            task.setMilestone(BooleanHelper.getBoolean(MILESTONE_MAP.get(row.getString("task_type"))));
            // Only "Resource Dependent" activities consider resource calendars during scheduling in P6.
            task.setIgnoreResourceCalendar(!"TT_Rsrc".equals(row.getString("task_type")));
            task.setPercentageComplete(calculatePercentComplete(row));
            if (m_matchPrimaveraWBS && parentTask != null) {
                task.setWBS(parentTask.getWBS());
            }
            Integer uniqueID = task.getUniqueID();
            // Add User Defined Fields - before we handle ID clashes
            populateUserDefinedFieldValues("TASK", FieldTypeClass.TASK, task, uniqueID);
            populateActivityCodes(task);
            // Activity unique IDs may collide with WBS unique IDs: remap clashing IDs to the
            // next free integer and remember the mapping for later relation fix-up.
            if (uniqueIDs.contains(uniqueID)) {
                while (uniqueIDs.contains(Integer.valueOf(nextID))) {
                    ++nextID;
                }
                Integer newUniqueID = Integer.valueOf(nextID);
                m_clashMap.put(uniqueID, newUniqueID);
                uniqueID = newUniqueID;
                task.setUniqueID(uniqueID);
            }
            uniqueIDs.add(uniqueID);
            Integer calId = row.getInteger("clndr_id");
            ProjectCalendar cal = m_calMap.get(calId);
            task.setCalendar(cal);
            // Prefer actual dates; fall back to the remaining/restart dates when not started/finished.
            Date startDate = row.getDate("act_start_date") == null ? row.getDate("restart_date") : row.getDate("act_start_date");
            task.setStart(startDate);
            Date endDate = row.getDate("act_end_date") == null ? row.getDate("reend_date") : row.getDate("act_end_date");
            task.setFinish(endDate);
            Duration work = Duration.add(task.getActualWork(), task.getRemainingWork(), projectProperties);
            task.setWork(work);
            m_eventManager.fireTaskReadEvent(task);
        }
        new ActivitySorter(TaskField.TEXT1, wbsTasks).sort(m_project);
        updateStructure();
        updateDates();
        updateWork();
    }
}
public class SpanData {
    /**
     * Returns a new immutable {@code SpanData}.
     *
     * Accepts events typed as the deprecated {@code BaseMessageEvent} for backward
     * compatibility: any legacy network events are converted to {@code MessageEvent}s
     * before the immutable value is constructed.
     *
     * @param context the {@code SpanContext} of the {@code Span}.
     * @param parentSpanId the parent {@code SpanId}, or {@code null} for a root span.
     * @param hasRemoteParent {@code true} if the parent {@code Span} is remote; {@code null} for a root span.
     * @param name the name of the {@code Span}.
     * @param kind the kind of the {@code Span}.
     * @param startTimestamp the start {@code Timestamp} of the {@code Span}.
     * @param attributes the attributes associated with the {@code Span}.
     * @param annotations the annotations associated with the {@code Span}.
     * @param messageOrNetworkEvents the message events (or legacy network events) of the {@code Span}.
     * @param links the links associated with the {@code Span}.
     * @param childSpanCount the number of child spans generated while the span was active.
     * @param status the {@code Status} of the {@code Span}; {@code null} if still active.
     * @param endTimestamp the end {@code Timestamp}; {@code null} if still active.
     * @return a new immutable {@code SpanData}.
     * @since 0.14
     */
    @SuppressWarnings({"deprecation", "InconsistentOverloads"})
    public static SpanData create(SpanContext context, @Nullable SpanId parentSpanId, @Nullable Boolean hasRemoteParent, String name, @Nullable Kind kind, Timestamp startTimestamp, Attributes attributes, TimedEvents<Annotation> annotations, TimedEvents<? extends io.opencensus.trace.BaseMessageEvent> messageOrNetworkEvents, Links links, @Nullable Integer childSpanCount, @Nullable Status status, @Nullable Timestamp endTimestamp) {
        Utils.checkNotNull(messageOrNetworkEvents, "messageOrNetworkEvents");
        List<TimedEvent<MessageEvent>> messageEventsList = new ArrayList<TimedEvent<MessageEvent>>();
        for (TimedEvent<? extends io.opencensus.trace.BaseMessageEvent> timedEvent : messageOrNetworkEvents.getEvents()) {
            io.opencensus.trace.BaseMessageEvent event = timedEvent.getEvent();
            if (event instanceof MessageEvent) {
                // Already the modern type: reuse the timed event as-is (cast is safe by the check above).
                @SuppressWarnings("unchecked")
                TimedEvent<MessageEvent> timedMessageEvent = (TimedEvent<MessageEvent>) timedEvent;
                messageEventsList.add(timedMessageEvent);
            } else {
                // Legacy network event: convert it, keeping the original timestamp.
                messageEventsList.add(TimedEvent.<MessageEvent>create(timedEvent.getTimestamp(), BaseMessageEventUtils.asMessageEvent(event)));
            }
        }
        TimedEvents<MessageEvent> messageEvents = TimedEvents.<MessageEvent>create(messageEventsList, messageOrNetworkEvents.getDroppedEventsCount());
        return new AutoValue_SpanData(context, parentSpanId, hasRemoteParent, name, kind, startTimestamp, attributes, annotations, messageEvents, links, childSpanCount, status, endTimestamp);
    }
}
public class Allowed { /** * A list of policies that allowed the authentication . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPolicies ( java . util . Collection ) } or { @ link # withPolicies ( java . util . Collection ) } if you want to override * the existing values . * @ param policies * A list of policies that allowed the authentication . * @ return Returns a reference to this object so that method calls can be chained together . */ public Allowed withPolicies ( Policy ... policies ) { } }
if ( this . policies == null ) { setPolicies ( new java . util . ArrayList < Policy > ( policies . length ) ) ; } for ( Policy ele : policies ) { this . policies . add ( ele ) ; } return this ;
public class ComponentSelector { /** * Takes from the data the selection state . * @ throws QTasteTestFailException if the component is not a { @ link CheckBox } * or a { @ link RadioButton } * or a { @ link ToggleButton } . */ @ Override protected void prepareActions ( ) throws QTasteTestFailException { } }
mSelectState = Boolean . parseBoolean ( mData [ 0 ] . toString ( ) ) ; if ( ! ( component instanceof CheckBox ) && ! ( component instanceof RadioButton ) && ! ( component instanceof ToggleButton ) ) { throw new QTasteTestFailException ( "Unsupported component." ) ; }
public class LegacyDfuImpl { /** * Writes the operation code to the characteristic . This method is SYNCHRONOUS and wait until the * { @ link android . bluetooth . BluetoothGattCallback # onCharacteristicWrite ( android . bluetooth . BluetoothGatt , android . bluetooth . BluetoothGattCharacteristic , int ) } * will be called or the device gets disconnected . * If connection state will change , or an error will occur , an exception will be thrown . * @ param characteristic the characteristic to write to . Should be the DFU CONTROL POINT . * @ param value the value to write to the characteristic . * @ throws DeviceDisconnectedException Thrown when the device will disconnect in the middle of * the transmission . * @ throws DfuException Thrown if DFU error occur . * @ throws UploadAbortedException Thrown if DFU operation was aborted by user . */ private void writeOpCode ( @ NonNull final BluetoothGattCharacteristic characteristic , @ NonNull final byte [ ] value ) throws DeviceDisconnectedException , DfuException , UploadAbortedException { } }
final boolean reset = value [ 0 ] == OP_CODE_RESET_KEY || value [ 0 ] == OP_CODE_ACTIVATE_AND_RESET_KEY ; writeOpCode ( characteristic , value , reset ) ;
public class AbstractTransitionBuilder {
    /**
     * Similar to alpha(float), but defers evaluation until the transition is about to start.
     *
     * @param end the target alpha value, in [0, 1]
     * @return self, for call chaining
     */
    public T delayAlpha(@FloatRange(from = 0.0, to = 1.0) float end) {
        // Register the alpha end-value with the delayed processor; it is evaluated at start time.
        getDelayedProcessor().addProcess(ALPHA, end);
        return self();
    }
}
public class RootPaneWindowFocusedState {
    /**
     * Reports whether the window containing the given component is focused.
     * Walks up the parent chain to the first {@link Window} ancestor; if no window
     * ancestor exists, defaults to {@code true}.
     */
    public boolean isInState(JComponent c) {
        Component candidate = c;
        while (!(candidate instanceof Window) && candidate.getParent() != null) {
            candidate = candidate.getParent();
        }
        if (candidate instanceof Window) {
            return ((Window) candidate).isFocused();
        }
        // No window ancestor found: default to true.
        return true;
    }
}
public class WebFragmentDescriptorImpl {
    /**
     * Adds a new namespace declaration as an attribute on the underlying model.
     *
     * @param name the attribute name of the namespace
     * @param value the namespace value
     * @return the current instance of <code>WebFragmentDescriptor</code>, for chaining
     */
    public WebFragmentDescriptor addNamespace(String name, String value) {
        model.attribute(name, value);
        return this;
    }
}
public class FunctionKeyReader {
    /**
     * Read the saga instance key from the provided message by delegating to the
     * configured key-extraction function.
     *
     * @param message the message to read the key from
     * @param context the lookup context passed through to the extraction function
     * @return the extracted key; may be null when the function yields none
     */
    @Override
    @Nullable
    public KEY readKey(final MESSAGE message, final LookupContext context) {
        return extractFunction.key(message, context);
    }
}
public class Assignment {
    /**
     * Creates an {@code Assignment} mapping each variable in {@code vars} to the value
     * at the corresponding index of {@code values}. {@code vars} does not need to be
     * sorted in any order. This method does not copy {@code vars} or {@code values} and
     * may modify either array; the caller should not read or modify either of these
     * arrays after invoking this method.
     *
     * @param vars variable identifiers (sorted in place by this call)
     * @param values values parallel to {@code vars} (permuted in place to stay aligned)
     * @return the assignment built from the now-sorted arrays
     */
    public static final Assignment fromUnsortedArrays(int[] vars, Object[] values) {
        // Sort vars ascending, permuting values identically so the key/value pairing is preserved.
        ArrayUtils.sortKeyValuePairs(vars, new Object[][] { values }, 0, vars.length);
        return fromSortedArrays(vars, values);
    }
}
public class MongoDBBasicOperations {
    /**
     * Extract the value of the primary ID of the entity object.
     *
     * @param entity the entity to get the primary key value of
     * @return the primary key value, or null if the entity type declares no ID property
     */
    public X getPrimaryID(T entity) {
        // Lazily resolve the Spring Data mapping infrastructure on first use.
        if (mappingContext == null || conversionService == null) {
            fetchMappingContextAndConversionService();
        }
        MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(entity.getClass());
        MongoPersistentProperty idProperty = persistentEntity.getIdProperty();
        if (idProperty == null) {
            return null;
        }
        // NOTE(review): this lookup uses this.entityClass while the check above used
        // entity.getClass(); presumably equivalent here — confirm if proxied subclasses occur.
        X idValue = (X) this.mappingContext.getPersistentEntity(this.entityClass).getPropertyAccessor(entity).getProperty(idProperty);
        return idValue;
    }
}
public class PropertyType { /** * Uses the object ' s properties to determine if the supplied string matches * the value of this property . * @ param text the String to validate * @ return whether the text supplied is matched by the value of the property */ public boolean matches ( String text ) { } }
if ( text == null ) { return false ; } if ( this . regex ) { final Pattern rx ; if ( this . caseSensitive ) { rx = Pattern . compile ( this . value ) ; } else { rx = Pattern . compile ( this . value , Pattern . CASE_INSENSITIVE ) ; } return rx . matcher ( text ) . matches ( ) ; } else { if ( this . caseSensitive ) { return value . equals ( text ) ; } else { return value . equalsIgnoreCase ( text ) ; } }
public class ImplicitMappingBuilder { /** * Merges mappings from an existing TypeMap into the type map under construction . */ private void mergeMappings ( TypeMap < ? , ? > destinationMap ) { } }
for ( Mapping mapping : destinationMap . getMappings ( ) ) { InternalMapping internalMapping = ( InternalMapping ) mapping ; mergedMappings . add ( internalMapping . createMergedCopy ( propertyNameInfo . getSourceProperties ( ) , propertyNameInfo . getDestinationProperties ( ) ) ) ; }
public class Version { /** * Returns a new version from the given version string . * @ param version the version string * @ return the version object * @ throws IllegalArgumentException if the version string is invalid */ public static Version from ( String version ) { } }
String [ ] fields = version . split ( "[.-]" , 4 ) ; checkArgument ( fields . length >= 3 , "version number is invalid" ) ; return new Version ( parseInt ( fields [ 0 ] ) , parseInt ( fields [ 1 ] ) , parseInt ( fields [ 2 ] ) , fields . length == 4 ? fields [ 3 ] : null ) ;
public class TokenizerHelper { /** * Convert Tokenizer into serialized options , or empty map if null * @ param tokenizer a { @ link Tokenizer } used by a text index , or null * ( this will be the case for JSON indexes ) * @ return a JSON string representing these options */ public static String tokenizerToJson ( Tokenizer tokenizer ) { } }
Map < String , String > settingsMap = new HashMap < String , String > ( ) ; if ( tokenizer != null ) { settingsMap . put ( TOKENIZE , tokenizer . tokenizerName ) ; // safe to store args even if they are null settingsMap . put ( TOKENIZE_ARGS , tokenizer . tokenizerArguments ) ; } return JSONUtils . serializeAsString ( settingsMap ) ;
public class InternalXbaseParser {
    /**
     * ANTLR-generated entry rule: parses a complete XEqualityExpression followed by EOF.
     * InternalXbase.g:258:1: entryRuleXEqualityExpression : ruleXEqualityExpression EOF ;
     */
    public final void entryRuleXEqualityExpression() throws RecognitionException {
        try {
            // InternalXbase.g:259:1: ( ruleXEqualityExpression EOF )
            // InternalXbase.g:260:1: ruleXEqualityExpression EOF
            {
                // Guarded callbacks only fire on the real (non-backtracking) parse pass.
                if (state.backtracking == 0) {
                    before(grammarAccess.getXEqualityExpressionRule());
                }
                pushFollow(FOLLOW_1);
                ruleXEqualityExpression();
                state._fsp--;
                if (state.failed) return;
                if (state.backtracking == 0) {
                    after(grammarAccess.getXEqualityExpressionRule());
                }
                match(input, EOF, FOLLOW_2);
                if (state.failed) return;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class IntegerExtensions {
    /**
     * The <code>..</code> operator yields an {@link IntegerRange} from {@code a} to {@code b}.
     *
     * @param a the start of the range.
     * @param b the end of the range.
     * @return an {@link IntegerRange}. Never <code>null</code>.
     * @since 2.3
     */
    @Pure
    @Inline(value = "new $3($1, $2)", imported = IntegerRange.class, statementExpression = false)
    public static IntegerRange operator_upTo(final int a, final int b) {
        // Must stay in sync with the @Inline template above, which inlines this very expression.
        return new IntegerRange(a, b);
    }
}
public class Transform {
    /**
     * Get the narrowest possible target type. If this {@link Transform} operation maps its
     * {@code TARGET_POSITION} type parameter as some {@link Readable} then this will be
     * deduced from the target position type/value, else the target position is returned.
     *
     * @return Typed
     */
    @BindTypeVariable
    public Typed<TARGET> getTargetType() {
        final TARGET_POSITION target = getTargetPosition();
        // Resolve the concrete binding of the 4th type parameter (TARGET_POSITION) of Transform
        // for this runtime class.
        final Type targetPositionType = TypeUtils.unrollVariables(
                TypeUtils.getTypeArguments(getClass(), Transform.class),
                Transform.class.getTypeParameters()[3]);
        if (TypeUtils.isAssignable(targetPositionType, Position.Readable.class)) {
            final Type result = narrow((Position.Readable<TARGET>) target);
            // Only wrap when narrowing produced something more specific than the declared type.
            if (!TypeUtils.equals(result, target.getType())) {
                return new Typed<TARGET>() {
                    @Override
                    public Type getType() {
                        return result;
                    }
                };
            }
        }
        return target;
    }
}
public class AbstractXHTMLLinkTypeRenderer {
    /**
     * Default implementation for computing a link label when no label has been specified.
     * Can be overwritten by implementations to provide a different algorithm.
     *
     * @param reference the reference of the link for which to compute the label
     * @return the computed label
     */
    protected String computeLabel(ResourceReference reference) {
        // Look for a component implementing URILabelGenerator with a role hint matching the
        // link scheme. If not found then use the full reference as the label. Note that this
        // can happen when we're not in wiki mode (since all links are considered URIs then).
        try {
            final URILabelGenerator uriLabelGenerator = this.componentManager.getInstance(
                    URILabelGenerator.class, reference.getType().getScheme());
            return uriLabelGenerator.generateLabel(reference);
        } catch (ComponentLookupException e) {
            return reference.getReference();
        }
    }
}
public class P2sVpnServerConfigurationsInner {
    /**
     * Retrieves the details of a P2SVpnServerConfiguration.
     *
     * @param resourceGroupName The resource group name of the P2SVpnServerConfiguration.
     * @param virtualWanName The name of the VirtualWan.
     * @param p2SVpnServerConfigurationName The name of the P2SVpnServerConfiguration.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<P2SVpnServerConfigurationInner> getAsync(String resourceGroupName, String virtualWanName, String p2SVpnServerConfigurationName, final ServiceCallback<P2SVpnServerConfigurationInner> serviceCallback) {
        // Bridge the observable pipeline to the callback-style ServiceFuture.
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, virtualWanName, p2SVpnServerConfigurationName), serviceCallback);
    }
}
public class PropertyData { /** * Gets the raw type of the property without generics . * { @ code Foo < String > } will return { @ code Foo } . * @ return the raw type */ public String getTypeRaw ( ) { } }
int pos = type . indexOf ( "<" ) ; return ( pos < 0 ? type : type . substring ( 0 , pos ) ) ;
public class PoolManager { /** * Get a pool ' s min share preemption timeout , in milliseconds . This is the * time after which jobs in the pool may kill other pools ' tasks if they * are below their min share . */ public long getMinSharePreemptionTimeout ( String pool ) { } }
if ( minSharePreemptionTimeouts . containsKey ( pool ) ) { return minSharePreemptionTimeouts . get ( pool ) ; } else { return defaultMinSharePreemptionTimeout ; }
public class PooledWsByteBufferImpl {
    /**
     * Deserializes this buffer's state, delegating the actual field restoration to the
     * superclass and emitting a trace event when event-level tracing is enabled.
     *
     * @see com.ibm.ws.bytebuffer.internal.WsByteBufferImpl#readExternal(java.io.ObjectInput)
     */
    @Override
    public void readExternal(ObjectInput s) throws IOException, ClassNotFoundException {
        super.readExternal(s);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(this, tc, "Deserializing " + this);
        }
    }
}
public class MainFrame { /** * < / editor - fold > / / GEN - END : initComponents */ private void settingsLoadDefaultsMenuItemActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ settingsLoadDefaultsMenuItemActionPerformed if ( evt != null ) { int response = JOptionPane . showConfirmDialog ( this , "Load defaults settings and lose current ones?" , "Confirm Reset" , JOptionPane . OK_CANCEL_OPTION , JOptionPane . QUESTION_MESSAGE ) ; if ( response == JOptionPane . CANCEL_OPTION ) { return ; } } try { loadSettings ( null ) ; } catch ( Exception ex ) { Logger . getLogger ( MainFrame . class . getName ( ) ) . log ( Level . SEVERE , null , ex ) ; }
public class TableColumnStyle {
    /**
     * Add this style to a styles container, along with its default cell style (if any)
     * so that references to it resolve.
     *
     * @param stylesContainer the styles container
     */
    public void addToContentStyles(final StylesContainer stylesContainer) {
        stylesContainer.addContentStyle(this);
        if (this.defaultCellStyle != null) {
            stylesContainer.addContentStyle(this.defaultCellStyle);
        }
    }
}
public class Positions { /** * Positions the owner to the right inside its parent . < br > * Respects the parent padding . * @ param < T > the generic type * @ param < U > the generic type * @ param spacing the spacing * @ return the int supplier */ public static < T extends ISized & IChild < U > , U extends ISized > IntSupplier rightAligned ( T owner , int spacing ) { } }
return ( ) -> { U parent = owner . getParent ( ) ; if ( parent == null ) return 0 ; return parent . size ( ) . width ( ) - owner . size ( ) . width ( ) - Padding . of ( parent ) . right ( ) - spacing ; } ;
public class CommerceTierPriceEntryPersistenceImpl { /** * Returns the first commerce tier price entry in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce tier price entry * @ throws NoSuchTierPriceEntryException if a matching commerce tier price entry could not be found */ @ Override public CommerceTierPriceEntry findByUuid_C_First ( String uuid , long companyId , OrderByComparator < CommerceTierPriceEntry > orderByComparator ) throws NoSuchTierPriceEntryException { } }
CommerceTierPriceEntry commerceTierPriceEntry = fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; if ( commerceTierPriceEntry != null ) { return commerceTierPriceEntry ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchTierPriceEntryException ( msg . toString ( ) ) ;
public class CascadingUtils { /** * Mark the output dir of the job for which the context is passed . */ public static void markSuccessfulOutputDir ( Path path , JobConf conf ) throws IOException { } }
FileSystem fs = FileSystem . get ( conf ) ; // create a file in the folder to mark it if ( fs . exists ( path ) ) { Path filePath = new Path ( path , VersionedStore . HADOOP_SUCCESS_FLAG ) ; fs . create ( filePath ) . close ( ) ; }
public class CmsSearchManager {
    /**
     * Adds a document type configuration and logs the registration at init level.<p>
     *
     * @param documentType a document type
     */
    public void addDocumentTypeConfig(CmsSearchDocumentType documentType) {
        m_documentTypeConfigs.add(documentType);
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SEARCH_DOC_TYPES_2, documentType.getName(), documentType.getClassName()));
        }
    }
}
public class DoubleFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < R > DoubleFunction < R > dblFunctionFrom ( Consumer < DoubleFunctionBuilder < R > > buildingFunction ) { } }
DoubleFunctionBuilder builder = new DoubleFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class DoubleHistogram {
    /**
     * Encode this histogram in compressed form into a byte array. Writes a 16-byte header
     * (4-byte cookie + 4-byte significant-digits + 8-byte highest/lowest ratio) followed by
     * the compressed integer-values histogram.
     *
     * @param targetBuffer The buffer to encode into
     * @param compressionLevel Compression level (for java.util.zip.Deflater).
     * @return The number of bytes written to the buffer
     */
    @Override
    synchronized public int encodeIntoCompressedByteBuffer(final ByteBuffer targetBuffer, final int compressionLevel) {
        targetBuffer.putInt(DHIST_compressedEncodingCookie);
        targetBuffer.putInt(getNumberOfSignificantValueDigits());
        targetBuffer.putLong(configuredHighestToLowestValueRatio);
        // +16 accounts for the header bytes written above (4 + 4 + 8).
        return integerValuesHistogram.encodeIntoCompressedByteBuffer(targetBuffer, compressionLevel) + 16;
    }
}
public class AmazonIdentityManagementClient { /** * Removes the specified user from the specified group . * @ param removeUserFromGroupRequest * @ return Result of the RemoveUserFromGroup operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits . * The error message describes the limit exceeded . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . RemoveUserFromGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / RemoveUserFromGroup " target = " _ top " > AWS API * Documentation < / a > */ @ Override public RemoveUserFromGroupResult removeUserFromGroup ( RemoveUserFromGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeRemoveUserFromGroup ( request ) ;
public class ImageUtils { /** * The total difference between two images calculated as the sum of the difference in RGB values of each pixel * the images MUST be the same dimensions . * @ since 1.2 * @ param img1 the first image to be compared * @ param img2 the second image to be compared * @ return the difference between the two images */ public static long totalImageDiff ( BufferedImage img1 , BufferedImage img2 ) { } }
int width = img1 . getWidth ( ) ; int height = img1 . getHeight ( ) ; if ( ( width != img2 . getWidth ( ) ) || ( height != img2 . getHeight ( ) ) ) { throw new IllegalArgumentException ( "Image dimensions do not match" ) ; } long diff = 0 ; for ( int x = 0 ; x < width ; x ++ ) { for ( int y = 0 ; y < height ; y ++ ) { int rgb1 = img1 . getRGB ( x , y ) ; int rgb2 = img2 . getRGB ( x , y ) ; int a1 = ColorUtils . getAlpha ( rgb1 ) ; int r1 = ColorUtils . getRed ( rgb1 ) ; int g1 = ColorUtils . getGreen ( rgb1 ) ; int b1 = ColorUtils . getBlue ( rgb1 ) ; int a2 = ColorUtils . getAlpha ( rgb2 ) ; int r2 = ColorUtils . getRed ( rgb2 ) ; int g2 = ColorUtils . getGreen ( rgb2 ) ; int b2 = ColorUtils . getBlue ( rgb2 ) ; diff += Math . abs ( a1 - a2 ) ; diff += Math . abs ( r1 - r2 ) ; diff += Math . abs ( g1 - g2 ) ; diff += Math . abs ( b1 - b2 ) ; } } return diff ;
public class ThreadPool {
    /**
     * Schedules an executor task. If the executor is below its task limit the task is
     * scheduled immediately; otherwise it is appended to the executor's overflow queue.
     *
     * @param task the task to run
     * @return the result of the immediate schedule attempt, or false when queued
     */
    public boolean scheduleExecutorTask(Runnable task) {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        // All executor counters and queue pointers are guarded by _executorLock.
        synchronized (_executorLock) {
            _executorTaskCount++;
            // A negative _executorTaskMax means "unlimited".
            if (_executorTaskCount <= _executorTaskMax || _executorTaskMax < 0) {
                boolean isPriority = false;
                boolean isQueue = true;
                boolean isWake = true;
                return scheduleImpl(task, loader, MAX_EXPIRE, isPriority, isQueue, isWake);
            } else {
                // Over the limit: append to the intrusive FIFO overflow queue.
                ExecutorQueueItem item = new ExecutorQueueItem(task, loader);
                if (_executorQueueTail != null)
                    _executorQueueTail._next = item;
                else
                    _executorQueueHead = item;
                _executorQueueTail = item;
                return false;
            }
        }
    }
}
public class Choice3 {
    /**
     * {@inheritDoc}
     * Lifts the given value into a Choice3 via the {@code c} constructor
     * (presumably the third variant — confirm against the c() factory).
     */
    @Override
    public <D> Choice3<A, B, D> pure(D d) {
        return c(d);
    }
}
public class NuunCore { /** * Creates a kernel with the given configuration . * @ param configuration the kernel configuration * @ return the kernel */ public static Kernel createKernel ( KernelConfiguration configuration ) { } }
KernelCoreFactory factory = new KernelCoreFactory ( ) ; return factory . create ( configuration ) ;
public class SyncListPermissionUpdater {
    /**
     * Adds the requested post parameters to the Request. Each permission flag is
     * optional and only serialized when it has been explicitly set.
     *
     * @param request Request to add post params to
     */
    private void addPostParams(final Request request) {
        if (read != null) {
            request.addPostParam("Read", read.toString());
        }
        if (write != null) {
            request.addPostParam("Write", write.toString());
        }
        if (manage != null) {
            request.addPostParam("Manage", manage.toString());
        }
    }
}
public class ClassUtils {
    /**
     * Returns an instance of the bean's annotation, searching the class itself and its
     * superclasses (stopping before {@code Object}).
     *
     * @param beanClass class to be searched
     * @param annotationClass type of the annotation
     * @param <T> type of the annotation
     * @return the annotation instance, or empty when absent on the whole hierarchy
     */
    public static <T extends Annotation> Optional<T> getAnnotation(Class<?> beanClass, Class<T> annotationClass) {
        for (Class<?> type = beanClass; type != null && type != Object.class; type = type.getSuperclass()) {
            final T annotation = type.getAnnotation(annotationClass);
            if (annotation != null) {
                return Optional.of(annotation);
            }
        }
        return Optional.empty();
    }
}
public class ReLookup {
    /**
     * Registers a value under the given regular-expression key.
     *
     * @param regularExpKey the regular-expression pattern used as the lookup key
     * @param value the value to associate with the pattern
     * @return the previous value mapped to this key, or null if there was none
     */
    public T put(String regularExpKey, T value) {
        return this.lookupMap.put(regularExpKey, value);
    }
}
public class AfplibPackageImpl {
    /**
     * Returns the {@link EClass} for FNPRG, resolved lazily from the globally registered
     * Afplib package (classifier index 430).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getFNPRG() {
        if (fnprgEClass == null) {
            // Lazy lookup: fetch the classifier from the global EPackage registry on first use.
            fnprgEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(430);
        }
        return fnprgEClass;
    }
}
public class Reflector { /** * Returns the number of elements in the array or list object ; * @ return * the number of elements in the array or list object . * @ throws ReflectorException * if the object is not a list or an array . */ public int getArrayLength ( ) throws ReflectorException { } }
if ( object == null ) { logger . error ( "object is null: did you specify it using the 'inspect()' method?" ) ; throw new ReflectorException ( "object is null: did you specify it using the 'inspect()' method?" ) ; } int length = 0 ; if ( object . getClass ( ) . isArray ( ) ) { length = Array . getLength ( object ) ; } else if ( object instanceof List < ? > ) { length = ( ( List < ? > ) object ) . size ( ) ; } else { throw new ReflectorException ( "object is not an array or a list" ) ; } return length ;
public class CmsUriSplitter {
    /**
     * Checks if the given URI is well formed by attempting to construct a {@link java.net.URI}
     * from it.<p>
     *
     * @param uri the URI to check
     * @return <code>true</code> if the given URI is well formed
     */
    @SuppressWarnings("unused")
    public static boolean isValidUri(String uri) {
        try {
            new URI(uri);
            return true;
        } catch (Exception e) {
            // Malformed (or null) URI: not valid.
            return false;
        }
    }
}
public class Gradient { /** * Generate the image used for texturing the gradient across shapes */ public void genImage ( ) { } }
if ( image == null ) { ImageBuffer buffer = new ImageBuffer ( 128 , 16 ) ; for ( int i = 0 ; i < 128 ; i ++ ) { Color col = getColorAt ( i / 128.0f ) ; for ( int j = 0 ; j < 16 ; j ++ ) { buffer . setRGBA ( i , j , col . getRedByte ( ) , col . getGreenByte ( ) , col . getBlueByte ( ) , col . getAlphaByte ( ) ) ; } } image = buffer . getImage ( ) ; }
public class XMLGISElementUtil {
    /**
     * Write the XML description for the given map element.
     * Convenience overload that delegates with a {@code null} element-type name.
     *
     * @param primitive is the map element to output.
     * @param builder is the tool to create XML nodes.
     * @param resources is the tool that permits to gather the resources.
     * @return the XML node of the map element.
     * @throws IOException in case of error.
     */
    public static Element writeMapElement(MapElement primitive, XMLBuilder builder, XMLResources resources) throws IOException {
        // Second argument is presumably the XML tag/type override; verify
        // against the four-argument overload's contract.
        return writeMapElement(primitive, null, builder, resources);
    }
}
public class ServerStats { /** * Creates new { @ link ServerStats } from specified parameters . * @ param lbLatencyNs Represents request processing latency observed on Load Balancer . It is * measured in nanoseconds . Must not be less than 0 . Value of 0 represents that the latency is * not measured . * @ param serviceLatencyNs Represents request processing latency observed on Server . It is * measured in nanoseconds . Must not be less than 0 . Value of 0 represents that the latency is * not measured . * @ param traceOption Represents set of bits to indicate properties of trace . Currently it used * only the least signification bit to represent sampling of the request on the server side . * Other bits are ignored . * @ return new { @ code ServerStats } with specified fields . * @ throws IllegalArgumentException if the arguments are out of range . * @ since 0.16 */ public static ServerStats create ( long lbLatencyNs , long serviceLatencyNs , byte traceOption ) { } }
if ( lbLatencyNs < 0 ) { throw new IllegalArgumentException ( "'getLbLatencyNs' is less than zero: " + lbLatencyNs ) ; } if ( serviceLatencyNs < 0 ) { throw new IllegalArgumentException ( "'getServiceLatencyNs' is less than zero: " + serviceLatencyNs ) ; } return new AutoValue_ServerStats ( lbLatencyNs , serviceLatencyNs , traceOption ) ;
public class CompositeByteBuf {
    /**
     * Add the given {@link ByteBuf}s on the specific index.
     * Be aware that this method does not increase the {@code writerIndex} of the
     * {@link CompositeByteBuf}. If you need to have it increased you need to handle
     * it by your own.
     * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in
     * {@code buffers} is transferred to this {@link CompositeByteBuf}.
     *
     * @param cIndex the index on which the {@link ByteBuf} will be added.
     * @param buffers the {@link ByteBuf}s to add; ownership is transferred to this
     *     {@link CompositeByteBuf}.
     */
    public CompositeByteBuf addComponents(int cIndex, Iterable<ByteBuf> buffers) {
        // Delegates with increaseWriterIndex=false, matching the documented
        // "writerIndex is not advanced" contract.
        return addComponents(false, cIndex, buffers);
    }
}
public class LargeRecordHandler {
    /**
     * Spills the given record to the records file and appends a (key..., offset)
     * tuple to the keys file. On the very first call this lazily initializes the
     * key serializers/comparators (derived from the first record's extracted keys)
     * and the two spill channels.
     *
     * @param record the record to add
     * @return the byte offset at which the record was written in the records file
     * @throws IOException if spilling fails
     * @throws IllegalStateException if the handler was closed or has already
     *         switched to the sorting/reading phase
     */
    @SuppressWarnings("unchecked")
    public long addRecord(T record) throws IOException {
        if (recordsOutFile == null) {
            // ---- first record: set up key extraction and spill files ----
            if (closed) {
                throw new IllegalStateException("The large record handler has been closed.");
            }
            if (recordsReader != null) {
                throw new IllegalStateException("The handler has already switched to sorting.");
            }
            LOG.debug("Initializing the large record spilling...");
            // initialize the utilities
            {
                // Derive per-field serializers from the first record's key values;
                // assumes later records have the same key field types — TODO confirm.
                final TypeComparator<?>[] keyComps = comparator.getFlatComparators();
                numKeyFields = keyComps.length;
                Object[] keyHolder = new Object[numKeyFields];
                comparator.extractKeys(record, keyHolder, 0);
                TypeSerializer<?>[] keySers = new TypeSerializer<?>[numKeyFields];
                TypeSerializer<?>[] tupleSers = new TypeSerializer<?>[numKeyFields + 1];
                int[] keyPos = new int[numKeyFields];
                for (int i = 0; i < numKeyFields; i++) {
                    keyPos[i] = i;
                    keySers[i] = createSerializer(keyHolder[i], i);
                    tupleSers[i] = keySers[i];
                }
                // add the long serializer for the offset (last tuple field)
                tupleSers[numKeyFields] = LongSerializer.INSTANCE;
                keySerializer = new TupleSerializer<Tuple>((Class<Tuple>) Tuple.getTupleClass(numKeyFields + 1), tupleSers);
                keyComparator = new TupleComparator<Tuple>(keyPos, keyComps, keySers);
                keySerializerFactory = new RuntimeSerializerFactory<Tuple>(keySerializer, keySerializer.getTupleClass());
                keyTuple = keySerializer.createInstance();
            }
            // initialize the spilling: split the available memory segments between
            // the (small) keys file and the (large) records file
            final int totalNumSegments = memory.size();
            final int segmentsForKeys = (totalNumSegments >= 2 * MAX_SEGMENTS_FOR_KEY_SPILLING)
                ? MAX_SEGMENTS_FOR_KEY_SPILLING
                : Math.max(MIN_SEGMENTS_FOR_KEY_SPILLING, totalNumSegments - MAX_SEGMENTS_FOR_KEY_SPILLING);
            List<MemorySegment> recordsMemory = new ArrayList<MemorySegment>();
            List<MemorySegment> keysMemory = new ArrayList<MemorySegment>();
            for (int i = 0; i < segmentsForKeys; i++) {
                keysMemory.add(memory.get(i));
            }
            for (int i = segmentsForKeys; i < totalNumSegments; i++) {
                recordsMemory.add(memory.get(i));
            }
            recordsChannel = ioManager.createChannel();
            keysChannel = ioManager.createChannel();
            recordsOutFile = new FileChannelOutputView(ioManager.createBlockChannelWriter(recordsChannel), memManager, recordsMemory, memManager.getPageSize());
            keysOutFile = new FileChannelOutputView(ioManager.createBlockChannelWriter(keysChannel), memManager, keysMemory, memManager.getPageSize());
        }
        // ---- steady state: write (keys, offset) then the record itself ----
        final long offset = recordsOutFile.getWriteOffset();
        if (offset < 0) {
            throw new RuntimeException("wrong offset");
        }
        Object[] keyHolder = new Object[numKeyFields];
        comparator.extractKeys(record, keyHolder, 0);
        for (int i = 0; i < numKeyFields; i++) {
            keyTuple.setField(keyHolder[i], i);
        }
        // last field of the key tuple is the record's offset in the records file
        keyTuple.setField(offset, numKeyFields);
        keySerializer.serialize(keyTuple, keysOutFile);
        serializer.serialize(record, recordsOutFile);
        recordCounter++;
        return offset;
    }
}
public class AbstractWALDAO {
    /**
     * Call this method inside the constructor to read the file contents directly.
     * This method is write locking internally. This method performs WAL file
     * reading upon startup both after init as well as after read!
     * <p>
     * Flow: (1) resolve the backing file (may be absent for in-memory/testing
     * use); (2) under the write lock, either run {@code onInit()} (file missing)
     * or read+interpret the XML via {@code onRead(...)}; (3) replay any pending
     * WAL file (create/update/delete actions), persist the replayed state and
     * maintain/delete the WAL file. WAL writing is suppressed for the whole
     * duration via {@code m_bCanWriteWAL}.
     *
     * @throws DAOException in case initialization or reading failed!
     */
    protected final void initialRead() throws DAOException {
        File aFile = null;
        final String sFilename = m_aFilenameProvider.get();
        if (sFilename == null) {
            // required for testing
            if (!isSilentMode())
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn("This DAO of class " + getClass().getName() + " will not be able to read from a file");
            // do not return - run initialization anyway
        } else {
            // Check consistency
            aFile = getSafeFile(sFilename, EMode.READ);
        }
        // No file or non-existing file both mean "first-time initialization"
        final boolean bIsInitialization = aFile == null || !aFile.exists();
        final File aFinalFile = aFile;
        m_aRWLock.writeLock().lock();
        try {
            // Disable WAL writing while we initialize/read/recover
            m_bCanWriteWAL = false;
            IMicroDocument aDoc = null;
            try {
                ESuccess eWriteSuccess = ESuccess.SUCCESS;
                if (bIsInitialization) {
                    // initial setup for non-existing file
                    if (isDebugLogging())
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("Trying to initialize WAL DAO" + (aFinalFile == null ? "" : " XML file '" + aFinalFile.getAbsolutePath() + "'"));
                    beginWithoutAutoSave();
                    try {
                        m_aStatsCounterInitTotal.increment();
                        final StopWatch aSW = StopWatch.createdStarted();
                        // Only persist when onInit() changed something AND a file exists
                        if (onInit().isChanged())
                            if (aFinalFile != null)
                                eWriteSuccess = _writeToFile();
                        m_aStatsCounterInitTimer.addTime(aSW.stopAndGetMillis());
                        m_aStatsCounterInitSuccess.increment();
                        m_nInitCount++;
                        m_aLastInitDT = PDTFactory.getCurrentLocalDateTime();
                    } finally {
                        endWithoutAutoSave();
                        // reset any pending changes, because the initialization should
                        // be read-only. If the implementing class changed something,
                        // the return value of onInit() is what counts
                        internalSetPendingChanges(false);
                    }
                } else {
                    // Read existing file (aFinalFile must be set)
                    if (isDebugLogging())
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("Trying to read WAL DAO XML file '" + aFinalFile.getAbsolutePath() + "'");
                    m_aStatsCounterReadTotal.increment();
                    aDoc = MicroReader.readMicroXML(aFinalFile);
                    if (aDoc == null) {
                        // NOTE(review): read failure is only logged here; the
                        // exception path below is not taken for unreadable XML.
                        if (LOGGER.isErrorEnabled())
                            LOGGER.error("Failed to read DAO XML document from file '" + aFinalFile.getAbsolutePath() + "'");
                    } else {
                        // Valid XML - start interpreting
                        beginWithoutAutoSave();
                        try {
                            final StopWatch aSW = StopWatch.createdStarted();
                            if (onRead(aDoc).isChanged())
                                eWriteSuccess = _writeToFile();
                            m_aStatsCounterReadTimer.addTime(aSW.stopAndGetMillis());
                            m_aStatsCounterReadSuccess.increment();
                            m_nReadCount++;
                            m_aLastReadDT = PDTFactory.getCurrentLocalDateTime();
                        } finally {
                            endWithoutAutoSave();
                            // reset any pending changes, because the initialization should
                            // be read-only. If the implementing class changed something,
                            // the return value of onRead() is what counts
                            internalSetPendingChanges(false);
                        }
                    }
                }
                // Check if writing was successful on any of the 2 branches
                if (eWriteSuccess.isSuccess()) {
                    // Reset any pending changes, since the changes were already saved
                    internalSetPendingChanges(false);
                } else {
                    // There is something wrong
                    // NOTE(review): aFinalFile can in principle be null here only if
                    // _writeToFile() was reached without a file — verify not possible.
                    if (LOGGER.isErrorEnabled())
                        LOGGER.error("File '" + aFinalFile.getAbsolutePath() + "' has pending changes after initialRead!");
                }
            } catch (final Exception ex) {
                triggerExceptionHandlersRead(ex, bIsInitialization, aFinalFile);
                throw new DAOException("Error " + (bIsInitialization ? "initializing" : "reading") + (aFinalFile == null ? "in-memory" : " the file '" + aFinalFile.getAbsolutePath() + "'"), ex);
            }
            // Trigger after read before WAL
            if (aDoc != null)
                onBetweenReadAndWAL(aDoc);
            // Check if there is any WAL file to recover
            final String sWALFilename = _getWALFilename();
            final File aWALFile = sWALFilename == null ? null : m_aIO.getFile(sWALFilename);
            if (aWALFile != null && aWALFile.exists()) {
                if (LOGGER.isInfoEnabled())
                    LOGGER.info("Trying to recover from WAL file " + aWALFile.getAbsolutePath());
                boolean bPerformedAtLeastOnRecovery = false;
                boolean bRecoveryContainedErrors = false;
                // Avoid writing the recovery actions to the WAL file again :)
                try (final DataInputStream aOIS = new DataInputStream(FileHelper.getInputStream(aWALFile))) {
                    // WAL format: repeated [actionTypeId, elementCount, element*]
                    while (true) {
                        // Read action type
                        String sActionTypeID;
                        try {
                            sActionTypeID = StreamHelper.readSafeUTF(aOIS);
                        } catch (final EOFException ex) {
                            // End of file
                            break;
                        }
                        final EDAOActionType eActionType = EDAOActionType.getFromIDOrThrow(sActionTypeID);
                        // Read number of elements
                        final int nElements = aOIS.readInt();
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("Trying to recover " + nElements + " " + eActionType + " actions from WAL file");
                        // Read all elements
                        for (int i = 0; i < nElements; ++i) {
                            final String sElement = StreamHelper.readSafeUTF(aOIS);
                            final DATATYPE aElement = convertWALStringToNative(sElement);
                            if (aElement == null) {
                                // Cannot recover, because conversion fails
                                bRecoveryContainedErrors = true;
                                onRecoveryErrorConvertToNative(eActionType, i, sElement);
                                continue;
                            }
                            if (isDebugLogging())
                                if (LOGGER.isDebugEnabled())
                                    LOGGER.debug("Trying to recover object [" + i + "] with " + sElement.length() + " chars");
                            switch (eActionType) {
                                case CREATE:
                                    try {
                                        onRecoveryCreate(aElement);
                                        bPerformedAtLeastOnRecovery = true;
                                        if (LOGGER.isInfoEnabled())
                                            LOGGER.info("[WAL] wal-recovery create " + aElement);
                                    } catch (final RuntimeException ex) {
                                        if (LOGGER.isErrorEnabled())
                                            LOGGER.error("[WAL] wal-recovery create " + aElement + " - " + ex.getClass().getName() + ": " + ex.getMessage());
                                        throw ex;
                                    }
                                    break;
                                case UPDATE:
                                    try {
                                        onRecoveryUpdate(aElement);
                                        bPerformedAtLeastOnRecovery = true;
                                        if (LOGGER.isInfoEnabled())
                                            LOGGER.info("[WAL] wal-recovery update " + aElement);
                                        // break inside try exits the switch on success;
                                        // on exception we rethrow, so no fall-through occurs
                                        break;
                                    } catch (final RuntimeException ex) {
                                        if (LOGGER.isErrorEnabled())
                                            LOGGER.error("[WAL] wal-recovery update " + aElement + " - " + ex.getClass().getName() + ": " + ex.getMessage());
                                        throw ex;
                                    }
                                case DELETE:
                                    try {
                                        onRecoveryDelete(aElement);
                                        bPerformedAtLeastOnRecovery = true;
                                        if (LOGGER.isInfoEnabled())
                                            LOGGER.info("[WAL] wal-recovery delete " + aElement);
                                        break;
                                    } catch (final RuntimeException ex) {
                                        if (LOGGER.isErrorEnabled())
                                            LOGGER.error("[WAL] wal-recovery delete " + aElement + " - " + ex.getClass().getName() + ": " + ex.getMessage());
                                        throw ex;
                                    }
                                default:
                                    throw new IllegalStateException("Unsupported action type provided: " + eActionType);
                            }
                        }
                    }
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("Successfully finished recovery from WAL file " + aWALFile.getAbsolutePath());
                } catch (final IOException | RuntimeException ex) {
                    if (LOGGER.isErrorEnabled())
                        LOGGER.error("Failed to recover from WAL file '" + aWALFile.getAbsolutePath() + "'. Technical details: " + ex.getClass().getName() + ": " + ex.getMessage());
                    triggerExceptionHandlersRead(ex, false, aWALFile);
                    throw new DAOException("Error the WAL file '" + aWALFile.getAbsolutePath() + "'", ex);
                }
                // Finished recovery successfully
                // Perform the remaining actions AFTER the WAL input stream was
                // closed!
                if (bPerformedAtLeastOnRecovery) {
                    // Write the file without using WAL
                    _writeToFileAndResetPendingChanges("onRecovery");
                }
                // Finally maintain or delete the WAL file, as the recovery has
                // finished
                if (bRecoveryContainedErrors)
                    _maintainWALFileAfterProcessing(sWALFilename);
                else
                    _deleteWALFileAfterProcessing(sWALFilename);
            }
        } finally {
            // Now a WAL file can be written again
            m_bCanWriteWAL = true;
            m_aRWLock.writeLock().unlock();
        }
    }
}
public class ElemTemplateElement { /** * Get the previous sibling ( as a Node ) or return null . * Note that this may be expensive if the parent has many kids ; * we accept that price in exchange for avoiding the prev pointer * TODO : If we were sure parents and sibs are always ElemTemplateElements , * we could hit the fields directly rather than thru accessors . * @ return This node ' s previous sibling or null */ public ElemTemplateElement getPreviousSiblingElem ( ) { } }
ElemTemplateElement walker = getParentNodeElem ( ) ; ElemTemplateElement prev = null ; if ( walker != null ) for ( walker = walker . getFirstChildElem ( ) ; walker != null ; prev = walker , walker = walker . getNextSiblingElem ( ) ) { if ( walker == this ) return prev ; } return null ;
public class ApiOvhMe {
    /**
     * Add a TOTP access restriction.
     * REST: POST /me/accessRestriction/totp
     *
     * @return the newly created TOTP secret as returned by the API
     * @throws IOException if the HTTP call or response conversion fails
     */
    public OvhTOTPSecret accessRestriction_totp_POST() throws IOException {
        String qPath = "/me/accessRestriction/totp";
        StringBuilder sb = path(qPath);
        // POST with no request body; response JSON is mapped to OvhTOTPSecret
        String resp = exec(qPath, "POST", sb.toString(), null);
        return convertTo(resp, OvhTOTPSecret.class);
    }
}
public class CustomUserRegistryWrapper { /** * { @ inheritDoc } */ @ Override public SearchResult getUsers ( String pattern , int limit ) throws RegistryException { } }
try { Result result = customUserRegistry . getUsers ( pattern , limit ) ; return new SearchResult ( result . getList ( ) , result . hasMore ( ) ) ; } catch ( Exception e ) { throw new RegistryException ( e . getMessage ( ) , e ) ; }
public class AuthorizationPrincipalImpl {
    /**
     * Returns the <code>IPermissions</code> for this <code>IAuthorizationPrincipal</code>
     * for the specified <code>owner</code>, <code>activity</code> and <code>target</code>.
     * Null parameters are ignored, so <code>getPermissions(null, null, null)</code>
     * should retrieve all <code>IPermissions</code> for an
     * <code>IAuthorizationPrincipal</code>.
     *
     * @param owner String
     * @param activity String
     * @param target String
     * @return org.apereo.portal.security.IPermission[]
     * @exception AuthorizationException indicates authorization information could
     *     not be retrieved.
     */
    @Override
    public IPermission[] getPermissions(String owner, String activity, String target) throws AuthorizationException {
        // Pure delegation: the authorization service does the actual lookup
        return getAuthorizationService().getPermissionsForPrincipal(this, owner, activity, target);
    }
}
public class AbcGrammar {
    /**
     * note-length ::= (*DIGIT ["/" *DIGIT]) / 1*"/"
     * <p>
     * NOTE(review): the implementation accepts any sequence of digits, then any
     * run of '/', then digits again (all parts optional), which is a superset of
     * the ABNF above (e.g. it also matches multiple slashes between digit runs).
     * Presumably this leniency is intentional for real-world ABC input — confirm
     * against the grammar spec before tightening.
     */
    Rule NoteLength() {
        return Sequence(ZeroOrMoreS(DIGIT()), ZeroOrMoreS('/'), ZeroOrMoreS(DIGIT()))
            .label(NoteLength)
            .suppressSubnodes();
    }
}
public class CmsAccountsApp { /** * Parses a given state string to state bean . < p > * @ param state to be read * @ param baseOU baseOu * @ return CmsStateBean */ public CmsStateBean parseState ( String state , String baseOU ) { } }
String path = baseOU ; String filter = "" ; I_CmsOuTreeType type = CmsOuTreeType . OU ; CmsUUID groupId = null ; List < String > fields = CmsStringUtil . splitAsList ( state , STATE_SEPERATOR ) ; if ( ! fields . isEmpty ( ) ) { if ( fields . size ( ) > 1 ) { path = fields . get ( 1 ) ; // Make sure to only show OUs which are under baseOU if ( path . equals ( "" ) | ! path . startsWith ( baseOU ) ) { path = baseOU ; } } for ( I_CmsOuTreeType ty : getTreeTypeProvider ( ) . getTreeTypes ( ) ) { if ( fields . get ( 0 ) . equals ( ty . getId ( ) ) ) { type = ty ; } } if ( fields . size ( ) > 2 ) { if ( ! CmsStringUtil . isEmptyOrWhitespaceOnly ( fields . get ( 2 ) ) ) { groupId = new CmsUUID ( fields . get ( 2 ) ) ; } } if ( fields . size ( ) > 3 ) { filter = fields . get ( 3 ) ; } } return new CmsStateBean ( path , type , groupId , filter ) ;
public class CommandLineParser {
    /**
     * Get the parsed and checked command line arguments for this parser.
     *
     * @param args the command line arguments to add; these can be passed straight
     *     from the parameter of main(String[])
     * @return a list of string lists; the first ([0]) element in each list is the
     *     command line option, and if the second ([1]) element exists it is the
     *     parameter for that option. Returns null if parseArgs(String[]) has not
     *     been called.
     * @throws InvalidFormatException if a bare value appears with no pending
     *     positional parameter, or a flag that requires a value is followed by
     *     another flag
     */
    @Deprecated
    public List<List<String>> getParsedArgs(String[] args) throws InvalidFormatException {
        for (int i = 0; i < args.length; i++) {
            if (!args[i].startsWith("-")) {
                // Bare token: consume the next declared positional parameter
                if (this.params.size() > 0) {
                    List<String> option = new ArrayList<String>();
                    option.add(this.params.get(0).longOption);
                    this.params.remove(0);
                    option.add(args[i]);
                    sortedArgs.add(option);
                } else {
                    throw new InvalidFormatException("Expected command line option, found " + args[i] + " instead.");
                }
            } else {
                // Flag token: find the matching declared argument
                for (Argument option : this.args) {
                    if (option.matchesFlag(args[i])) {
                        List<String> command = new ArrayList<String>();
                        command.add(noDashes(args[i]));
                        if (option.takesValue) {
                            try {
                                if (args[i + 1].startsWith("-")) {
                                    // Next token is another flag: only an error when
                                    // the value is mandatory
                                    if (option.valueRequired)
                                        throw new InvalidFormatException("Invalid command line format: -" + option.option + " or --" + option.longOption + " requires a parameter, found " + args[i + 1] + " instead.");
                                } else {
                                    command.add(args[++i]);
                                }
                            } catch (ArrayIndexOutOfBoundsException e) {
                                // NOTE(review): deliberately swallowed — a trailing flag
                                // with no following token is treated as "no value given".
                                // An explicit bounds check would be clearer than catching
                                // ArrayIndexOutOfBoundsException.
                            }
                        }
                        sortedArgs.add(command);
                        break;
                    }
                }
            }
        }
        return sortedArgs;
    }
}
public class Enforcer { /** * addPermissionForUser adds a permission for a user or role . * Returns false if the user or role already has the permission ( aka not affected ) . * @ param user the user . * @ param permission the permission , usually be ( obj , act ) . It is actually the rule without the subject . * @ return succeeds or not . */ public boolean addPermissionForUser ( String user , List < String > permission ) { } }
return addPermissionForUser ( user , permission . toArray ( new String [ 0 ] ) ) ;
public class AbstractHttpOverXmppProvider {
    /**
     * Parses a HeadersExtension element if any.
     * <p>
     * Advances the parser by one event; if that event is the start of a
     * {@code headers} element the extension is parsed and the parser is advanced
     * once more (past the closing tag). Otherwise the parser is left positioned
     * on whatever followed (start of data or end of req/res).
     *
     * @param parser parser
     * @return HeadersExtension or null if no headers
     * @throws XmlPullParserException on malformed XML
     * @throws IOException on stream errors
     * @throws SmackParsingException on parsing errors in the headers extension
     */
    protected HeadersExtension parseHeaders(XmlPullParser parser) throws IOException, XmlPullParserException, SmackParsingException {
        HeadersExtension headersExtension = null;
        /* We are either at start of headers, start of data or end of req/res */
        if (parser.next() == XmlPullParser.START_TAG && parser.getName().equals(HeadersExtension.ELEMENT)) {
            headersExtension = HeadersProvider.INSTANCE.parse(parser);
            // step past the headers element so the caller sees the next event
            parser.next();
        }
        return headersExtension;
    }
}
public class RecurlyClient { /** * Get a Plan ' s details * @ param planCode recurly id of plan * @ return the plan object as identified by the passed in ID */ public Plan getPlan ( final String planCode ) { } }
if ( planCode == null || planCode . isEmpty ( ) ) throw new RuntimeException ( "planCode cannot be empty!" ) ; return doGET ( Plan . PLANS_RESOURCE + "/" + planCode , Plan . class ) ;
public class JmsSpout { /** * Sets the JMS Session acknowledgement mode for the JMS seesion associated with this spout . * Possible values : * < ul > * < li > javax . jms . Session . AUTO _ ACKNOWLEDGE < / li > * < li > javax . jms . Session . CLIENT _ ACKNOWLEDGE < / li > * < li > javax . jms . Session . DUPS _ OK _ ACKNOWLEDGE < / li > * < / ul > * @ param mode JMS Session Acknowledgement mode * @ throws IllegalArgumentException if the mode is not recognized . */ public void setJmsAcknowledgeMode ( int mode ) { } }
switch ( mode ) { case Session . AUTO_ACKNOWLEDGE : case Session . CLIENT_ACKNOWLEDGE : case Session . DUPS_OK_ACKNOWLEDGE : break ; default : throw new IllegalArgumentException ( "Unknown Acknowledge mode: " + mode + " (See javax.jms.Session for valid values)" ) ; } this . jmsAcknowledgeMode = mode ;
public class SliceDefinition { /** * Adds a proposition id from which this slice definition is abstracted . * @ param propId * a proposition id < code > String < / code > for an abstract * parameter definition . */ public boolean add ( TemporalExtendedPropositionDefinition tepd ) { } }
if ( tepd != null ) { boolean result = this . abstractedFrom . add ( tepd ) ; if ( result ) { recalculateChildren ( ) ; } return result ; } else { return false ; }
public class ClientPool {
    /**
     * Checks out a client for use by the caller. This method waits if necessary
     * until a client is available or until the pool is closed. May return null if
     * an InterruptedException occurs during checkout, or if the pool is closed
     * during checkout.
     */
    @FFDCIgnore(value = { InterruptedException.class })
    public Client checkoutClient() {
        Client client = null;
        try {
            // need to poll here as the dequeue can be permanently emptied by the
            // close method at any time; the short timeout lets us re-check the
            // closed flag (numClientsClosed) between polls
            while (client == null && numClientsClosed.get() == 0) {
                client = clients.poll(200, TimeUnit.MILLISECONDS);
            }
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is not restored here; the contract
            // documents a null return on interruption instead — confirm intended.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
                Tr.event(tc, "InterruptedException during checkout");
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            int clientHash = (client != null) ? client.hashCode() : 0;
            Tr.event(tc, "post-checkout - (" + clientHash + ") " + (numClients - clients.remainingCapacity()) + " of " + numClients + " clients available.");
        }
        return client;
    }
}
public class AwsSignature {
    /**
     * Create Amazon V2 signature. Reference:
     * http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html
     * <p>
     * Builds the canonical string-to-sign (method, Content-MD5, Content-Type,
     * date/expires, canonicalized x-amz-* headers, canonicalized resource with
     * signed sub-resources) and HMAC-SHA1-signs it with the credential,
     * returning the Base64-encoded signature.
     *
     * @param request the incoming request to sign
     * @param uri the canonical resource path
     * @param credential the secret key used for the HMAC
     * @param queryAuth true when signing query-string (presigned) style
     * @param bothDateHeader true when both Date and x-amz-date headers are present
     * @return the Base64-encoded HMAC-SHA1 signature
     */
    static String createAuthorizationSignature(HttpServletRequest request, String uri, String credential, boolean queryAuth, boolean bothDateHeader) {
        // sort Amazon headers: only x-amz-* headers participate, lowercased,
        // sorted by name (TreeMultimap keeps keys and values ordered)
        SortedSetMultimap<String, String> canonicalizedHeaders = TreeMultimap.create();
        for (String headerName : Collections.list(request.getHeaderNames())) {
            Collection<String> headerValues = Collections.list(request.getHeaders(headerName));
            headerName = headerName.toLowerCase();
            if (!headerName.startsWith("x-amz-") || (bothDateHeader && headerName.equalsIgnoreCase(AwsHttpHeaders.DATE))) {
                continue;
            }
            if (headerValues.isEmpty()) {
                canonicalizedHeaders.put(headerName, "");
            }
            for (String headerValue : headerValues) {
                canonicalizedHeaders.put(headerName, Strings.nullToEmpty(headerValue));
            }
        }
        // Build string to sign
        StringBuilder builder = new StringBuilder()
            .append(request.getMethod())
            .append('\n')
            .append(Strings.nullToEmpty(request.getHeader(HttpHeaders.CONTENT_MD5)))
            .append('\n')
            .append(Strings.nullToEmpty(request.getHeader(HttpHeaders.CONTENT_TYPE)))
            .append('\n');
        String expires = request.getParameter("Expires");
        if (queryAuth) {
            // If expires is not nil, then it is query string sign
            // If expires is nil, maybe also query string sign
            // So should check other accessid param, presign to judge.
            // not the expires
            builder.append(Strings.nullToEmpty(expires));
        } else {
            if (!bothDateHeader) {
                // When an x-amz-date header is present it replaces the Date value
                // in the string-to-sign (per the V2 spec), so an empty line is used
                if (canonicalizedHeaders.containsKey(AwsHttpHeaders.DATE)) {
                    builder.append("");
                } else {
                    builder.append(request.getHeader(HttpHeaders.DATE));
                }
            } else {
                if (!canonicalizedHeaders.containsKey(AwsHttpHeaders.DATE)) {
                    builder.append(request.getHeader(AwsHttpHeaders.DATE));
                } else {
                    // panic: both headers present AND x-amz-date canonicalized —
                    // nothing is appended; presumably unreachable — TODO confirm
                }
            }
        }
        builder.append('\n');
        // Append each canonicalized amz header as "name:value\n"
        for (Map.Entry<String, String> entry : canonicalizedHeaders.entries()) {
            builder.append(entry.getKey()).append(':').append(entry.getValue()).append('\n');
        }
        builder.append(uri);
        // Append the signed sub-resources, sorted, in query-string form
        char separator = '?';
        List<String> subresources = Collections.list(request.getParameterNames());
        Collections.sort(subresources);
        for (String subresource : subresources) {
            if (SIGNED_SUBRESOURCES.contains(subresource)) {
                builder.append(separator).append(subresource);
                String value = request.getParameter(subresource);
                if (!"".equals(value)) {
                    builder.append('=').append(value);
                }
                separator = '&';
            }
        }
        String stringToSign = builder.toString();
        logger.trace("stringToSign: {}", stringToSign);
        // Sign string
        Mac mac;
        try {
            mac = Mac.getInstance("HmacSHA1");
            mac.init(new SecretKeySpec(credential.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
        } catch (InvalidKeyException | NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        return BaseEncoding.base64().encode(mac.doFinal(stringToSign.getBytes(StandardCharsets.UTF_8)));
    }
}
public class Http { /** * HTTP POST : Reads the contents from the specified stream and sends them to the URL . * @ return * the location , as an URI , where the resource is created . * @ throws HttpException * when an exceptional condition occurred during the HTTP method execution . */ public static String postFrom ( final URL to_url , final InputStream from_stream , final MediaType media_type ) { } }
InputStreamRequestCallback callback = new InputStreamRequestCallback ( from_stream , media_type ) ; String location = _execute ( to_url , HttpMethod . POST , callback , new LocationHeaderResponseExtractor ( ) ) ; return location ;
public class Association {
    /**
     * Removes the row with the specified key from this association.
     * The removal is recorded as a pending REMOVE operation (null value) in the
     * current state rather than applied immediately.
     *
     * @param key the key of the association row to remove
     */
    public void remove(RowKey key) {
        currentState.put(key, new AssociationOperation(key, null, REMOVE));
    }
}
public class SvdImplicitQrAlgorithm_DDRM {
    /**
     * Computes the eigenvalues of the 2 by 2 matrix B<sup>T</sup>B, where B is the
     * bidiagonal 2x2 block at rows/cols {@code x1, x1+1}, and writes the resulting
     * singular values back into {@code diag}, zeroing the off-diagonal element.
     *
     * @param x1 index of the top-left element of the 2x2 block
     */
    protected void eigenBB_2x2(int x1) {
        double b11 = diag[x1];
        double b12 = off[x1];
        double b22 = diag[x1 + 1];
        // normalize to reduce overflow: scale by the largest magnitude entry
        double absA = Math.abs(b11);
        double absB = Math.abs(b12);
        double absC = Math.abs(b22);
        double scale = absA > absB ? absA : absB;
        if (absC > scale)
            scale = absC;
        // see if it is a pathological case. the diagonal must already be zero
        // and the eigenvalues are all zero. so just return
        if (scale == 0)
            return;
        b11 /= scale;
        b12 /= scale;
        b22 /= scale;
        // eigen-decomposition of the symmetric 2x2 B^T B =
        // [ b11^2, b11*b12; b11*b12, b12^2 + b22^2 ]
        eigenSmall.symm2x2_fast(b11 * b11, b11 * b12, b12 * b12 + b22 * b22);
        off[x1] = 0;
        // singular values are sqrt of the (rescaled) eigenvalues;
        // the sign of the second eigenvalue is carried over
        diag[x1] = scale * Math.sqrt(eigenSmall.value0.real);
        double sgn = Math.signum(eigenSmall.value1.real);
        diag[x1 + 1] = sgn * scale * Math.sqrt(Math.abs(eigenSmall.value1.real));
    }
}
public class MavenModelScannerPlugin { /** * Adds information about references licenses . * @ param pomDescriptor * The descriptor for the current POM . * @ param model * The Maven Model . * @ param store * The database . */ private void addLicenses ( MavenPomDescriptor pomDescriptor , Model model , Store store ) { } }
List < License > licenses = model . getLicenses ( ) ; for ( License license : licenses ) { MavenLicenseDescriptor licenseDescriptor = store . create ( MavenLicenseDescriptor . class ) ; licenseDescriptor . setUrl ( license . getUrl ( ) ) ; licenseDescriptor . setComments ( license . getComments ( ) ) ; licenseDescriptor . setName ( license . getName ( ) ) ; licenseDescriptor . setDistribution ( license . getDistribution ( ) ) ; pomDescriptor . getLicenses ( ) . add ( licenseDescriptor ) ; }
public class NFSFileVec {
    /**
     * Make a new NFSFileVec key which holds the filename implicitly. This name
     * is used by the Chunks to load data on-demand.
     *
     * @param f the backing file; must exist
     * @param fs futures used for the (possibly asynchronous) key-value store put
     * @return a NFSFileVec mapped to this file.
     * @throws IllegalArgumentException if the file does not exist
     */
    public static NFSFileVec make(File f, Futures fs) {
        if (!f.exists())
            throw new IllegalArgumentException("File not found: " + f.toString());
        long size = f.length();
        // Key encodes the file path so chunks can locate the data lazily
        Key k = Vec.newKey(PersistNFS.decodeFile(f));
        // Insert the top-level FileVec key into the store
        NFSFileVec nfs = new NFSFileVec(k, size);
        DKV.put(k, nfs, fs);
        return nfs;
    }
}
public class WorkingWeek { /** * Create a new calendar with the intersection of WORKING days . * e . g . if normal and arabic calendars are intersected , the week is 3 days : Fri - Sun . * @ param ww * @ return a new Working week * @ since 1.4.0 */ public WorkingWeek intersection ( final WorkingWeek ww ) { } }
final byte combined = ( byte ) ( this . workingDays & ww . workingDays ) ; return new WorkingWeek ( combined ) ;
public class CachingRemoteConnector { /** * Commit database transaction . This method is a NOOP if there is no * database or if { @ link # _ transaction } is currently true . * @ param force true if the database transaction should be committed * regardless of the { @ link # _ transaction } flag . */ private void commit ( boolean force ) { } }
if ( _db == null ) { return ; } if ( _transaction && ! force ) { return ; } try { _db . commit ( ) ; } catch ( Exception e ) { throw new IllegalStateException ( "Could not commit transaction" , e ) ; }
public class RedisUtil { /** * 创建连接池 . * @ param server 服务器 * @ param timeout 超时时间 * @ return */ public static IJedisPool createJedisPool ( String server , int timeout ) { } }
int maxActive = 32 ; return createJedisPool ( server , timeout , maxActive , null ) ;
public class AmazonEC2Client { /** * [ IPv6 only ] Creates an egress - only internet gateway for your VPC . An egress - only internet gateway is used to * enable outbound communication over IPv6 from instances in your VPC to the internet , and prevents hosts outside of * your VPC from initiating an IPv6 connection with your instance . * @ param createEgressOnlyInternetGatewayRequest * @ return Result of the CreateEgressOnlyInternetGateway operation returned by the service . * @ sample AmazonEC2 . CreateEgressOnlyInternetGateway * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / CreateEgressOnlyInternetGateway " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateEgressOnlyInternetGatewayResult createEgressOnlyInternetGateway ( CreateEgressOnlyInternetGatewayRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateEgressOnlyInternetGateway ( request ) ;
public class XpathUtil { /** * 获取同胞中同名元素的数量 * @ param e 元素 * @ return 数量 */ public static int sameTagElNums ( Element e ) { } }
Elements els = e . parent ( ) . getElementsByTag ( e . tagName ( ) ) ; return els . size ( ) ;
public class ManagedBean { /** * Validates the type */ @ Override protected void checkType ( ) { } }
if ( ! isDependent ( ) && getEnhancedAnnotated ( ) . isParameterizedType ( ) ) { throw BeanLogger . LOG . managedBeanWithParameterizedBeanClassMustBeDependent ( type ) ; } boolean passivating = beanManager . isPassivatingScope ( getScope ( ) ) ; if ( passivating && ! isPassivationCapableBean ( ) ) { if ( ! getEnhancedAnnotated ( ) . isSerializable ( ) ) { throw BeanLogger . LOG . passivatingBeanNeedsSerializableImpl ( this ) ; } else if ( hasDecorators ( ) && ! allDecoratorsArePassivationCapable ( ) ) { throw BeanLogger . LOG . passivatingBeanHasNonPassivationCapableDecorator ( this , getFirstNonPassivationCapableDecorator ( ) ) ; } else if ( hasInterceptors ( ) && ! allInterceptorsArePassivationCapable ( ) ) { throw BeanLogger . LOG . passivatingBeanHasNonPassivationCapableInterceptor ( this , getFirstNonPassivationCapableInterceptor ( ) ) ; } }
public class MatrixIO { /** * Converts the contents of a matrix file as a { @ link Matrix } object , using * the provided type description as a hint for what kind to create . The * type of { @ code Matrix } object created will be based on an estimate of * whether the data will fit into the available memory . Note that the * returned { @ link Matrix } instance is not backed by the data on file ; * changes to the { @ code Matrix } will < i > not < / i > be reflected in the * original file ' s data . * @ param matrix a file contain matrix data * @ param format the format of the file * @ param matrixType the expected type and behavior of the matrix in * relation to memory . This value will be used as a hint for what * kind of { @ code Matrix } instance to create * @ return the { @ code Matrix } instance that contains the data in the * provided file */ public static Matrix readMatrix ( File matrix , Format format , Type matrixType ) throws IOException { } }
return readMatrix ( matrix , format , matrixType , false ) ;
public class Crc32 { /** * Feed a bitstring to the crc calculation . */ public void append24 ( int bits ) { } }
long l ; long [ ] a1 ; l = ( ( l = crc ) >> 8L ) ^ ( a1 = CRC32_TABLE ) [ ( int ) ( ( l & 0xFF ) ^ ( long ) ( bits & 0xFF ) ) ] ; l = ( l >> 8L ) ^ a1 [ ( int ) ( ( l & 0xFF ) ^ ( long ) ( ( bits & 0xff00 ) >> 8 ) ) ] ; crc = ( l >> 8L ) ^ a1 [ ( int ) ( ( l & 0xFF ) ^ ( long ) ( ( bits & 0xff0000 ) >> 16 ) ) ] ;