signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class MeasureUnitUtil { /** * Convert the given value expressed in the given unit to meters per second .
* @ param value is the value to convert
* @ param inputUnit is the unit of the { @ code value }
* @ return the result of the convertion . */
@ Pure public static double toMetersPerSecond ( double value , SpeedUnit inputUnit ) { } }
|
switch ( inputUnit ) { case KILOMETERS_PER_HOUR : return 3.6 * value ; case MILLIMETERS_PER_SECOND : return value / 1000. ; case METERS_PER_SECOND : default : } return value ;
|
public class VarTupleSet { /** * Removes the given tuple from use in test cases . */
public void remove ( Tuple tuple ) { } }
|
int i = unused_ . indexOf ( tuple ) ; if ( i >= 0 ) { unused_ . remove ( tuple ) ; }
|
public class Blacklist { /** * Dumps data to the given file .
* @ param filename output file name */
public void write ( String filename ) { } }
|
try { write ( new FileOutputStream ( filename ) ) ; } catch ( FileNotFoundException e ) { e . printStackTrace ( ) ; }
|
public class GetResourceRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetResourceRequest getResourceRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getResourceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getResourceRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ; protocolMarshaller . marshall ( getResourceRequest . getResourceId ( ) , RESOURCEID_BINDING ) ; protocolMarshaller . marshall ( getResourceRequest . getEmbed ( ) , EMBED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class DeviceManagerClient { /** * Creates a device in a device registry .
* < p > Sample code :
* < pre > < code >
* try ( DeviceManagerClient deviceManagerClient = DeviceManagerClient . create ( ) ) {
* RegistryName parent = RegistryName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ REGISTRY ] " ) ;
* Device device = Device . newBuilder ( ) . build ( ) ;
* Device response = deviceManagerClient . createDevice ( parent . toString ( ) , device ) ;
* < / code > < / pre >
* @ param parent The name of the device registry where this device should be created . For example ,
* ` projects / example - project / locations / us - central1 / registries / my - registry ` .
* @ param device The device registration details . The field ` name ` must be empty . The server
* generates ` name ` from the device registry ` id ` and the ` parent ` field .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Device createDevice ( String parent , Device device ) { } }
|
CreateDeviceRequest request = CreateDeviceRequest . newBuilder ( ) . setParent ( parent ) . setDevice ( device ) . build ( ) ; return createDevice ( request ) ;
|
public class AbstractMfClientHttpRequestFactoryWrapper { /** * This implementation simply calls { @ link # createRequest ( URI , HttpMethod , MfClientHttpRequestFactory ) }
* ( if the matchers are OK ) with the wrapped request factory provided to the { @ linkplain
* # AbstractMfClientHttpRequestFactoryWrapper ( MfClientHttpRequestFactory , UriMatchers , boolean )
* constructor } .
* @ param uri the URI to create a request for
* @ param httpMethod the HTTP method to execute */
public final ClientHttpRequest createRequest ( final URI uri , final HttpMethod httpMethod ) throws IOException { } }
|
if ( uri . getScheme ( ) == null || uri . getScheme ( ) . equals ( "file" ) || this . matchers . matches ( uri , httpMethod ) ) { return createRequest ( uri , httpMethod , this . wrappedFactory ) ; } else if ( this . failIfNotMatch ) { throw new IllegalArgumentException ( uri + " is denied." ) ; } else { return this . wrappedFactory . createRequest ( uri , httpMethod ) ; }
|
public class HeartbeatDetectionJob { /** * 当实例列表中只有一个 , 且是当前实例时就重启
* @ param session Hibernate Session对象
* @ param instanceNames 排队等待的实例名列表 , 如InsA , InsB , InsC , InsD
* @ param currentInstanceName 当前服务器实例名 */
@ SuppressWarnings ( "unchecked" ) private Operation detection ( Session session , String [ ] clusterJobInstanceNames , String currentInstanceName ) { } }
|
Query query = session . createQuery ( "from " + Heartbeat . class . getName ( ) + " b order by b.date desc" ) ; List < Heartbeat > heartbeats = query . setMaxResults ( 1 ) . list ( ) ; int currentPos = getPosition ( clusterJobInstanceNames , currentInstanceName ) + 1 ; if ( heartbeats . size ( ) > 0 ) { Date now = new Date ( ) ; Heartbeat heartbeat = heartbeats . get ( 0 ) ; Date beatDate = heartbeat . getDate ( ) ; Calendar beatCalendar = Calendar . getInstance ( ) ; beatCalendar . setTime ( beatDate ) ; String beatInstanceName = heartbeat . getInstanceName ( ) ; int secondUnit = 40 ; int beatPos = getPosition ( clusterJobInstanceNames , beatInstanceName ) + 1 ; if ( ! currentInstanceName . equals ( beatInstanceName ) ) { int currentSecond = currentPos * secondUnit ; if ( currentPos > beatPos ) { beatCalendar . add ( Calendar . SECOND , currentSecond ) ; } else if ( currentPos < beatPos ) { currentSecond = ( currentPos + ( clusterJobInstanceNames . length - beatPos ) ) * secondUnit ; beatCalendar . add ( Calendar . SECOND , currentSecond ) ; } } else { beatCalendar . add ( Calendar . SECOND , secondUnit * clusterJobInstanceNames . length ) ; } if ( now . compareTo ( beatCalendar . getTime ( ) ) > 0 ) { // 当前时间大于心跳时间 + currentSecond , 说明当前运行JOB的实例挂了
return Operation . reset ; } } else { if ( currentPos == 1 ) return Operation . reset ; } return Operation . donothing ;
|
public class Matrix4fStack { /** * Increment the stack pointer by one and set the values of the new current matrix to the one directly below it .
* @ return this */
public Matrix4fStack pushMatrix ( ) { } }
|
if ( curr == mats . length ) { throw new IllegalStateException ( "max stack size of " + ( curr + 1 ) + " reached" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
} mats [ curr ++ ] . set ( this ) ; return this ;
|
public class MusicOnHoldApi { /** * Delete WAV file .
* Delete the specified WAV file .
* @ param fileName The musicFile name for deleting from MOH . ( required )
* @ return ApiResponse & lt ; DeleteMOHFilesResponse & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < DeleteMOHFilesResponse > deleteMOHFilesWithHttpInfo ( String fileName ) throws ApiException { } }
|
com . squareup . okhttp . Call call = deleteMOHFilesValidateBeforeCall ( fileName , null , null ) ; Type localVarReturnType = new TypeToken < DeleteMOHFilesResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
|
public class EthiopicCalendar { /** * { @ inheritDoc }
* @ deprecated This API is ICU internal only .
* @ hide draft / provisional / internal are hidden on Android */
@ Deprecated protected void handleComputeFields ( int julianDay ) { } }
|
int era , year ; int [ ] fields = new int [ 3 ] ; jdToCE ( julianDay , getJDEpochOffset ( ) , fields ) ; // fields [ 0 ] eyear
// fields [ 1 ] month
// fields [ 2 ] day
if ( isAmeteAlemEra ( ) ) { era = AMETE_ALEM ; year = fields [ 0 ] + AMETE_MIHRET_DELTA ; } else { if ( fields [ 0 ] > 0 ) { era = AMETE_MIHRET ; year = fields [ 0 ] ; } else { era = AMETE_ALEM ; year = fields [ 0 ] + AMETE_MIHRET_DELTA ; } } internalSet ( EXTENDED_YEAR , fields [ 0 ] ) ; internalSet ( ERA , era ) ; internalSet ( YEAR , year ) ; internalSet ( MONTH , fields [ 1 ] ) ; internalSet ( DAY_OF_MONTH , fields [ 2 ] ) ; internalSet ( DAY_OF_YEAR , ( 30 * fields [ 1 ] ) + fields [ 2 ] ) ;
|
public class TextGenerator { /** * Generate dictionary string .
* @ param length the length
* @ param context the context
* @ param seed the seed
* @ param lookahead the lookahead
* @ param destructive the destructive
* @ return the string */
public String generateDictionary ( int length , int context , final String seed , int lookahead , boolean destructive ) { } }
|
return generateDictionary ( length , context , seed , lookahead , destructive , false ) ;
|
public class OnlineUpdateUASparser { /** * Since we ' ve online access to the data file , we check every day for an update */
@ Override protected synchronized void checkDataMaps ( ) throws IOException { } }
|
if ( lastUpdateCheck == 0 || lastUpdateCheck < System . currentTimeMillis ( ) - updateInterval ) { String versionOnServer = getVersionFromServer ( ) ; if ( currentVersion == null || versionOnServer . compareTo ( currentVersion ) > 0 ) { loadDataFromInternet ( ) ; currentVersion = versionOnServer ; } lastUpdateCheck = System . currentTimeMillis ( ) ; }
|
public class DefaultObjectResultSetMapper { /** * Invoked when the return type of the method is an array type .
* @ param rs ResultSet to process .
* @ param maxRows The maximum size of array to create , a value of 0 indicates that the array
* size will be the same as the result set size ( no limit ) .
* @ param arrayClass The class of object contained within the array
* @ param cal A calendar instance to use for date / time values
* @ return An array of the specified class type
* @ throws SQLException On error . */
protected Object arrayFromResultSet ( ResultSet rs , int maxRows , Class arrayClass , Calendar cal ) throws SQLException { } }
|
ArrayList < Object > list = new ArrayList < Object > ( ) ; Class componentType = arrayClass . getComponentType ( ) ; RowMapper rowMapper = RowMapperFactory . getRowMapper ( rs , componentType , cal ) ; // a value of zero indicates that all rows from the resultset should be included .
if ( maxRows == 0 ) { maxRows = - 1 ; } int numRows ; boolean hasMoreRows = rs . next ( ) ; for ( numRows = 0 ; numRows != maxRows && hasMoreRows ; numRows ++ ) { list . add ( rowMapper . mapRowToReturnType ( ) ) ; hasMoreRows = rs . next ( ) ; } Object array = Array . newInstance ( componentType , numRows ) ; try { for ( int i = 0 ; i < numRows ; i ++ ) { Array . set ( array , i , list . get ( i ) ) ; } } catch ( IllegalArgumentException iae ) { ResultSetMetaData md = rs . getMetaData ( ) ; // assuming no errors in resultSetObject ( ) this can only happen
// for single column result sets .
throw new ControlException ( "The declared Java type for array " + componentType . getName ( ) + "is incompatible with the SQL format of column " + md . getColumnName ( 1 ) + md . getColumnTypeName ( 1 ) + "which returns objects of type + " + list . get ( 0 ) . getClass ( ) . getName ( ) ) ; } return array ;
|
public class EbInterfaceWriter { /** * Create a writer builder for Ebi50InvoiceType .
* @ return The builder and never < code > null < / code > */
@ Nonnull public static EbInterfaceWriter < Ebi50InvoiceType > ebInterface50 ( ) { } }
|
final EbInterfaceWriter < Ebi50InvoiceType > ret = EbInterfaceWriter . create ( Ebi50InvoiceType . class ) ; ret . setNamespaceContext ( EbInterface50NamespaceContext . getInstance ( ) ) ; return ret ;
|
public class UASparser { /** * Creates the internal data structes from the seciontList
* @ param sectionList */
protected void createInternalDataStructre ( List < Section > sectionList ) { } }
|
try { lock . lock ( ) ; for ( Section sec : sectionList ) { if ( "robots" . equals ( sec . getName ( ) ) ) { Map < String , RobotEntry > robotsMapTmp = new HashMap < String , RobotEntry > ( ) ; for ( Entry en : sec . getEntries ( ) ) { RobotEntry re = new RobotEntry ( en . getData ( ) ) ; robotsMapTmp . put ( re . getUserAgentString ( ) , re ) ; } robotsMap = robotsMapTmp ; } else if ( "os" . equals ( sec . getName ( ) ) ) { Map < Long , OsEntry > osMapTmp = new HashMap < Long , OsEntry > ( ) ; for ( Entry en : sec . getEntries ( ) ) { OsEntry oe = new OsEntry ( en . getData ( ) ) ; osMapTmp . put ( Long . parseLong ( en . getKey ( ) ) , oe ) ; } osMap = osMapTmp ; } else if ( "browser" . equals ( sec . getName ( ) ) ) { Map < Long , BrowserEntry > browserMapTmp = new HashMap < Long , BrowserEntry > ( ) ; for ( Entry en : sec . getEntries ( ) ) { BrowserEntry be = new BrowserEntry ( en . getData ( ) ) ; browserMapTmp . put ( Long . parseLong ( en . getKey ( ) ) , be ) ; } browserMap = browserMapTmp ; } else if ( "browser_type" . equals ( sec . getName ( ) ) ) { Map < Long , String > browserTypeMapTmp = new HashMap < Long , String > ( ) ; for ( Entry en : sec . getEntries ( ) ) { browserTypeMapTmp . put ( Long . parseLong ( en . getKey ( ) ) , en . getData ( ) . iterator ( ) . next ( ) ) ; } browserTypeMap = browserTypeMapTmp ; } else if ( "browser_reg" . equals ( sec . getName ( ) ) ) { Map < String , Long > browserRegMapTmp = new LinkedHashMap < String , Long > ( ) ; for ( Entry en : sec . getEntries ( ) ) { Iterator < String > it = en . getData ( ) . iterator ( ) ; browserRegMapTmp . put ( convertPerlToJavaRegex ( it . next ( ) ) , Long . parseLong ( it . next ( ) ) ) ; } browserRegMap = browserRegMapTmp ; } else if ( "browser_os" . equals ( sec . getName ( ) ) ) { Map < Long , Long > browserOsMapTmp = new HashMap < Long , Long > ( ) ; for ( Entry en : sec . getEntries ( ) ) { browserOsMapTmp . put ( Long . parseLong ( en . getKey ( ) ) , Long . parseLong ( en . 
getData ( ) . iterator ( ) . next ( ) ) ) ; } browserOsMap = browserOsMapTmp ; } else if ( "os_reg" . equals ( sec . getName ( ) ) ) { Map < Pattern , Long > osRegMapTmp = new LinkedHashMap < Pattern , Long > ( ) ; for ( Entry en : sec . getEntries ( ) ) { Iterator < String > it = en . getData ( ) . iterator ( ) ; Pattern pattern = Pattern . compile ( convertPerlToJavaRegex ( it . next ( ) ) , Pattern . CASE_INSENSITIVE | Pattern . DOTALL ) ; osRegMapTmp . put ( pattern , Long . parseLong ( it . next ( ) ) ) ; } osRegMap = osRegMapTmp ; } } } finally { lock . unlock ( ) ; }
|
public class CPDefinitionOptionRelUtil { /** * Returns the last cp definition option rel in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp definition option rel , or < code > null < / code > if a matching cp definition option rel could not be found */
public static CPDefinitionOptionRel fetchByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CPDefinitionOptionRel > orderByComparator ) { } }
|
return getPersistence ( ) . fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ;
|
public class JStaffLine { /** * Returns true if this staff line has note ( s ) .
* Returns false if it only contains clef , key , time sig . , tempo , barlines . . . */
public boolean hasNotes ( ) { } }
|
for ( Object m_staffElement : m_staffElements ) { JScoreElement element = ( JScoreElement ) m_staffElement ; if ( ( element instanceof JNoteElementAbstract ) || ( element instanceof JGroupOfNotes ) ) return true ; } return false ;
|
public class Track { /** * track an event
* @ param historyToken */
private static void track ( String historyToken ) { } }
|
if ( historyToken == null ) { historyToken = "historyToken_null" ; } historyToken = URL . encode ( "/WWARN-GWT-Analytics/V1.0/" + historyToken ) ; boolean hasErrored = false ; try { trackGoogleAnalytics ( historyToken ) ; } catch ( JavaScriptException e ) { hasErrored = true ; GWT . log ( "Unable to track" , e ) ; } if ( ! hasErrored ) GWT . log ( "Tracked " + historyToken ) ;
|
public class LoggedInChecker { /** * get logged in user .
* @ return UserData or null if no one is logged in */
public User getLoggedInUser ( ) { } }
|
User user = null ; final Authentication authentication = SecurityContextHolder . getContext ( ) . getAuthentication ( ) ; if ( authentication != null ) { final Object principal = authentication . getPrincipal ( ) ; // principal can be " anonymousUser " ( String )
if ( principal instanceof UserDetails ) { user = userDetailsConverter . convert ( ( UserDetails ) principal ) ; } } return user ;
|
public class PullSocketImporter { /** * Set the socket to newSocket , unless we ' re shutting down .
* The most reliable way to ensure the importer thread exits is to close its socket .
* @ param newSocket socket to replace any previous socket . May be null . */
private void replaceSocket ( Socket newSocket ) { } }
|
synchronized ( m_socketLock ) { closeSocket ( m_socket ) ; if ( m_eos . get ( ) ) { closeSocket ( newSocket ) ; m_socket = null ; } else { m_socket = newSocket ; } }
|
public class AmazonEC2Client { /** * Links an EC2 - Classic instance to a ClassicLink - enabled VPC through one or more of the VPC ' s security groups . You
* cannot link an EC2 - Classic instance to more than one VPC at a time . You can only link an instance that ' s in the
* < code > running < / code > state . An instance is automatically unlinked from a VPC when it ' s stopped - you can link it
* to the VPC again when you restart it .
* After you ' ve linked an instance , you cannot change the VPC security groups that are associated with it . To change
* the security groups , you must first unlink the instance , and then link it again .
* Linking your instance to a VPC is sometimes referred to as < i > attaching < / i > your instance .
* @ param attachClassicLinkVpcRequest
* @ return Result of the AttachClassicLinkVpc operation returned by the service .
* @ sample AmazonEC2 . AttachClassicLinkVpc
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / AttachClassicLinkVpc " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public AttachClassicLinkVpcResult attachClassicLinkVpc ( AttachClassicLinkVpcRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeAttachClassicLinkVpc ( request ) ;
|
public class BranchController { /** * Sync . this template instance against its template definition */
@ RequestMapping ( value = "branches/{branchId}/sync" , method = RequestMethod . POST ) public Ack syncTemplateInstance ( @ PathVariable ID branchId ) { } }
|
return branchTemplateService . syncInstance ( branchId ) ;
|
public class Util { /** * Compare two strings . null is less than any non - null string .
* @ param s1 first string .
* @ param s2 second string .
* @ return int 0 if the s1 is equal to s2;
* < 0 if s1 is lexicographically less than s2;
* > 0 if s1 is lexicographically greater than s2. */
public static int compareStrings ( final String s1 , final String s2 ) { } }
|
if ( s1 == null ) { if ( s2 != null ) { return - 1 ; } return 0 ; } if ( s2 == null ) { return 1 ; } return s1 . compareTo ( s2 ) ;
|
public class ServiceFuture { /** * Creates a ServiceCall from an observable object .
* @ param observable the observable to create from
* @ param < T > the type of the response
* @ return the created ServiceCall */
public static < T > ServiceFuture < T > fromResponse ( final Observable < ServiceResponse < T > > observable ) { } }
|
final ServiceFuture < T > serviceFuture = new ServiceFuture < > ( ) ; serviceFuture . subscription = observable . last ( ) . subscribe ( new Action1 < ServiceResponse < T > > ( ) { @ Override public void call ( ServiceResponse < T > t ) { serviceFuture . set ( t . body ( ) ) ; } } , new Action1 < Throwable > ( ) { @ Override public void call ( Throwable throwable ) { serviceFuture . setException ( throwable ) ; } } ) ; return serviceFuture ;
|
public class NetworkEnvironmentConfiguration { /** * Validates the ( new ) network buffer configuration .
* @ param pageSize size of memory buffers
* @ param networkBufFractionfraction of JVM memory to use for network buffers
* @ param networkBufMin minimum memory size for network buffers ( in bytes )
* @ param networkBufMax maximum memory size for network buffers ( in bytes )
* @ throws IllegalConfigurationException if the condition does not hold */
private static void checkNewNetworkConfig ( final int pageSize , final float networkBufFraction , final long networkBufMin , final long networkBufMax ) throws IllegalConfigurationException { } }
|
ConfigurationParserUtils . checkConfigParameter ( networkBufFraction > 0.0f && networkBufFraction < 1.0f , networkBufFraction , TaskManagerOptions . NETWORK_BUFFERS_MEMORY_FRACTION . key ( ) , "Network buffer memory fraction of the free memory must be between 0.0 and 1.0" ) ; ConfigurationParserUtils . checkConfigParameter ( networkBufMin >= pageSize , networkBufMin , TaskManagerOptions . NETWORK_BUFFERS_MEMORY_MIN . key ( ) , "Minimum memory for network buffers must allow at least one network " + "buffer with respect to the memory segment size" ) ; ConfigurationParserUtils . checkConfigParameter ( networkBufMax >= pageSize , networkBufMax , TaskManagerOptions . NETWORK_BUFFERS_MEMORY_MAX . key ( ) , "Maximum memory for network buffers must allow at least one network " + "buffer with respect to the memory segment size" ) ; ConfigurationParserUtils . checkConfigParameter ( networkBufMax >= networkBufMin , networkBufMax , TaskManagerOptions . NETWORK_BUFFERS_MEMORY_MAX . key ( ) , "Maximum memory for network buffers must not be smaller than minimum memory (" + TaskManagerOptions . NETWORK_BUFFERS_MEMORY_MAX . key ( ) + ": " + networkBufMin + ")" ) ;
|
public class CryptoServiceImpl { /** * { @ inheritDoc } */
@ Override public String decrypt ( String encrypted ) throws TechnicalException { } }
|
return decrypt ( Context . getCryptoKey ( ) , encrypted ) ;
|
public class AtomicCounterMapBuilder { /** * Sets extra serializable types on the map .
* @ param extraTypes the types to set
* @ return the map builder */
@ SuppressWarnings ( "unchecked" ) public AtomicCounterMapBuilder < K > withExtraTypes ( Class < ? > ... extraTypes ) { } }
|
config . setExtraTypes ( Lists . newArrayList ( extraTypes ) ) ; return this ;
|
public class Parser { /** * 12.6.4 The for - in Statement */
private ParseTree parseForInStatement ( SourcePosition start , ParseTree initializer ) { } }
|
eat ( TokenType . IN ) ; ParseTree collection = parseExpression ( ) ; eat ( TokenType . CLOSE_PAREN ) ; ParseTree body = parseStatement ( ) ; return new ForInStatementTree ( getTreeLocation ( start ) , initializer , collection , body ) ;
|
public class ASTUtil { /** * Returns the < CODE > FieldDeclaration < / CODE > for the specified
* < CODE > FieldAnnotation < / CODE > . The field has to be declared in the
* specified < CODE > TypeDeclaration < / CODE > .
* @ param type
* The < CODE > TypeDeclaration < / CODE > , where the
* < CODE > FieldDeclaration < / CODE > is declared in .
* @ param fieldAnno
* The < CODE > FieldAnnotation < / CODE > , which contains the field
* name of the < CODE > FieldDeclaration < / CODE > .
* @ return the < CODE > FieldDeclaration < / CODE > found in the specified
* < CODE > TypeDeclaration < / CODE > .
* @ throws FieldDeclarationNotFoundException
* if no matching < CODE > FieldDeclaration < / CODE > was found . */
public static FieldDeclaration getFieldDeclaration ( TypeDeclaration type , FieldAnnotation fieldAnno ) throws FieldDeclarationNotFoundException { } }
|
requireNonNull ( fieldAnno , "field annotation" ) ; return getFieldDeclaration ( type , fieldAnno . getFieldName ( ) ) ;
|
public class MFSResource { /** * Extract most frequent sense baseline from WordNet data , using Ciaramita and
* Altun ' s ( 2006 ) approach for a bio encoding .
* @ param lemmas
* in the sentence
* @ param posTags
* the postags of the sentence
* @ return the most frequent senses for the sentence */
public List < String > getFirstSenseBio ( final List < String > lemmas , final Span [ ] posTags ) { } }
|
final List < String > mostFrequentSenseList = new ArrayList < String > ( ) ; String prefix = "-" + BioCodec . START ; String mostFrequentSense = null ; String searchSpan = null ; // iterative over lemmas from the beginning
for ( int i = 0 ; i < lemmas . size ( ) ; i ++ ) { mostFrequentSense = null ; final String pos = posTags [ i ] . getType ( ) ; int j ; // iterate over lemmas from the end
for ( j = lemmas . size ( ) - 1 ; j >= i ; j -- ) { // create span for search in multimap ; the first search takes as span
// the whole sentence
final String endPos = posTags [ j ] . getType ( ) ; searchSpan = createSpan ( lemmas , i , j ) ; final String firstSpan = ( searchSpan + "#" + pos . substring ( 0 , 1 ) ) . toLowerCase ( ) ; final TreeMultimap < Integer , String > mfsMap = getOrderedMap ( firstSpan ) ; if ( ! mfsMap . isEmpty ( ) ) { mostFrequentSense = getMFS ( mfsMap ) ; break ; } final String lastSpan = ( searchSpan + "#" + endPos . substring ( 0 , 1 ) ) . toLowerCase ( ) ; final TreeMultimap < Integer , String > mfsMapEnd = getOrderedMap ( lastSpan ) ; if ( ! mfsMapEnd . isEmpty ( ) ) { mostFrequentSense = getMFS ( mfsMapEnd ) ; break ; } } prefix = "-" + BioCodec . START ; // multi - token case
if ( mostFrequentSense != null ) { while ( i < j ) { mostFrequentSenseList . add ( ( mostFrequentSense + prefix ) . intern ( ) ) ; prefix = "-" + BioCodec . CONTINUE ; i ++ ; } } // one word case or last member of multispan
if ( mostFrequentSense != null ) { mostFrequentSenseList . add ( ( mostFrequentSense + prefix ) . intern ( ) ) ; } else { mostFrequentSenseList . add ( BioCodec . OTHER ) ; } } return mostFrequentSenseList ;
|
public class LanternaTable { /** * TODO remove when Lanterna 3.1.0 is released */
@ Override public synchronized Table < String > setSize ( TerminalSize size ) { } }
|
setVisibleRows ( size . getRows ( ) - 1 ) ; return super . setSize ( size ) ;
|
public class Span { /** * Returns true if the specified span intersects with this span .
* @ param s
* The span to compare with this span .
* @ return true is the spans overlap ; false otherwise . */
public boolean intersects ( final Span s ) { } }
|
final int sstart = s . getStart ( ) ; // either s ' s start is in this or this ' start is in s
return this . contains ( s ) || s . contains ( this ) || getStart ( ) <= sstart && sstart < getEnd ( ) || sstart <= getStart ( ) && getStart ( ) < s . getEnd ( ) ;
|
public class ActiveSyncManager { /** * Launches polling thread on a particular mount point with starting txId .
* @ param mountId launch polling thread on a mount id
* @ param txId specifies the transaction id to initialize the pollling thread */
public void launchPollingThread ( long mountId , long txId ) { } }
|
LOG . debug ( "launch polling thread for mount id {}, txId {}" , mountId , txId ) ; if ( ! mPollerMap . containsKey ( mountId ) ) { try ( CloseableResource < UnderFileSystem > ufsClient = mMountTable . getUfsClient ( mountId ) . acquireUfsResource ( ) ) { ufsClient . get ( ) . startActiveSyncPolling ( txId ) ; } catch ( IOException e ) { LOG . warn ( "IO Exception trying to launch Polling thread {}" , e ) ; } ActiveSyncer syncer = new ActiveSyncer ( mFileSystemMaster , this , mMountTable , mountId ) ; Future < ? > future = getExecutor ( ) . submit ( new HeartbeatThread ( HeartbeatContext . MASTER_ACTIVE_UFS_SYNC , syncer , ( int ) ServerConfiguration . getMs ( PropertyKey . MASTER_ACTIVE_UFS_SYNC_INTERVAL ) , ServerConfiguration . global ( ) ) ) ; mPollerMap . put ( mountId , future ) ; }
|
public class AppendableExpression { /** * Returns a similar { @ link AppendableExpression } but with the given ( string valued ) expression
* appended to it . */
AppendableExpression appendString ( Expression exp ) { } }
|
return withNewDelegate ( delegate . invoke ( APPEND , exp ) , true ) ;
|
public class IOUtils { /** * Load the parameters in the { @ code TrainingParameters } file .
* @ param paramFile
* the parameter file
* @ param supportSequenceTraining
* wheter sequence training is supported
* @ return the parameters */
private static TrainingParameters loadTrainingParameters ( final String paramFile , final boolean supportSequenceTraining ) { } }
|
TrainingParameters params = null ; if ( paramFile != null ) { checkInputFile ( "Training Parameter" , new File ( paramFile ) ) ; InputStream paramsIn = null ; try { paramsIn = new FileInputStream ( new File ( paramFile ) ) ; params = new opennlp . tools . util . TrainingParameters ( paramsIn ) ; } catch ( final IOException e ) { throw new TerminateToolException ( - 1 , "Error during parameters loading: " + e . getMessage ( ) , e ) ; } finally { try { if ( paramsIn != null ) { paramsIn . close ( ) ; } } catch ( final IOException e ) { System . err . println ( "Error closing the input stream" ) ; } } if ( ! TrainerFactory . isValid ( params . getSettings ( ) ) ) { throw new TerminateToolException ( 1 , "Training parameters file '" + paramFile + "' is invalid!" ) ; } } return params ;
|
public class NormalM { /** * Sets the mean and covariance for this distribution . For an < i > n < / i > dimensional distribution ,
* < tt > mean < / tt > should be of length < i > n < / i > and < tt > covariance < / tt > should be an < i > n < / i > by < i > n < / i > matrix .
* It is also a requirement that the matrix be symmetric positive definite .
* @ param mean the mean for the distribution . A copy will be used .
* @ param covariance the covariance for this distribution . A copy will be used .
* @ throws ArithmeticException if the < tt > mean < / tt > and < tt > covariance < / tt > do not agree , or the covariance is not
* positive definite . An exception may not be throw for all bad matrices . */
public void setMeanCovariance ( Vec mean , Matrix covariance ) { } }
|
if ( ! covariance . isSquare ( ) ) throw new ArithmeticException ( "Covariance matrix must be square" ) ; else if ( mean . length ( ) != covariance . rows ( ) ) throw new ArithmeticException ( "The mean vector and matrix must have the same dimension," + mean . length ( ) + " does not match [" + covariance . rows ( ) + ", " + covariance . rows ( ) + "]" ) ; // Else , we are good !
this . mean = mean . clone ( ) ; setCovariance ( covariance ) ;
|
public class DescribeEventsResult { /** * The events that match the specified filter criteria .
* @ param events
* The events that match the specified filter criteria . */
public void setEvents ( java . util . Collection < Event > events ) { } }
|
if ( events == null ) { this . events = null ; return ; } this . events = new java . util . ArrayList < Event > ( events ) ;
|
public class UnmodifiableSet { /** * Returns an unmodifiable view of the set created from the given elements .
* @ param elements Array of elements
* @ param < E > type of the elements
* @ return an unmodifiable view of the set created from the given elements . */
@ SafeVarargs public static < E > Set < E > of ( E ... elements ) { } }
|
Set < E > result = new HashSet < > ( ) ; Collections . addAll ( result , elements ) ; return Collections . unmodifiableSet ( result ) ;
|
public class BoundedLocalCache { /** * Removes the mapping for a key without notifying the writer .
* @ param key key whose mapping is to be removed
* @ return the removed value or null if no mapping was found */
@ Nullable V removeNoWriter ( Object key ) { } }
|
Node < K , V > node = data . remove ( nodeFactory . newLookupKey ( key ) ) ; if ( node == null ) { return null ; } V oldValue ; synchronized ( node ) { oldValue = node . getValue ( ) ; if ( node . isAlive ( ) ) { node . retire ( ) ; } } RemovalCause cause ; if ( oldValue == null ) { cause = RemovalCause . COLLECTED ; } else if ( hasExpired ( node , expirationTicker ( ) . read ( ) ) ) { cause = RemovalCause . EXPIRED ; } else { cause = RemovalCause . EXPLICIT ; } if ( hasRemovalListener ( ) ) { @ SuppressWarnings ( "unchecked" ) K castKey = ( K ) key ; notifyRemoval ( castKey , oldValue , cause ) ; } afterWrite ( new RemovalTask ( node ) ) ; return ( cause == RemovalCause . EXPLICIT ) ? oldValue : null ;
|
public class ECSTarget { /** * The < code > ECSTaskSet < / code > objects associated with the ECS target .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTaskSetsInfo ( java . util . Collection ) } or { @ link # withTaskSetsInfo ( java . util . Collection ) } if you want to
* override the existing values .
* @ param taskSetsInfo
* The < code > ECSTaskSet < / code > objects associated with the ECS target .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ECSTarget withTaskSetsInfo ( ECSTaskSet ... taskSetsInfo ) { } }
|
if ( this . taskSetsInfo == null ) { setTaskSetsInfo ( new com . amazonaws . internal . SdkInternalList < ECSTaskSet > ( taskSetsInfo . length ) ) ; } for ( ECSTaskSet ele : taskSetsInfo ) { this . taskSetsInfo . add ( ele ) ; } return this ;
|
public class MCWrapper { /** * Updates the < code > UOWCoordinator < / code > instance .
* This will be used to retrieve the current UOW and update the instance variable .
* This will be used to re - initialize the uowCoord at a new transaction boundary .
* This will need to be set back to null at the completion of the current UOW .
* @ return UOWCoordinator */
protected UOWCoordinator updateUOWCoordinator ( ) { } }
|
UOWCurrent uowCurrent = ( UOWCurrent ) pm . connectorSvc . transactionManager ; uowCoord = uowCurrent == null ? null : uowCurrent . getUOWCoord ( ) ; return uowCoord ;
|
public class XMLChar { /** * Returns true if the specified character is a BaseChar as defined by production [ 85 ] in the XML
 * 1.0 specification .
 * @ param ch
 * The character to check . */
public static boolean isBaseChar ( int ch ) { } }
|
// Direct transcription of the BaseChar ranges from XML 1.0 production [85].
// Do not "simplify" individual comparisons: each range/singleton mirrors one entry
// of the specification's table, and correctness depends on the exact bounds below.
return ( 0x0041 <= ch && ch <= 0x005A ) || ( 0x0061 <= ch && ch <= 0x007A ) || ( 0x00C0 <= ch && ch <= 0x00D6 ) || ( 0x00D8 <= ch && ch <= 0x00F6 ) || ( 0x00F8 <= ch && ch <= 0x00FF ) || ( 0x0100 <= ch && ch <= 0x0131 ) || ( 0x0134 <= ch && ch <= 0x013E ) || ( 0x0141 <= ch && ch <= 0x0148 ) || ( 0x014A <= ch && ch <= 0x017E ) || ( 0x0180 <= ch && ch <= 0x01C3 ) || ( 0x01CD <= ch && ch <= 0x01F0 ) || ( 0x01F4 <= ch && ch <= 0x01F5 ) || ( 0x01FA <= ch && ch <= 0x0217 ) || ( 0x0250 <= ch && ch <= 0x02A8 ) || ( 0x02BB <= ch && ch <= 0x02C1 ) || 0x0386 == ch || ( 0x0388 <= ch && ch <= 0x038A ) || 0x038C == ch || ( 0x038E <= ch && ch <= 0x03A1 ) || ( 0x03A3 <= ch && ch <= 0x03CE ) || ( 0x03D0 <= ch && ch <= 0x03D6 ) || 0x03DA == ch || 0x03DC == ch || 0x03DE == ch || 0x03E0 == ch || ( 0x03E2 <= ch && ch <= 0x03F3 ) || ( 0x0401 <= ch && ch <= 0x040C ) || ( 0x040E <= ch && ch <= 0x044F ) || ( 0x0451 <= ch && ch <= 0x045C ) || ( 0x045E <= ch && ch <= 0x0481 ) || ( 0x0490 <= ch && ch <= 0x04C4 ) || ( 0x04C7 <= ch && ch <= 0x04C8 ) || ( 0x04CB <= ch && ch <= 0x04CC ) || ( 0x04D0 <= ch && ch <= 0x04EB ) || ( 0x04EE <= ch && ch <= 0x04F5 ) || ( 0x04F8 <= ch && ch <= 0x04F9 ) || ( 0x0531 <= ch && ch <= 0x0556 ) || 0x0559 == ch || ( 0x0561 <= ch && ch <= 0x0586 ) || ( 0x05D0 <= ch && ch <= 0x05EA ) || ( 0x05F0 <= ch && ch <= 0x05F2 ) || ( 0x0621 <= ch && ch <= 0x063A ) || ( 0x0641 <= ch && ch <= 0x064A ) || ( 0x0671 <= ch && ch <= 0x06B7 ) || ( 0x06BA <= ch && ch <= 0x06BE ) || ( 0x06C0 <= ch && ch <= 0x06CE ) || ( 0x06D0 <= ch && ch <= 0x06D3 ) || 0x06D5 == ch || ( 0x06E5 <= ch && ch <= 0x06E6 ) || ( 0x0905 <= ch && ch <= 0x0939 ) || 0x093D == ch || ( 0x0958 <= ch && ch <= 0x0961 ) || ( 0x0985 <= ch && ch <= 0x098C ) || ( 0x098F <= ch && ch <= 0x0990 ) || ( 0x0993 <= ch && ch <= 0x09A8 ) || ( 0x09AA <= ch && ch <= 0x09B0 ) || 0x09B2 == ch || ( 0x09B6 <= ch && ch <= 0x09B9 ) || ( 0x09DC <= ch && ch <= 0x09DD ) || ( 0x09DF <= ch && ch <= 0x09E1 ) || ( 0x09F0 <= ch && ch <= 0x09F1 ) 
|| ( 0x0A05 <= ch && ch <= 0x0A0A ) || ( 0x0A0F <= ch && ch <= 0x0A10 ) || ( 0x0A13 <= ch && ch <= 0x0A28 ) || ( 0x0A2A <= ch && ch <= 0x0A30 ) || ( 0x0A32 <= ch && ch <= 0x0A33 ) || ( 0x0A35 <= ch && ch <= 0x0A36 ) || ( 0x0A38 <= ch && ch <= 0x0A39 ) || ( 0x0A59 <= ch && ch <= 0x0A5C ) || 0x0A5E == ch || ( 0x0A72 <= ch && ch <= 0x0A74 ) || ( 0x0A85 <= ch && ch <= 0x0A8B ) || 0x0A8D == ch || ( 0x0A8F <= ch && ch <= 0x0A91 ) || ( 0x0A93 <= ch && ch <= 0x0AA8 ) || ( 0x0AAA <= ch && ch <= 0x0AB0 ) || ( 0x0AB2 <= ch && ch <= 0x0AB3 ) || ( 0x0AB5 <= ch && ch <= 0x0AB9 ) || 0x0ABD == ch || 0x0AE0 == ch || ( 0x0B05 <= ch && ch <= 0x0B0C ) || ( 0x0B0F <= ch && ch <= 0x0B10 ) || ( 0x0B13 <= ch && ch <= 0x0B28 ) || ( 0x0B2A <= ch && ch <= 0x0B30 ) || ( 0x0B32 <= ch && ch <= 0x0B33 ) || ( 0x0B36 <= ch && ch <= 0x0B39 ) || 0x0B3D == ch || ( 0x0B5C <= ch && ch <= 0x0B5D ) || ( 0x0B5F <= ch && ch <= 0x0B61 ) || ( 0x0B85 <= ch && ch <= 0x0B8A ) || ( 0x0B8E <= ch && ch <= 0x0B90 ) || ( 0x0B92 <= ch && ch <= 0x0B95 ) || ( 0x0B99 <= ch && ch <= 0x0B9A ) || 0x0B9C == ch || ( 0x0B9E <= ch && ch <= 0x0B9F ) || ( 0x0BA3 <= ch && ch <= 0x0BA4 ) || ( 0x0BA8 <= ch && ch <= 0x0BAA ) || ( 0x0BAE <= ch && ch <= 0x0BB5 ) || ( 0x0BB7 <= ch && ch <= 0x0BB9 ) || ( 0x0C05 <= ch && ch <= 0x0C0C ) || ( 0x0C0E <= ch && ch <= 0x0C10 ) || ( 0x0C12 <= ch && ch <= 0x0C28 ) || ( 0x0C2A <= ch && ch <= 0x0C33 ) || ( 0x0C35 <= ch && ch <= 0x0C39 ) || ( 0x0C60 <= ch && ch <= 0x0C61 ) || ( 0x0C85 <= ch && ch <= 0x0C8C ) || ( 0x0C8E <= ch && ch <= 0x0C90 ) || ( 0x0C92 <= ch && ch <= 0x0CA8 ) || ( 0x0CAA <= ch && ch <= 0x0CB3 ) || ( 0x0CB5 <= ch && ch <= 0x0CB9 ) || 0x0CDE == ch || ( 0x0CE0 <= ch && ch <= 0x0CE1 ) || ( 0x0D05 <= ch && ch <= 0x0D0C ) || ( 0x0D0E <= ch && ch <= 0x0D10 ) || ( 0x0D12 <= ch && ch <= 0x0D28 ) || ( 0x0D2A <= ch && ch <= 0x0D39 ) || ( 0x0D60 <= ch && ch <= 0x0D61 ) || ( 0x0E01 <= ch && ch <= 0x0E2E ) || 0x0E30 == ch || ( 0x0E32 <= ch && ch <= 0x0E33 ) || ( 0x0E40 <= ch && ch <= 0x0E45 ) 
|| ( 0x0E81 <= ch && ch <= 0x0E82 ) || 0x0E84 == ch || ( 0x0E87 <= ch && ch <= 0x0E88 ) || 0x0E8A == ch || 0x0E8D == ch || ( 0x0E94 <= ch && ch <= 0x0E97 ) || ( 0x0E99 <= ch && ch <= 0x0E9F ) || ( 0x0EA1 <= ch && ch <= 0x0EA3 ) || 0x0EA5 == ch || 0x0EA7 == ch || ( 0x0EAA <= ch && ch <= 0x0EAB ) || ( 0x0EAD <= ch && ch <= 0x0EAE ) || 0x0EB0 == ch || ( 0x0EB2 <= ch && ch <= 0x0EB3 ) || 0x0EBD == ch || ( 0x0EC0 <= ch && ch <= 0x0EC4 ) || ( 0x0F40 <= ch && ch <= 0x0F47 ) || ( 0x0F49 <= ch && ch <= 0x0F69 ) || ( 0x10A0 <= ch && ch <= 0x10C5 ) || ( 0x10D0 <= ch && ch <= 0x10F6 ) || 0x1100 == ch || ( 0x1102 <= ch && ch <= 0x1103 ) || ( 0x1105 <= ch && ch <= 0x1107 ) || 0x1109 == ch || ( 0x110B <= ch && ch <= 0x110C ) || ( 0x110E <= ch && ch <= 0x1112 ) || 0x113C == ch || 0x113E == ch || 0x1140 == ch || 0x114C == ch || 0x114E == ch || 0x1150 == ch || ( 0x1154 <= ch && ch <= 0x1155 ) || 0x1159 == ch || ( 0x115F <= ch && ch <= 0x1161 ) || 0x1163 == ch || 0x1165 == ch || 0x1167 == ch || 0x1169 == ch || ( 0x116D <= ch && ch <= 0x116E ) || ( 0x1172 <= ch && ch <= 0x1173 ) || 0x1175 == ch || 0x119E == ch || 0x11A8 == ch || 0x11AB == ch || ( 0x11AE <= ch && ch <= 0x11AF ) || ( 0x11B7 <= ch && ch <= 0x11B8 ) || 0x11BA == ch || ( 0x11BC <= ch && ch <= 0x11C2 ) || 0x11EB == ch || 0x11F0 == ch || 0x11F9 == ch || ( 0x1E00 <= ch && ch <= 0x1E9B ) || ( 0x1EA0 <= ch && ch <= 0x1EF9 ) || ( 0x1F00 <= ch && ch <= 0x1F15 ) || ( 0x1F18 <= ch && ch <= 0x1F1D ) || ( 0x1F20 <= ch && ch <= 0x1F45 ) || ( 0x1F48 <= ch && ch <= 0x1F4D ) || ( 0x1F50 <= ch && ch <= 0x1F57 ) || 0x1F59 == ch || 0x1F5B == ch || 0x1F5D == ch || ( 0x1F5F <= ch && ch <= 0x1F7D ) || ( 0x1F80 <= ch && ch <= 0x1FB4 ) || ( 0x1FB6 <= ch && ch <= 0x1FBC ) || 0x1FBE == ch || ( 0x1FC2 <= ch && ch <= 0x1FC4 ) || ( 0x1FC6 <= ch && ch <= 0x1FCC ) || ( 0x1FD0 <= ch && ch <= 0x1FD3 ) || ( 0x1FD6 <= ch && ch <= 0x1FDB ) || ( 0x1FE0 <= ch && ch <= 0x1FEC ) || ( 0x1FF2 <= ch && ch <= 0x1FF4 ) || ( 0x1FF6 <= ch && ch <= 0x1FFC ) || 0x2126 
== ch || ( 0x212A <= ch && ch <= 0x212B ) || 0x212E == ch || ( 0x2180 <= ch && ch <= 0x2182 ) || ( 0x3041 <= ch && ch <= 0x3094 ) || ( 0x30A1 <= ch && ch <= 0x30FA ) || ( 0x3105 <= ch && ch <= 0x312C ) || ( 0xAC00 <= ch && ch <= 0xD7A3 ) ;
|
public class InternalPureXbaseParser { /** * InternalPureXbase . g : 779:1 : entryRuleXConditionalExpression returns [ EObject current = null ] : iv _ ruleXConditionalExpression = ruleXConditionalExpression EOF ; */
public final EObject entryRuleXConditionalExpression ( ) throws RecognitionException { } }
|
// NOTE(review): ANTLR-generated entry rule — do not hand-edit; regenerate from the
// grammar instead. Parses a complete XConditionalExpression followed by EOF and
// returns the resulting EObject (null when a recognition error is recovered).
EObject current = null ; EObject iv_ruleXConditionalExpression = null ; try { // InternalPureXbase . g : 779:63 : ( iv _ ruleXConditionalExpression = ruleXConditionalExpression EOF )
// InternalPureXbase . g : 780:2 : iv _ ruleXConditionalExpression = ruleXConditionalExpression EOF
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXConditionalExpressionRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; iv_ruleXConditionalExpression = ruleXConditionalExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = iv_ruleXConditionalExpression ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return current ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class AbstractRectangularShape1dfx { /** * Replies the property that is the maximum x coordinate of the box .
 * @ return the maxX property . */
@ Pure public DoubleProperty maxXProperty ( ) { } }
|
// Lazily create the JavaFX property on first access. The invalidated() hook keeps
// the min <= max invariant: whenever maxX drops below the current minX, minX is
// pulled down to the new maximum so the box never has a negative extent.
if ( this . maxX == null ) { this . maxX = new SimpleDoubleProperty ( this , MathFXAttributeNames . MAXIMUM_X ) { @ Override protected void invalidated ( ) { final double currentMax = get ( ) ; final double currentMin = getMinX ( ) ; if ( currentMin > currentMax ) { // min - max constrain is broken
minXProperty ( ) . set ( currentMax ) ; } } } ; } return this . maxX ;
|
public class CmsHtmlList { /** * Sets the list item to display in the list . < p >
* @ param listItems a collection of { @ link CmsListItem } objects */
public void setContent ( Collection < CmsListItem > listItems ) { } }
|
if ( m_metadata . isSelfManaged ( ) ) { m_filteredItems = new ArrayList < CmsListItem > ( listItems ) ; m_originalItems = null ; } else { m_filteredItems = null ; m_originalItems = new ArrayList < CmsListItem > ( listItems ) ; }
|
public class Yarrgs { /** * Parses < code > args < / code > into an instance of < code > argsType < / code > using
 * { @ link Parsers # createFieldParserFactory ( ) } . Calls < code > System . exit ( 1 ) < / code > if the user
 * supplied bad arguments after printing a reason to < code > System . err < / code > . Thus , this is
 * suitable to be called from a < code > main < / code > method that ' s parsing arguments . */
public static < T > T parseInMain ( Class < T > argsType , String [ ] args ) { } }
|
// Convenience overload: delegates to the three-argument variant with the
// default field-based parser factory.
return parseInMain ( argsType , args , Parsers . createFieldParserFactory ( ) ) ;
|
public class OptionalInt { /** * Performs negated filtering on inner value if it is present .
 * @ param predicate a predicate function
 * @ return this { @ code OptionalInt } if the value is present and doesn ' t matches predicate ,
 * otherwise an empty { @ code OptionalInt }
 * @ since 1.1.9 */
@ NotNull public OptionalInt filterNot ( @ NotNull IntPredicate predicate ) { } }
|
// Implemented as filter() over the logically negated predicate, so both
// methods share the same presence/emptiness semantics.
return filter ( IntPredicate . Util . negate ( predicate ) ) ;
|
public class JMSManager { /** * Allows the caller to send a Message object to a destination */
public void send ( Destination dest , Message msg ) throws MessagingException { } }
|
try { Session s = connection . createSession ( false , Session . AUTO_ACKNOWLEDGE ) ; MessageProducer p = s . createProducer ( dest ) ; p . send ( msg ) ; s . close ( ) ; } catch ( JMSException e ) { throw new MessagingException ( e . getMessage ( ) , e ) ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( "send() - message sent to destination " + dest ) ; }
|
public class BaseDestinationHandler { /** * Add PubSubLocalisation .
 * Delegates to the pub / sub realization ; entry and exit are traced when enabled . */
protected void addPubSubLocalisation ( LocalizationDefinition destinationLocalizationDefinition ) { } }
|
// Standard WAS trace-entry / delegate / trace-exit pattern; the real work happens
// in the pub/sub realization object.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addPubSubLocalisation" , new Object [ ] { destinationLocalizationDefinition } ) ; _pubSubRealization . addPubSubLocalisation ( destinationLocalizationDefinition ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "addPubSubLocalisation" ) ;
|
public class ScheduledTimer { /** * Waits until the heartbeat is scheduled for execution .
 * @ throws InterruptedException if the thread is interrupted while waiting */
public void tick ( ) throws InterruptedException { } }
|
// Register this timer and then block on the condition under the lock until
// schedule() flips mScheduled. The while-loop guards against spurious wakeups;
// the flag is consumed (reset to false) so the next tick() waits again.
try ( LockResource r = new LockResource ( mLock ) ) { HeartbeatScheduler . addTimer ( this ) ; // Wait in a loop to handle spurious wakeups
while ( ! mScheduled ) { mTickCondition . await ( ) ; } mScheduled = false ; }
|
public class Operators { /** * Entry point for resolving a binary operator given an operator tag and a pair of argument types .
 * Looks up the candidates for { @ code tag } , picks the first applicable to ( op1 , op2 ) ,
 * and reports a diagnostic at { @ code pos } when none matches . */
OperatorSymbol resolveBinary ( DiagnosticPosition pos , JCTree . Tag tag , Type op1 , Type op2 ) { } }
|
// Generic resolve(): candidate table, applicability test, resolution step, and
// error fallback are all passed in, keeping unary/binary resolution symmetric.
return resolve ( tag , binaryOperators , binop -> binop . test ( op1 , op2 ) , binop -> binop . resolve ( op1 , op2 ) , ( ) -> reportErrorIfNeeded ( pos , tag , op1 , op2 ) ) ;
|
public class PatchSchedulesInner { /** * Gets all patch schedules in the specified redis cache ( there is only one ) .
 * @ param resourceGroupName The name of the resource group .
 * @ param cacheName The name of the Redis cache .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the observable to the PagedList & lt ; RedisPatchScheduleInner & gt ; object */
public Observable < Page < RedisPatchScheduleInner > > listByRedisResourceAsync ( final String resourceGroupName , final String cacheName ) { } }
|
// Unwraps the ServiceResponse envelope so callers see only the page payload;
// validation and the actual REST call live in the WithServiceResponse variant.
return listByRedisResourceWithServiceResponseAsync ( resourceGroupName , cacheName ) . map ( new Func1 < ServiceResponse < Page < RedisPatchScheduleInner > > , Page < RedisPatchScheduleInner > > ( ) { @ Override public Page < RedisPatchScheduleInner > call ( ServiceResponse < Page < RedisPatchScheduleInner > > response ) { return response . body ( ) ; } } ) ;
|
public class Internal { /** * Encodes a long on 1 , 2 , 4 or 8 bytes
* @ param value The value to encode
* @ return A byte array containing the encoded value
* @ since 2.4 */
public static byte [ ] vleEncodeLong ( final long value ) { } }
|
if ( Byte . MIN_VALUE <= value && value <= Byte . MAX_VALUE ) { return new byte [ ] { ( byte ) value } ; } else if ( Short . MIN_VALUE <= value && value <= Short . MAX_VALUE ) { return Bytes . fromShort ( ( short ) value ) ; } else if ( Integer . MIN_VALUE <= value && value <= Integer . MAX_VALUE ) { return Bytes . fromInt ( ( int ) value ) ; } else { return Bytes . fromLong ( value ) ; }
|
public class ClusteredServer { /** * Adds an action to be called with a message to be published to every node in the cluster .
 * @ return this server , for fluent chaining */
public Server onpublish ( Action < Map < String , Object > > action ) { } }
|
// Registers the callback and returns this instance for fluent chaining.
publishActions . add ( action ) ; return this ;
|
public class QueryRange { /** * - - - - - interface Predicate - - - - - */
@ Override public boolean accept ( final Object t ) { } }
|
final boolean result = count >= start && count < end ; count ++ ; return result ;
|
public class FSEditLog { /** * Add close lease record to edit log .
 * Serializes the file ' s current metadata ( replication , times , block list ,
 * permissions ) into a CloseOp and appends it to the journal . */
public void logCloseFile ( String path , INodeFile newNode ) { } }
|
// The two trailing nulls are the client name / client machine slots, which are
// not applicable to a close record.
CloseOp op = CloseOp . getInstance ( ) ; op . set ( newNode . getId ( ) , path , newNode . getReplication ( ) , newNode . getModificationTime ( ) , newNode . getAccessTime ( ) , newNode . getPreferredBlockSize ( ) , newNode . getBlocks ( ) , newNode . getPermissionStatus ( ) , null , null ) ; logEdit ( op ) ;
|
public class FacebookRestClient { /** * Sends a notification email to the specified users , who must have added your application .
 * You can send five ( 5 ) emails to a user per day . Requires a session key for desktop applications , which may only
 * send email to the person whose session it is . This method does not require a session for Web applications .
 * @ param recipientIds up to 100 user ids to which the message is to be sent
 * @ param subject the subject of the notification email ( optional )
 * @ param text the plain text to send to the specified users via email
 * @ return a comma - separated list of the IDs of the users to whom the email was successfully sent
 * @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Notifications . sendEmail " >
 * Developers Wiki : notifications . sendEmail < / a > */
public String notifications_sendEmailPlain ( Collection < Integer > recipientIds , CharSequence subject , CharSequence text ) throws FacebookException , IOException { } }
|
// Plain-text convenience wrapper: delegates with a null FBML body.
return notifications_sendEmail ( recipientIds , subject , /* fbml */
null , text ) ;
|
public class Ifc2x3tc1PackageImpl { /** * Returns the EClass for the IfcSizeSelect type , lazily resolved from the
 * registered Ifc2x3tc1 package ( classifier index 970 ) .
 * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
public EClass getIfcSizeSelect ( ) { } }
|
// EMF-generated accessor: cache the classifier on first lookup. Do not hand-edit;
// regenerate from the model if the classifier index changes.
if ( ifcSizeSelectEClass == null ) { ifcSizeSelectEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 970 ) ; } return ifcSizeSelectEClass ;
|
public class FutureContext { /** * Adds new { @ code Future } and { @ code Consumer } to the context of this
 * thread . To resolve this future and invoke the result consumer use method
 * { @ link resolve ( ) } Use this method to specify maximum { @ code timeout } used
 * when obtaining object from { @ code future }
 * @ param < T > type of { @ code future } and { @ code consumer }
 * @ param future { @ code future } that returns argument of type { @ code < T > }
 * used by { @ code consumer }
 * @ param consumer { @ code consumer } of object obtained from { @ code future }
 * @ param timeout the maximum time to wait
 * @ param timeUnit the time unit of the { @ code timeout } argument */
public static < T > void register ( Future < T > future , Consumer < T > consumer , long timeout , TimeUnit timeUnit ) { } }
|
// Logs, then stores the pair (plus timeout) in the per-thread context; the pair
// is only executed later when resolve() is invoked on the same thread.
LOGGER . debug ( "Registering new future {} and consumer {} with timeout {} {}" , future , consumer , timeout , timeUnit ) ; getFutureContext ( ) . add ( future , consumer , timeout , timeUnit ) ;
|
public class StdOperator { /** * < p > Yields an operator which adjusts any entity such that the given
 * element is set to the given value in standard mode . < / p >
 * @ param < T > generic type of target entity
 * @ param < V > generic element value type
 * @ param value new element value
 * @ param element associated chronological element
 * @ return operator
 * @ since 3.5/4.3 */
public static < T extends ChronoEntity < T > , V > ChronoOperator < T > newValue ( V value , ChronoElement < V > element ) { } }
|
// Factory method: the constant NEW_VALUE_MODE selects standard set-value semantics.
return new StdOperator < > ( NEW_VALUE_MODE , element , value ) ;
|
public class RedBlackTreeLong { /** * Remove the node containing the given key and element . The node MUST exists , else the tree won ' t be valid anymore . */
@ Override public void remove ( long key , T element ) { } }
|
// Fast paths: if the target is the cached minimum or maximum, delegate to the
// specialized removals, which also refresh those caches.
if ( first . value == key && ObjectUtil . equalsOrNull ( element , first . element ) ) { removeMin ( ) ; return ; } if ( last . value == key && ObjectUtil . equalsOrNull ( element , last . element ) ) { removeMax ( ) ; return ; } // if both children of root are black , set root to red
// Standard left-leaning red-black deletion: temporarily redden the root when both
// children are black so fix-ups can borrow, then re-blacken the new root.
if ( ( root . left == null || ! root . left . red ) && ( root . right == null || ! root . right . red ) ) root . red = true ; root = remove ( root , key , element ) ; if ( root != null ) root . red = false ;
|
public class MLSparse { /** * Returns the real part ( PR ) array . PR has length number - of - nonzero - values .
* @ return real part */
public Double [ ] exportReal ( ) { } }
|
Double [ ] ad = new Double [ indexSet . size ( ) ] ; int i = 0 ; for ( IndexMN index : indexSet ) { if ( real . containsKey ( index ) ) { ad [ i ] = real . get ( index ) ; } else { ad [ i ] = 0.0 ; } i ++ ; } return ad ;
|
public class FileUtil { /** * Writes the specified byte array to a file .
* @ param file The < code > File < / code > to write .
* @ param contents The byte array to write to the file .
* @ param createDirectory A value indicating whether the directory
* containing the file to be written should be created if it does
* not exist .
* @ throws IOException If the file could not be written . */
public static void setFileContents ( File file , byte [ ] contents , boolean createDirectory ) throws IOException { } }
|
if ( createDirectory ) { File directory = file . getParentFile ( ) ; if ( ! directory . exists ( ) ) { directory . mkdirs ( ) ; } } FileOutputStream stream = new FileOutputStream ( file ) ; stream . write ( contents ) ; stream . close ( ) ;
|
public class Base64 { /** * Low - level access to decoding ASCII characters in
 * the form of a byte array . < strong > Ignores GUNZIP option , if
 * it ' s set . < / strong > This is not generally a recommended method ,
 * although it is used internally as part of the decoding process .
 * Special case : if len = 0 , an empty array is returned . Still ,
 * if you need more speed and reduced memory footprint ( and aren ' t
 * gzipping ) , consider this method .
 * @ param source The Base64 encoded data
 * @ return decoded data
 * @ since 2.3.1 */
public static byte [ ] decode ( byte [ ] source ) { } }
|
// Delegates to the full-control overload with NO_OPTIONS. The IOException branch is
// unreachable here because GZIP handling (the only IO source) is disabled, hence the
// assert rather than propagation.
byte [ ] decoded = null ; try { decoded = decode ( source , 0 , source . length , Base64 . NO_OPTIONS ) ; } catch ( java . io . IOException ex ) { assert false : "IOExceptions only come from GZipping, which is turned off: " + ex . getMessage ( ) ; } return decoded ;
|
public class SqlClient { /** * Opens the CLI client for executing SQL statements .
* @ param context session context
* @ param executor executor */
private void openCli ( SessionContext context , Executor executor ) { } }
|
CliClient cli = null ; try { cli = new CliClient ( context , executor ) ; // interactive CLI mode
if ( options . getUpdateStatement ( ) == null ) { cli . open ( ) ; } // execute single update statement
else { final boolean success = cli . submitUpdate ( options . getUpdateStatement ( ) ) ; if ( ! success ) { throw new SqlClientException ( "Could not submit given SQL update statement to cluster." ) ; } } } finally { if ( cli != null ) { cli . close ( ) ; } }
|
public class BufferedISPNCache { /** * Sort changes and commit data to the cache .
 * The thread - local buffer is always cleared afterwards , even when the commit fails . */
public void commitTransaction ( ) { } }
|
// Drain the buffered changes in their canonical (sorted) order and apply them under
// the transaction manager; the finally block guarantees the thread-local list is
// reset so a failed commit cannot be replayed accidentally.
CompressedISPNChangesBuffer changesContainer = getChangesBufferSafe ( ) ; final TransactionManager tm = getTransactionManager ( ) ; try { final List < ChangesContainer > containers = changesContainer . getSortedList ( ) ; commitChanges ( tm , containers ) ; } finally { changesList . set ( null ) ; changesContainer = null ; }
|
public class FileUtils { /** * Returns a human readable size from a large number of bytes . You can specify that the human readable size use an
* abbreviated label ( e . g . , GB or MB ) .
* @ param aByteCount A large number of bytes
* @ param aAbbreviatedLabel Whether the label should be abbreviated
* @ return A human readable size */
public static String sizeFromBytes ( final long aByteCount , final boolean aAbbreviatedLabel ) { } }
|
long count ; if ( ( count = aByteCount / 1073741824 ) > 0 ) { return count + ( aAbbreviatedLabel ? " GB" : " gigabytes" ) ; } else if ( ( count = aByteCount / 1048576 ) > 0 ) { return count + ( aAbbreviatedLabel ? " MB" : " megabytes" ) ; } else if ( ( count = aByteCount / 1024 ) > 0 ) { return count + ( aAbbreviatedLabel ? " KB" : " kilobytes" ) ; } return count + ( aAbbreviatedLabel ? " B" : " bytes" ) ;
|
public class SrvDialog { /** * UTILITIES :
 * Shows a modal alert dialog with the given title / message and a single OK
 * button that simply dismisses the dialog . Must be called on the UI thread . */
protected void showAlertDialog ( Activity activity , String msg , String title ) { } }
|
// Standard AlertDialog.Builder pattern; the OK handler only cancels the dialog.
AlertDialog . Builder builder = new Builder ( activity ) ; builder . setMessage ( msg ) . setTitle ( title ) . setPositiveButton ( R . string . ok , new DialogInterface . OnClickListener ( ) { public void onClick ( DialogInterface dialog , int id ) { dialog . cancel ( ) ; } } ) ; builder . show ( ) ;
|
public class RouteUtils { /** * Collects the @ Route annotation on < em > action < / em > method .
* This set will be added at the end of the list retrieved from the { @ link org . wisdom . api
* . Controller # routes ( ) }
* @ param controller the controller
* @ return the list of route , empty if none are available */
public static List < Route > collectRouteFromControllerAnnotations ( Controller controller ) { } }
|
String prefix = getPath ( controller ) ; List < Route > routes = new ArrayList < > ( ) ; Method [ ] methods = controller . getClass ( ) . getMethods ( ) ; for ( Method method : methods ) { org . wisdom . api . annotations . Route annotation = method . getAnnotation ( org . wisdom . api . annotations . Route . class ) ; if ( annotation != null ) { String uri = annotation . uri ( ) ; uri = getPrefixedUri ( prefix , uri ) ; final Route route = new RouteBuilder ( ) . route ( annotation . method ( ) ) . on ( uri ) . to ( controller , method ) . accepting ( annotation . accepts ( ) ) . producing ( annotation . produces ( ) ) ; routes . add ( route ) ; } } return routes ;
|
public class XcapClientImpl { /** * ( non - Javadoc )
 * @ see XcapClient # deleteIfMatch ( java . net . URI ,
 * java . lang . String , Header [ ] ,
 * Credentials ) */
public XcapResponse deleteIfMatch ( URI uri , String eTag , Header [ ] additionalRequestHeaders , Credentials credentials ) throws IOException { } }
|
// Conditional delete: the If-Match header carries the caller's ETag so the server
// only removes the document when the caller holds the current version.
if ( log . isDebugEnabled ( ) ) { log . debug ( "deleteIfMatch(uri=" + uri + ", eTag=" + eTag + " , additionalRequestHeaders = ( " + Arrays . toString ( additionalRequestHeaders ) + " ) )" ) ; } final HttpDelete request = new HttpDelete ( uri ) ; request . setHeader ( XcapConstant . HEADER_IF_MATCH , eTag ) ; return execute ( request , additionalRequestHeaders , credentials ) ;
|
public class DirectoryExtenderEngine { /** * The template may be a single directory . In that case all of its
 * entries are processed : every plain - file entry is expanded through Velocity
 * into the target location . */
protected void processVelocityResource ( ITemplateSource source , Map < String , Object > params , String target ) throws Exception { } }
|
for ( ITemplateSourceEntry entry : source . listEntries ( ) ) { // debug: System . out . println ( "Detected resource: " + entry . getName ( ) ) ;
if ( entry . isFile ( ) ) { // only file entries are expanded; directory entries are skipped here
// ( entry . getTemplate ( ) / source . getResource ( ) / entry . getName ( ) identifies the file )
processVelocityFile ( entry , source . getResource ( ) , params , target ) ; } }
|
public class StopWatch { /** * Start the stop watch .
* @ return { @ link EChange } . */
@ Nonnull public final EChange start ( ) { } }
|
// Already started ?
if ( m_nStartDT > 0 ) return EChange . UNCHANGED ; m_nStartDT = getCurrentNanoTime ( ) ; return EChange . CHANGED ;
|
public class GUIDefaults { /** * returns the value for the specified property , if non - existent then the
 * default value .
 * @ param property the property to retrieve the value for
 * @ param defaultValue the default value for the property
 * @ return the value of the specified property */
public static String get ( String property , String defaultValue ) { } }
|
// Straight delegation to the shared Properties instance; returns defaultValue
// when the key is absent.
return PROPERTIES . getProperty ( property , defaultValue ) ;
|
public class Node { /** * Detaches child from Node and replaces it with newChild .
 * Preconditions : newChild must be fully detached ( no parent , no siblings ) and
 * child must currently be a child of this node . After the call , child is fully
 * detached and newChild occupies its exact position in the sibling list . */
public final void replaceChild ( Node child , Node newChild ) { } }
|
// The sibling list is circular in one direction: first.previous points at the last
// child while last.next is null. Every branch below exists to keep that invariant
// intact for the four cases: only child, first child, last child, middle child.
checkArgument ( newChild . next == null , "The new child node has next siblings." ) ; checkArgument ( newChild . previous == null , "The new child node has previous siblings." ) ; checkArgument ( newChild . parent == null , "The new child node already has a parent." ) ; checkState ( child . parent == this , "%s is not the parent of %s" , this , child ) ; // Copy over important information .
newChild . useSourceInfoIfMissingFrom ( child ) ; newChild . parent = this ; Node nextSibling = child . next ; Node prevSibling = child . previous ; Node last = first . previous ; if ( child == prevSibling ) { // first and only child
first = newChild ; first . previous = newChild ; } else { if ( child == first ) { first = newChild ; // prevSibling = = last , and last . next remains null
} else { prevSibling . next = newChild ; } if ( child == last ) { first . previous = newChild ; } else { nextSibling . previous = newChild ; } newChild . previous = prevSibling ; } newChild . next = nextSibling ; // maybe null
child . next = null ; child . previous = null ; child . parent = null ;
|
public class UpdateAssertionForProxyIdpAction { /** * Assigns the AuthenticatingAuthority element holding the issuer of the proxied assertion .
* @ param assertion
* the assertion to update
* @ param proxiedAssertion
* the proxied assertion */
protected void setAuthenticatingAuthority ( Assertion assertion , Assertion proxiedAssertion ) { } }
|
if ( proxiedAssertion . getIssuer ( ) == null || proxiedAssertion . getIssuer ( ) . getValue ( ) == null ) { log . warn ( "No issuer element found in proxied assertion" ) ; return ; } if ( assertion . getAuthnStatements ( ) . isEmpty ( ) ) { log . warn ( "No AuthnStatement available is assertion to update - will not process" ) ; return ; } AuthnStatement authnStatement = assertion . getAuthnStatements ( ) . get ( 0 ) ; if ( authnStatement . getAuthnContext ( ) == null ) { log . warn ( "No AuthnContext found in assertion to update - will not process" ) ; } final XMLObjectBuilderFactory bf = XMLObjectProviderRegistrySupport . getBuilderFactory ( ) ; SAMLObjectBuilder < AuthenticatingAuthority > aaBuilder = ( SAMLObjectBuilder < AuthenticatingAuthority > ) bf . < AuthenticatingAuthority > getBuilderOrThrow ( AuthenticatingAuthority . DEFAULT_ELEMENT_NAME ) ; AuthenticatingAuthority aa = aaBuilder . buildObject ( ) ; aa . setURI ( proxiedAssertion . getIssuer ( ) . getValue ( ) ) ; authnStatement . getAuthnContext ( ) . getAuthenticatingAuthorities ( ) . add ( aa ) ; log . info ( "Updated Assertion with AuthenticatingAuthority ({})" , aa . getURI ( ) ) ;
|
public class UnarchiveFindingsRequest { /** * IDs of the findings that you want to unarchive .
* @ param findingIds
* IDs of the findings that you want to unarchive . */
public void setFindingIds ( java . util . Collection < String > findingIds ) { } }
|
if ( findingIds == null ) { this . findingIds = null ; return ; } this . findingIds = new java . util . ArrayList < String > ( findingIds ) ;
|
public class SpatialAnchorsAccountsInner { /** * Regenerate 1 Key of a Spatial Anchors Account .
 * @ param resourceGroupName Name of an Azure resource group .
 * @ param spatialAnchorsAccountName Name of an Mixed Reality Spatial Anchors Account .
 * @ param serial serial of key to be regenerated
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the observable to the SpatialAnchorsAccountKeysInner object */
public Observable < SpatialAnchorsAccountKeysInner > regenerateKeysAsync ( String resourceGroupName , String spatialAnchorsAccountName , Integer serial ) { } }
|
// Unwraps the ServiceResponse envelope so subscribers receive only the key payload;
// validation and the REST call are handled by the WithServiceResponse variant.
return regenerateKeysWithServiceResponseAsync ( resourceGroupName , spatialAnchorsAccountName , serial ) . map ( new Func1 < ServiceResponse < SpatialAnchorsAccountKeysInner > , SpatialAnchorsAccountKeysInner > ( ) { @ Override public SpatialAnchorsAccountKeysInner call ( ServiceResponse < SpatialAnchorsAccountKeysInner > response ) { return response . body ( ) ; } } ) ;
|
public class ThrowUnchecked { /** * Throws the cause of the given exception , even though it may be
* checked . If the cause is null , then the original exception is
* thrown . This method only returns normally if the exception is null .
* @ param t exception whose cause is to be thrown */
public static void fireCause ( Throwable t ) { } }
|
if ( t != null ) { Throwable cause = t . getCause ( ) ; if ( cause == null ) { cause = t ; } fire ( cause ) ; }
|
public class ProximityTracker { /** * Adds an object to the tracker .
 * Records are kept in an array sorted by x so lookups can binary - search ;
 * insertion is O ( n ) due to the shift . */
public void addObject ( int x , int y , Object object ) { } }
|
Record record = new Record ( x , y , object ) ; // if this is the very first element , we have to insert it
// straight away because our binary search algorithm doesn ' t work
// on empty arrays
if ( _size == 0 ) { _records [ _size ++ ] = record ; return ; } // figure out where to insert it
int ipoint = binarySearch ( x ) ; // expand the records array if necessary
// doubling growth keeps amortized insertion cost linear
if ( _size >= _records . length ) { int nsize = _size * 2 ; Record [ ] records = new Record [ nsize ] ; System . arraycopy ( _records , 0 , records , 0 , _size ) ; _records = records ; } // shift everything down
if ( ipoint < _size ) { System . arraycopy ( _records , ipoint , _records , ipoint + 1 , _size - ipoint ) ; } // insert the record
_records [ ipoint ] = record ; _size ++ ;
|
public class SoundStore { /** * Get the Sound based on a specified AIF file
 * @ param ref The reference to the AIF file in the classpath
 * @ return The Sound read from the AIF file
 * @ throws IOException Indicates a failure to load the AIF */
public Audio getAIF ( String ref ) throws IOException { } }
|
// Resolve the classpath reference to a stream and delegate to the
// stream-based overload; ref is passed through as well (presumably as the
// resource's identifying name - confirm in getAIF(String, InputStream)).
return getAIF ( ref , ResourceLoader . getResourceAsStream ( ref ) ) ;
|
public class ZkMasterInquireClient { /** * Gets the client .
* @ param zookeeperAddress the address for Zookeeper
* @ param electionPath the path of the master election
* @ param leaderPath the path of the leader
* @ param inquireRetryCount the number of times to retry connections
* @ return the client */
public static synchronized ZkMasterInquireClient getClient ( String zookeeperAddress , String electionPath , String leaderPath , int inquireRetryCount ) { } }
|
ZkMasterConnectDetails connectDetails = new ZkMasterConnectDetails ( zookeeperAddress , leaderPath ) ; if ( ! sCreatedClients . containsKey ( connectDetails ) ) { sCreatedClients . put ( connectDetails , new ZkMasterInquireClient ( connectDetails , electionPath , inquireRetryCount ) ) ; } return sCreatedClients . get ( connectDetails ) ;
|
public class InternalOutputStreamManager { /** * Forces a flush of the stream set identified by the given stream ID.
 * Sends a flushed message downstream, which also implicitly removes the
 * stream set. Failures are FFDC-recorded and swallowed because this is a
 * callback from the stream array map.
 * @ param streamID the UUID of the stream set to flush */
public void forceFlush ( SIBUuid12 streamID ) throws SIErrorException { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "forceFlush" , streamID ) ; StreamSet streamSet = streamSets . get ( streamID ) ; streamSet . dereferenceControlAdapter ( ) ; // Send out a flushed message . If this fails , make sure we get
// to at least invoke the callback .
// NOTE(review): streamSets.get(streamID) is dereferenced without a null
// check - confirm the caller guarantees the stream set exists.
try { // The Cellule argument is null as it is ignored by our parent handler
// this flush message should be broadcast downstream .
// This will also implicitly remove the streamSet
downControl . sendFlushedMessage ( null , streamID ) ; } catch ( Exception e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.InternalOutputStreamManager.forceFlush" , "1:743:1.48.1.1" , this ) ; // Note that it doesn ' t make much sense to throw an exception here since
// this is a callback from stream array map .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "forceFlush" , e ) ; return ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "forceFlush" ) ;
|
public class CodeGenBase { /** * This method translates a VDM node into an IR status .
* @ param statuses
* A list of previously generated IR statuses . The generated IR status will be added to this list .
* @ param node
* The VDM node from which we generate an IR status
* @ throws AnalysisException
* If something goes wrong during the construction of the IR status . */
protected void genIrStatus ( List < IRStatus < PIR > > statuses , INode node ) throws AnalysisException { } }
|
IRStatus < PIR > status = generator . generateFrom ( node ) ; if ( status != null ) { statuses . add ( status ) ; }
|
public class Slice { /** * Sets the specified 32 - bit float at the specified absolute
* { @ code index } in this buffer .
* @ throws IndexOutOfBoundsException if the specified { @ code index } is less than { @ code 0 } or
* { @ code index + 4 } is greater than { @ code this . length ( ) } */
public void setFloat ( int index , float value ) { } }
|
checkIndexLength ( index , SizeOf . SIZE_OF_FLOAT ) ; unsafe . putFloat ( base , address + index , value ) ;
|
public class ElementHelper { /** * Parse the value for any property tokens relative to the supplied properties .
 * Token substitution is delegated entirely to PropertyResolver.
 * @ param value the value to parse
 * @ param props the reference properties
 * @ return the normalized string */
private static String normalize ( String value , Properties props ) { } }
|
return PropertyResolver . resolve ( props , value ) ;
|
public class GroovyResultSetExtension { /** * Adds a new row to the result set
* @ param values a map containing the mappings for column names and values
* @ throws java . sql . SQLException if something goes wrong
* @ see ResultSet # insertRow ( )
* @ see ResultSet # updateObject ( java . lang . String , java . lang . Object )
* @ see ResultSet # moveToInsertRow ( ) */
public void add ( Map values ) throws SQLException { } }
|
getResultSet ( ) . moveToInsertRow ( ) ; for ( Iterator iter = values . entrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Map . Entry entry = ( Map . Entry ) iter . next ( ) ; getResultSet ( ) . updateObject ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) ) ; } getResultSet ( ) . insertRow ( ) ;
|
public class SelectBuilderUtility { /** * Detects the bean type a select method works on .
 * Inspects the method return type ( or , for void methods , the bean type of an
 * OnReadBeanListener parameter ) and resolves the element type .
 * @ param method the DAO method to analyze
 * @ return the resolved bean type , or null when no distinct bean type applies
 *         ( void without listener , Cursor , scalar values , scalar collections ,
 *         or the DAO ' s own entity ) */
public static TypeName extractReturnType ( final SQLiteModelMethod method ) { } }
|
SQLiteEntity daoEntity = method . getParent ( ) . getEntity ( ) ; // if true , field must be associate to ben attributes
TypeName returnTypeName = method . getReturnClass ( ) ; TypeName result = null ; if ( TypeUtility . isTypeIncludedIn ( returnTypeName , Void . class , Void . TYPE ) ) { // return VOID ( in the parameters must be a listener )
// case OnReadBeanListener
Pair < String , TypeName > foundElement = SqlBuilderHelper . searchInEachParameter ( method , new SqlBuilderHelper . OnParameterListener ( ) { @ Override public boolean onParameter ( Pair < String , TypeName > item ) { return ( item . value1 instanceof ParameterizedTypeName && TypeUtility . isEquals ( ( ( ParameterizedTypeName ) item . value1 ) . rawType , OnReadBeanListener . class . getName ( ) ) ) ; } } ) ; if ( foundElement != null ) { result = ( ( ParameterizedTypeName ) foundElement . value1 ) . typeArguments . get ( 0 ) ; } } else if ( TypeUtility . isTypeIncludedIn ( returnTypeName , Cursor . class ) ) { // return Cursor ( no listener )
result = null ; } else if ( returnTypeName instanceof ParameterizedTypeName ) { ParameterizedTypeName returnParameterizedTypeName = ( ParameterizedTypeName ) returnTypeName ; ClassName returnParameterizedClassName = returnParameterizedTypeName . rawType ; // return List ( no listener )
// Parameterized return: exactly one type argument is required (List<T>, PagedResultImpl<T>, ...).
// NOTE(review): a Class.forName failure prints a stack trace before being
// wrapped into KriptonClassNotFoundException - consider dropping printStackTrace.
AssertKripton . assertTrueOrInvalidMethodSignException ( returnParameterizedTypeName . typeArguments . size ( ) == 1 , method , "return type %s is not supported" , returnTypeName ) ; TypeName elementName = returnParameterizedTypeName . typeArguments . get ( 0 ) ; Class < ? > wrapperClazz = null ; try { wrapperClazz = Class . forName ( returnParameterizedClassName . toString ( ) ) ; } catch ( ClassNotFoundException e ) { e . printStackTrace ( ) ; throw ( new KriptonClassNotFoundException ( e ) ) ; } if ( PagedResultImpl . class . isAssignableFrom ( wrapperClazz ) || Collection . class . isAssignableFrom ( wrapperClazz ) ) { if ( SQLTransformer . isSupportedJDKType ( elementName ) || TypeUtility . isByteArray ( elementName ) ) { // scalar list
// JDK scalar element types yield no bean type; otherwise the element type is the bean.
result = null ; } else { result = elementName ; } } else { result = null ; } } else if ( TypeUtility . isEquals ( returnTypeName , daoEntity ) ) { // return one element ( no listener )
result = null ; } else if ( SQLTransformer . isSupportedJDKType ( returnTypeName ) || TypeUtility . isByteArray ( returnTypeName ) ) { // return single value string , int , long , short , double , float ,
// String ( no listener )
// Normalize: the DAO entity itself also maps to null ("no distinct bean type").
result = null ; } else { result = returnTypeName ; } if ( result == null || TypeUtility . isEquals ( result , daoEntity ) ) { return null ; } else { return result ; }
|
public class Utils { /** * CollectionのインスタンスをListに変換する 。
* @ since 1.0
* @ param collection 変換元のCollectionのインスタンス 。
* @ return 変換したListのインスタンス 。 */
public static < T > List < T > convertCollectionToList ( final Collection < T > collection ) { } }
|
if ( List . class . isAssignableFrom ( collection . getClass ( ) ) ) { return ( List < T > ) collection ; } return new ArrayList < > ( collection ) ;
|
public class HeaderHandlingDispatcherPortlet { /** * Used by the render method to set the response properties and headers .
* < p > The portlet should override this method and set its response header using this method in
* order to ensure that they are set before anything is written to the output stream .
* @ param request the render request
* @ param response the render response */
@ Override protected void doHeaders ( RenderRequest request , RenderResponse response ) { } }
|
try { doDispatch ( request , response ) ; } catch ( IOException | PortletException ex ) { logger . error ( "Exception rendering headers for portlet " + getPortletName ( ) + ". Aborting doHeaders" , ex ) ; }
|
public class AsyncOperationService { /** * Wrap getOperationStatus to avoid throwing exception over JMX */
@ JmxOperation ( description = "Retrieve operation status" ) public String getStatus ( int id ) { } }
|
try { return getOperationStatus ( id ) . toString ( ) ; } catch ( VoldemortException e ) { return "No operation with id " + id + " found" ; }
|
public class TraceStmBuilder { /** * Assumes dialect is VDM - SL . This method does not work with store lookups for local variables and since code
 * generated VDM - SL traces do not rely on this then it is safe to use this method for this dialect .
 * Replaces every argument of the call statement with a fresh , final local
 * variable initialized to ( a clone of ) that argument .
 * @ param callStm
 * the call statement for which we want to replace the arguments with variables
 * @ return the variable declarations corresponding to the variables that replace the arguments ;
 * empty for non - SL dialects or unsupported statement kinds */
protected List < AVarDeclIR > replaceArgsWithVars ( SStmIR callStm ) { } }
|
// Non-SL dialects rely on store lookups for locals, so nothing is replaced there.
List < AVarDeclIR > decls = new LinkedList < AVarDeclIR > ( ) ; if ( Settings . dialect != Dialect . VDM_SL ) { return decls ; } List < SExpIR > args = null ; if ( callStm instanceof SCallStmIR ) { args = ( ( SCallStmIR ) callStm ) . getArgs ( ) ; } else if ( callStm instanceof ACallObjectExpStmIR ) { args = ( ( ACallObjectExpStmIR ) callStm ) . getArgs ( ) ; } else { log . error ( "Expected a call statement or call object statement. Got: " + callStm ) ; return decls ; } for ( SExpIR arg : args ) { String argName = getInfo ( ) . getTempVarNameGen ( ) . nextVarName ( traceTrans . getTracePrefixes ( ) . callStmArgNamePrefix ( ) ) ; STypeIR type = arg . getType ( ) ; AVarDeclIR argDecl = getTransAssist ( ) . consDecl ( argName , type . clone ( ) , arg . clone ( ) ) ; argDecl . setFinal ( true ) ; decls . add ( argDecl ) ; getTransAssist ( ) . replaceNodeWith ( arg , getInfo ( ) . getExpAssistant ( ) . consIdVar ( argName , type . clone ( ) ) ) ; } return decls ;
|
public class AmazonElastiCacheClient { /** * Creates a new Amazon ElastiCache cache parameter group . An ElastiCache cache parameter group is a collection of
* parameters and their values that are applied to all of the nodes in any cluster or replication group using the
* CacheParameterGroup .
* A newly created CacheParameterGroup is an exact duplicate of the default parameter group for the
* CacheParameterGroupFamily . To customize the newly created CacheParameterGroup you can change the values of
* specific parameters . For more information , see :
* < ul >
* < li >
* < a href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / APIReference / API _ ModifyCacheParameterGroup . html " >
* ModifyCacheParameterGroup < / a > in the ElastiCache API Reference .
* < / li >
* < li >
* < a href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / red - ug / ParameterGroups . html " > Parameters and
* Parameter Groups < / a > in the ElastiCache User Guide .
* < / li >
* < / ul >
* @ param createCacheParameterGroupRequest
* Represents the input of a < code > CreateCacheParameterGroup < / code > operation .
* @ return Result of the CreateCacheParameterGroup operation returned by the service .
* @ throws CacheParameterGroupQuotaExceededException
* The request cannot be processed because it would exceed the maximum number of cache security groups .
* @ throws CacheParameterGroupAlreadyExistsException
* A cache parameter group with the requested name already exists .
* @ throws InvalidCacheParameterGroupStateException
* The current state of the cache parameter group does not allow the requested operation to occur .
* @ throws InvalidParameterValueException
* The value for a parameter is invalid .
* @ throws InvalidParameterCombinationException
* Two or more incompatible parameters were specified .
* @ sample AmazonElastiCache . CreateCacheParameterGroup
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticache - 2015-02-02 / CreateCacheParameterGroup "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CacheParameterGroup createCacheParameterGroup ( CreateCacheParameterGroupRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateCacheParameterGroup ( request ) ;
|
public class AutoTGT { /** * Hadoop does not just go off of a TGT , it needs a bit more . This should fill in the rest .
 * Uses reflection throughout so that Hadoop remains an optional dependency ;
 * silently returns when Hadoop is absent from the classpath or security is disabled .
 * @ param subject the subject that should have a TGT in it . */
private void loginHadoopUser ( Subject subject ) { } }
|
// Locate UserGroupInformation reflectively; absence simply means no Hadoop on the classpath.
Class < ? > ugi = null ; try { ugi = Class . forName ( "org.apache.hadoop.security.UserGroupInformation" ) ; } catch ( ClassNotFoundException e ) { LOG . info ( "Hadoop was not found on the class path" ) ; return ; } try { Method isSecEnabled = ugi . getMethod ( "isSecurityEnabled" ) ; if ( ! ( ( Boolean ) isSecEnabled . invoke ( null ) ) ) { LOG . warn ( "Hadoop is on the classpath but not configured for " + "security, if you want security you need to be sure that " + "hadoop.security.authentication=kerberos in core-site.xml " + "in your jar" ) ; return ; } try { Method login = ugi . getMethod ( "loginUserFromSubject" , Subject . class ) ; login . invoke ( null , subject ) ; } catch ( NoSuchMethodException me ) { // The version of Hadoop does not have the needed client changes .
// So don ' t look now , but do something really ugly to work around it .
// This is because we are reaching into the hidden bits of Hadoop security , and it works for now , but may stop at any point in time .
// We are just trying to do the following
// Configuration conf = new Configuration ( ) ;
// HadoopKerberosName . setConfiguration ( conf ) ;
// subject . getPrincipals ( ) . add ( new User ( tgt . getClient ( ) . toString ( ) , AuthenticationMethod . KERBEROS , null ) ) ;
// Fallback for old Hadoop clients lacking loginUserFromSubject: reflectively
// reproduce the steps sketched above. Fragile by design - it reaches into
// Hadoop's internal security classes and may break on incompatible versions.
String name = getTGT ( subject ) . getClient ( ) . toString ( ) ; LOG . warn ( "The Hadoop client does not have loginUserFromSubject, Trying to hack around it. This may not work..." ) ; Class < ? > confClass = Class . forName ( "org.apache.hadoop.conf.Configuration" ) ; Constructor confCons = confClass . getConstructor ( ) ; Object conf = confCons . newInstance ( ) ; Class < ? > hknClass = Class . forName ( "org.apache.hadoop.security.HadoopKerberosName" ) ; Method hknSetConf = hknClass . getMethod ( "setConfiguration" , confClass ) ; hknSetConf . invoke ( null , conf ) ; Class < ? > authMethodClass = Class . forName ( "org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod" ) ; Object kerbAuthMethod = null ; for ( Object authMethod : authMethodClass . getEnumConstants ( ) ) { if ( "KERBEROS" . equals ( authMethod . toString ( ) ) ) { kerbAuthMethod = authMethod ; break ; } } Class < ? > userClass = Class . forName ( "org.apache.hadoop.security.User" ) ; Constructor userCons = userClass . getConstructor ( String . class , authMethodClass , LoginContext . class ) ; userCons . setAccessible ( true ) ; Object user = userCons . newInstance ( name , kerbAuthMethod , null ) ; subject . getPrincipals ( ) . add ( ( Principal ) user ) ; } } catch ( Exception e ) { LOG . warn ( "Something went wrong while trying to initialize Hadoop through reflection. This version of hadoop may not be compatible." , e ) ; }
|
public class PathUtil { /** * Returns the file extension part of the specified path
* @ param path the filname path of the file
* @ return the file extension */
public static String getExtension ( String path ) { } }
|
int idx = path . lastIndexOf ( "/" ) ; // $ NON - NLS - 1 $
String filename = idx == - 1 ? path : path . substring ( idx + 1 ) ; idx = filename . lastIndexOf ( "." ) ; // $ NON - NLS - 1 $
return idx == - 1 ? "" : filename . substring ( idx + 1 ) ; // $ NON - NLS - 1 $
|
public class ClasspathReader { /** * Uses the java . class . path system property to obtain a list of URLs that
* represent the CLASSPATH
* @ return the URl [ ] */
@ SuppressWarnings ( "deprecation" ) @ Override public final URL [ ] findResourcesByClasspath ( ) { } }
|
List < URL > list = new ArrayList < URL > ( ) ; String classpath = System . getProperty ( "java.class.path" ) ; StringTokenizer tokenizer = new StringTokenizer ( classpath , File . pathSeparator ) ; while ( tokenizer . hasMoreTokens ( ) ) { String path = tokenizer . nextToken ( ) ; File fp = new File ( path ) ; if ( ! fp . exists ( ) ) throw new ResourceReadingException ( "File in java.class.path does not exist: " + fp ) ; try { list . add ( fp . toURL ( ) ) ; } catch ( MalformedURLException e ) { throw new ResourceReadingException ( e ) ; } } return list . toArray ( new URL [ list . size ( ) ] ) ;
|
public class ExpandableButtonMenu { /** * Set image drawable for a menu button
* @ param button
* @ param drawable */
public void setMenuButtonImage ( MenuButton button , Drawable drawable ) { } }
|
switch ( button ) { case MID : mMidBtn . setImageDrawable ( drawable ) ; break ; case LEFT : mLeftBtn . setImageDrawable ( drawable ) ; break ; case RIGHT : mRightBtn . setImageDrawable ( drawable ) ; break ; }
|
public class VpcConfigMarshaller { /** * Marshall the given parameter object . */
public void marshall ( VpcConfig vpcConfig , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( vpcConfig == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( vpcConfig . getVpcId ( ) , VPCID_BINDING ) ; protocolMarshaller . marshall ( vpcConfig . getSubnets ( ) , SUBNETS_BINDING ) ; protocolMarshaller . marshall ( vpcConfig . getSecurityGroupIds ( ) , SECURITYGROUPIDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class BsJobLogCA { /** * Registers a filter aggregation with optional query , builder and
 * sub - aggregation customizations .
 * @ param name the aggregation name
 * @ param queryLambda optional callback that populates the filter query
 * @ param opLambda optional callback that customizes the FilterAggregationBuilder
 * @ param aggsLambda optional callback that declares nested sub - aggregations */
public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsJobLogCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsJobLogCA > aggsLambda ) { } }
|
// Build the query condition object and let the caller populate it.
JobLogCQ cq = new JobLogCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; }
// Register the filter aggregation under the given name.
FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; }
// Attach any nested sub-aggregations declared by the caller.
if ( aggsLambda != null ) { JobLogCA ca = new JobLogCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; }
|
public class StabilitySquareFiducialEstimate { /** * Processes the observation and generates a stability estimate .
 * Estimates a reference pose from the corners , generates perturbed samples
 * around each corner ( see createSamples ) , and records the worst - case
 * location / orientation deviation in maxLocation and maxOrientation .
 * @ param sampleRadius Radius around the corner pixels it will sample
 * @ param input Observed corner location of the fiducial in distorted pixels . Must be in correct order .
 * @ return true if successful or false if it failed ( fewer than 10 samples ) */
public boolean process ( double sampleRadius , Quadrilateral_F64 input ) { } }
|
// Estimate the reference pose from the unperturbed corners.
work . set ( input ) ; samples . reset ( ) ; estimator . process ( work , false ) ; estimator . getWorldToCamera ( ) . invert ( referenceCameraToWorld ) ;
// NOTE(review): samples.reset() is invoked twice in a row here; the first
// call looks redundant - confirm estimator.process() does not touch 'samples'.
samples . reset ( ) ; createSamples ( sampleRadius , work . a , input . a ) ; createSamples ( sampleRadius , work . b , input . b ) ; createSamples ( sampleRadius , work . c , input . c ) ; createSamples ( sampleRadius , work . d , input . d ) ; if ( samples . size ( ) < 10 ) return false ;
// Track the worst-case deviation from the reference pose over all samples.
maxLocation = 0 ; maxOrientation = 0 ; for ( int i = 0 ; i < samples . size ( ) ; i ++ ) { referenceCameraToWorld . concat ( samples . get ( i ) , difference ) ; ConvertRotation3D_F64 . matrixToRodrigues ( difference . getR ( ) , rodrigues ) ; double theta = Math . abs ( rodrigues . theta ) ; double d = difference . getT ( ) . norm ( ) ; if ( theta > maxOrientation ) { maxOrientation = theta ; } if ( d > maxLocation ) { maxLocation = d ; } } return true ;
|
public class PropertyReaderHelper { /** * Lookup in { @ link Environment } a certain property or a list of properties .
* @ param env
* the { @ link Environment } context from which to
* @ param propName
* the name of the property to lookup from { @ link Environment } .
* @ return the list */
public static List < String > get ( final Environment env , final String propName ) { } }
|
final List < String > list = new ArrayList < > ( ) ; final String singleProp = env . getProperty ( propName ) ; if ( singleProp != null ) { list . add ( singleProp ) ; return list ; } int counter = 0 ; String prop = env . getProperty ( propName + "[" + counter + "]" ) ; while ( prop != null ) { list . add ( prop ) ; counter ++ ; prop = env . getProperty ( propName + "[" + counter + "]" ) ; } return list ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.