signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class HivePurgerQueryTemplate { /** * This method builds the where clause for the insertion query .
* If prefix is a , then it builds a . datepartition = ' 2016-01-01-00 ' AND a . size = ' 12345 ' from [ datepartition : ' 2016-01-01-00 ' , size : ' 12345 ' ] */
public static String getWhereClauseForPartition ( Map < String , String > spec , String prefix ) { } } | StringBuilder sb = new StringBuilder ( ) ; for ( Map . Entry < String , String > entry : spec . entrySet ( ) ) { if ( ! sb . toString ( ) . isEmpty ( ) ) { sb . append ( " AND " ) ; } sb . append ( prefix + entry . getKey ( ) ) ; sb . append ( "=" ) ; sb . append ( PartitionUtils . getQuotedString ( entry . getValue ( ) ) ) ; } return sb . toString ( ) ; |
public class CmsSitemapController {
    /**
     * Sets the editor mode in the user session.<p>
     *
     * Fires an asynchronous RPC call; there is no completion callback logic because
     * nothing needs to happen client-side once the mode is stored.
     *
     * @param editorMode the editor mode to persist server-side
     */
    public void setEditorModeInSession(final EditorMode editorMode) {
        CmsRpcAction<Void> action = new CmsRpcAction<Void>() {

            @Override
            public void execute() {
                // 'this' is the async callback passed to the RPC service stub.
                getService().setEditorMode(editorMode, this);
            }

            @Override
            protected void onResponse(Void result) {
                // nothing to do
            }
        };
        action.execute();
    }
}
public class ManagementClientImpl {
    /*
     * (non-Javadoc)
     * @see tuwien.auto.calimero.mgmt.ManagementClient#writeMemory
     * (tuwien.auto.calimero.mgmt.Destination, int, byte[])
     */
    public void writeMemory(Destination dst, int startAddr, byte[] data)
        throws KNXDisconnectException, KNXTimeoutException, KNXRemoteException, KNXLinkClosedException
    {
        // A single memory-write APDU carries a 16-bit start address and at most 63 payload bytes.
        if (startAddr < 0 || startAddr > 0xFFFF || data.length == 0 || data.length > 63)
            throw new KNXIllegalArgumentException("argument value out of range");
        // ASDU layout: [0] = byte count, [1..2] = start address (big endian), [3..] = data.
        final byte[] asdu = new byte[data.length + 3];
        asdu[0] = (byte) data.length;
        asdu[1] = (byte) (startAddr >> 8);
        asdu[2] = (byte) startAddr;
        for (int i = 0; i < data.length; ++i)
            asdu[3 + i] = data[i];
        if (dst.isConnectionOriented())
            tl.connect(dst);
        else
            // Memory services are defined connection-oriented; log but proceed anyway.
            logger.error("doing write memory in connectionless mode, " + dst.toString());
        final byte[] send = DataUnitBuilder.createCompactAPDU(MEMORY_WRITE, asdu);
        if (dst.isVerifyMode()) {
            // explicitly read back data
            final byte[] apdu = sendWait(dst, priority, send, MEMORY_RESPONSE, 2, 65);
            // NOTE(review): the low 6 bits of apdu[1] presumably carry the returned byte
            // count; 0 signals the remote application refused the write - confirm vs. KNX spec.
            if ((apdu[1] & 0x3f) == 0)
                throw new KNXRemoteException("remote app. could not write memory");
            if (apdu.length - 4 != data.length)
                throw new KNXInvalidResponseException("number of memory bytes differ");
            // Compare the echoed memory content byte-for-byte against what was sent
            // (response data at offset 4 lines up with asdu data at offset 3).
            for (int i = 4; i < apdu.length; ++i)
                if (apdu[i] != asdu[i - 1])
                    throw new KNXRemoteException("verify failed (erroneous memory data)");
        }
        else
            tl.sendData(dst, priority, send);
    }
}
public class EventReader {
    /**
     * Retrieve S3 object URL from source then downloads the object processes each event through
     * call back functions.
     *
     * <p>Progress is reported at both source and per-log granularity, and the SQS
     * message is cleaned up in the finally block regardless of outcome.
     *
     * @param source {@link CloudTrailSource} to process.
     */
    public void processSource(CloudTrailSource source) {
        boolean filterSourceOut = false;
        boolean downloadLogSuccess = true;
        boolean processSourceSuccess = false;
        ProgressStatus processSourceStatus = new ProgressStatus(ProgressState.processSource,
            new BasicProcessSourceInfo(source, processSourceSuccess));
        final Object processSourceReportObject = progressReporter.reportStart(processSourceStatus);
        // Start to process the source
        try {
            // Apply source filter first. If source filtered out then delete source immediately and return.
            if (!sourceFilter.filterSource(source)) {
                logger.debug("AWSCloudTrailSource " + source + " has been filtered out.");
                processSourceSuccess = true;
                filterSourceOut = true;
            } else {
                // Track how many logs remain; the source only counts as successful
                // when every log file was processed.
                int nLogFilesToProcess = ((SQSBasedSource) source).getLogs().size();
                for (CloudTrailLog ctLog : ((SQSBasedSource) source).getLogs()) {
                    // start to process the log
                    boolean processLogSuccess = false;
                    ProgressStatus processLogStatus = new ProgressStatus(ProgressState.processLog,
                        new BasicProcessLogInfo(source, ctLog, processLogSuccess));
                    final Object processLogReportObject = progressReporter.reportStart(processLogStatus);
                    try {
                        byte[] s3ObjectBytes = s3Manager.downloadLog(ctLog, source);
                        if (s3ObjectBytes == null) {
                            downloadLogSuccess = false;
                            continue; // Failure downloading log file. Skip it.
                        }
                        // Logs are gzip-compressed in S3; decompress and stream events out.
                        try (GZIPInputStream gzippedInputStream =
                                new GZIPInputStream(new ByteArrayInputStream(s3ObjectBytes));
                                EventSerializer serializer = getEventSerializer(gzippedInputStream, ctLog)) {
                            emitEvents(serializer);
                            // decrement this value upon successfully processed a log
                            nLogFilesToProcess--;
                            processLogSuccess = true;
                        } catch (IllegalArgumentException | IOException e) {
                            LibraryUtils.handleException(exceptionHandler, processLogStatus, e,
                                "Failed to parse log file.");
                        }
                    } finally {
                        // end to process the log
                        LibraryUtils.endToProcess(progressReporter, processLogSuccess, processLogStatus,
                            processLogReportObject);
                    }
                }
                if (nLogFilesToProcess == 0) {
                    processSourceSuccess = true;
                }
            }
        } catch (CallbackException ex) {
            exceptionHandler.handleException(ex);
        } finally {
            // Delete/keep the SQS message depending on how processing went.
            cleanupMessage(filterSourceOut, downloadLogSuccess, processSourceSuccess, source);
            // end to process the source
            LibraryUtils.endToProcess(progressReporter, processSourceSuccess, processSourceStatus,
                processSourceReportObject);
        }
    }
}
public class ToStringStyle { /** * Unregisters the given object .
* Used by the reflection methods to avoid infinite loops .
* @ param value The object to unregister . */
static void unregister ( Object value ) { } } | if ( value != null ) { Map < Object , Object > m = getRegistry ( ) ; if ( m != null ) { m . remove ( value ) ; if ( m . isEmpty ( ) ) { REGISTRY . remove ( ) ; } } } |
public class AbstractFilteredDataStream { /** * Add many filters .
* @ param newFilters The filters to add . */
public final void addFilters ( final List < IDataFilter > newFilters ) { } } | assertFilterListExists ( ) ; for ( IDataFilter filter : newFilters ) { addFilter ( filter ) ; } |
public class ModbusSerialTransport { /** * Reads the specified number of bytes from the input stream
* @ param buffer Buffer to put data into
* @ param bytesToRead Number of bytes to read
* @ throws IOException If the port is invalid or if the number of bytes returned is not equal to that asked for */
void readBytes ( byte [ ] buffer , long bytesToRead ) throws IOException { } } | if ( commPort != null && commPort . isOpen ( ) ) { int cnt = commPort . readBytes ( buffer , bytesToRead ) ; if ( cnt != bytesToRead ) { throw new IOException ( "Cannot read from serial port - truncated" ) ; } } else { throw new IOException ( "Comm port is not valid or not open" ) ; } |
public class CmsCategoryWidget { /** * Generates the right height for the view . < p > */
protected void setHeight ( ) { } } | if ( m_categoryField . getValuesSet ( ) > 0 ) { m_height = ( m_categoryField . getValuesSet ( ) * 26 ) + 4 ; if ( m_height > MAX_HEIGHT ) { m_height = MAX_HEIGHT ; m_categoryField . getScrollPanel ( ) . setResizable ( true ) ; } else { m_categoryField . getScrollPanel ( ) . setResizable ( false ) ; } } else { m_height = DEFAULT_HEIGHT ; m_categoryField . getScrollPanel ( ) . setResizable ( false ) ; } m_categoryField . setHeight ( m_height ) ; |
public class Wizard {
    /**
     * Same as {@link #open(String)} with the ability to pass a different width and height.
     *
     * <p>Lazily creates the backing window on first open; subsequent opens just
     * retitle it. All steps are reset against a fresh context before showing.
     *
     * @param title  window title
     * @param width  window width in pixels
     * @param height window height in pixels
     */
    public void open(String title, int width, int height) {
        assertSteps();
        if (window == null) {
            window = new DefaultWindow(title);
            window.setWidth(width);
            window.setHeight(height);
            window.trapWidget(asWidget());
            window.setGlassEnabled(true);
            window.addCloseHandler(closeEvent -> {
                // calls the cancel() to remove the datasource as the user wants to cancel the ADD operation.
                onCancel();
            });
        } else {
            window.setTitle(title);
        }
        // Fresh wizard run: reset context and all steps, then start from the initial state.
        resetContext();
        for (WizardStep<C, S> step : steps.values()) {
            step.reset(context);
        }
        state = initialState();
        pushState(state);
        window.center();
    }
}
public class AnnotationMetadataWriter {
    /**
     * Writes annotation attributes to the given generator.
     *
     * <p>Emits bytecode that builds a flat {@code Object[]} of alternating
     * key/value pairs and then converts it to a map via the {@code mapOf} helper.
     *
     * @param declaringType The declaring type
     * @param declaringClassWriter The declaring class writer
     * @param generatorAdapter The generator adapter
     * @param annotationData The annotation data
     * @param loadTypeMethods Generated methods that load types
     */
    @Internal
    private static void pushAnnotationAttributes(Type declaringType, ClassVisitor declaringClassWriter,
            GeneratorAdapter generatorAdapter, Map<? extends CharSequence, Object> annotationData,
            Map<String, GeneratorAdapter> loadTypeMethods) {
        // Each entry contributes one key slot and one value slot.
        int totalSize = annotationData.size() * 2;
        // start a new array
        pushNewArray(generatorAdapter, Object.class, totalSize);
        int i = 0;
        for (Map.Entry<? extends CharSequence, Object> entry : annotationData.entrySet()) {
            // use the property name as the key
            String memberName = entry.getKey().toString();
            pushStoreStringInArray(generatorAdapter, i++, totalSize, memberName);
            // use the property type as the value
            Object value = entry.getValue();
            pushStoreInArray(generatorAdapter, i++, totalSize,
                () -> pushValue(declaringType, declaringClassWriter, generatorAdapter, value, loadTypeMethods));
        }
        // invoke the AbstractBeanDefinition.createMap method
        generatorAdapter.invokeStatic(Type.getType(AnnotationUtil.class), METHOD_MAP_OF);
    }
}
public class GPixelMath { /** * Performs pixel - wise addition < br >
* d ( x , y ) = inputA ( x , y ) + inputB ( x , y )
* @ param inputA Input image . Not modified .
* @ param inputB Input image . Not modified .
* @ param output Output image . Modified . */
public static < T extends ImageBase < T > , O extends ImageBase > void add ( T inputA , T inputB , O output ) { } } | if ( inputA instanceof ImageGray ) { if ( GrayU8 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayU8 ) inputA , ( GrayU8 ) inputB , ( GrayU16 ) output ) ; } else if ( GrayS8 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayS8 ) inputA , ( GrayS8 ) inputB , ( GrayS16 ) output ) ; } else if ( GrayU16 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayU16 ) inputA , ( GrayU16 ) inputB , ( GrayS32 ) output ) ; } else if ( GrayS16 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayS16 ) inputA , ( GrayS16 ) inputB , ( GrayS32 ) output ) ; } else if ( GrayS32 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayS32 ) inputA , ( GrayS32 ) inputB , ( GrayS32 ) output ) ; } else if ( GrayS64 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayS64 ) inputA , ( GrayS64 ) inputB , ( GrayS64 ) output ) ; } else if ( GrayF32 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayF32 ) inputA , ( GrayF32 ) inputB , ( GrayF32 ) output ) ; } else if ( GrayF64 . class == inputA . getClass ( ) ) { PixelMath . add ( ( GrayF64 ) inputA , ( GrayF64 ) inputB , ( GrayF64 ) output ) ; } else { throw new IllegalArgumentException ( "Unknown image Type: " + inputA . getClass ( ) . getSimpleName ( ) ) ; } } else if ( inputA instanceof Planar ) { Planar inA = ( Planar ) inputA ; Planar inB = ( Planar ) inputB ; Planar out = ( Planar ) output ; for ( int i = 0 ; i < inA . getNumBands ( ) ; i ++ ) { add ( inA . getBand ( i ) , inB . getBand ( i ) , out . getBand ( i ) ) ; } } |
public class AuthorInfo {
    /**
     * setter for lastName - sets The last name of the author.
     *
     * <p>UIMA-generated feature setter: validates the feature exists (when type
     * checking is enabled) and writes the value through the low-level CAS API.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setLastName(String v) {
        if (AuthorInfo_Type.featOkTst && ((AuthorInfo_Type) jcasType).casFeat_lastName == null)
            jcasType.jcas.throwFeatMissing("lastName", "de.julielab.jules.types.AuthorInfo");
        jcasType.ll_cas.ll_setStringValue(addr, ((AuthorInfo_Type) jcasType).casFeatCode_lastName, v);
    }
}
public class MultiImplementationSender {
    /**
     * Register a new possible implementation with the associated condition. The
     * implementation is added at the end so any other possible implementation
     * will be used before this one if the associated condition allow it.
     *
     * @param condition
     *            the condition that indicates if the implementation can be used
     *            at runtime
     * @param implementation
     *            the implementation to register
     * @return this instance for fluent chaining
     */
    public final MultiImplementationSender<M> addImplementation(Condition<Message> condition,
            MessageSender implementation) {
        // Appended last: registration order defines selection priority.
        implementations.add(new Implementation(condition, implementation));
        return this;
    }
}
public class MyfacesLogger {
    /**
     * Log a SEVERE message, with an array of object arguments.
     * The message is forwarded to appropriate Java Logger objects.
     *
     * @param sourceClass the name of the class that issued the logging request
     * @param sourceMethod the name of the method that issued the logging request
     * @param msg the string message (or a key in the resource bundle)
     * @param params an array of parameters to the message
     */
    public void severe(String sourceClass, String sourceMethod, String msg, Object[] params) {
        // Thin convenience wrapper around the generic logp with a fixed SEVERE level.
        logp(Level.SEVERE, sourceClass, sourceMethod, msg, params);
    }
}
public class Main {
    /**
     * This function flips the case of all characters in a provided string.
     *
     * @param inputStr A string containing alphabetic characters.
     * @return A string where the case of every character is switched;
     *         non-alphabetic characters are returned unchanged.
     *
     * Examples:
     * invertCase("Python")  -> "pYTHON"
     * invertCase("Pangram") -> "pANGRAM"
     * invertCase("LIttLE")  -> "liTTle"
     */
    public static String invertCase(String inputStr) {
        // Presized StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder result = new StringBuilder(inputStr.length());
        for (int i = 0; i < inputStr.length(); i++) {
            char currentChar = inputStr.charAt(i);
            if (Character.isUpperCase(currentChar)) {
                result.append(Character.toLowerCase(currentChar));
            } else {
                // Non-letters are unaffected by toUpperCase, matching the original behavior.
                result.append(Character.toUpperCase(currentChar));
            }
        }
        return result.toString();
    }
}
public class InterconnectLocationClient { /** * Retrieves the list of interconnect locations available to the specified project .
* < p > Sample code :
* < pre > < code >
* try ( InterconnectLocationClient interconnectLocationClient = InterconnectLocationClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( InterconnectLocation element : interconnectLocationClient . listInterconnectLocations ( project ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final ListInterconnectLocationsPagedResponse listInterconnectLocations ( ProjectName project ) { } } | ListInterconnectLocationsHttpRequest request = ListInterconnectLocationsHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return listInterconnectLocations ( request ) ; |
public class JsMessageVisitor { /** * Initializes the meta data in a message builder given a node that may
* contain JsDoc properties .
* @ param builder the message builder whose meta data will be initialized
* @ param node the node with the message ' s JSDoc properties
* @ return true if message has JsDoc with valid description in @ desc
* annotation */
private static boolean maybeInitMetaDataFromJsDoc ( Builder builder , Node node ) { } } | boolean messageHasDesc = false ; JSDocInfo info = node . getJSDocInfo ( ) ; if ( info != null ) { String desc = info . getDescription ( ) ; if ( desc != null ) { builder . setDesc ( desc ) ; messageHasDesc = true ; } if ( info . isHidden ( ) ) { builder . setIsHidden ( true ) ; } if ( info . getMeaning ( ) != null ) { builder . setMeaning ( info . getMeaning ( ) ) ; } } return messageHasDesc ; |
public class CreateDiskResult { /** * An object describing the API operations .
* @ param operations
* An object describing the API operations . */
public void setOperations ( java . util . Collection < Operation > operations ) { } } | if ( operations == null ) { this . operations = null ; return ; } this . operations = new java . util . ArrayList < Operation > ( operations ) ; |
public class ObjectAccessor {
    /**
     * Creates a copy of the wrapped object, where the copy's type is a
     * specified subclass of the wrapped object's class.
     *
     * Note: it does a "shallow" copy. Reference fields are not copied
     * recursively.
     *
     * @param subclass A subclass of the wrapped object's class.
     * @param <S> The subclass.
     * @return A shallow copy.
     */
    public <S extends T> S copyIntoSubclass(Class<S> subclass) {
        // Instantiate the subclass without invoking a constructor, then copy fields over.
        S copy = Instantiator.of(subclass).instantiate();
        return copyInto(copy);
    }
}
public class PoolManager {
    /**
     * Change the pool of a particular job.
     *
     * <p>Removes the job from its current pool, rewrites the pool property in
     * its configuration, and re-adds it so it lands in the new pool.
     * Synchronized so concurrent pool changes cannot interleave.
     *
     * @param job  the job to move
     * @param pool the target pool name
     */
    public synchronized void setPool(JobInProgress job, String pool) {
        removeJob(job);
        job.getJobConf().set(EXPLICIT_POOL_PROPERTY, pool);
        addJob(job);
    }
}
public class ServerUrl {
    /**
     * Decode a cloudId to a Node representation. This helps when using
     * official elasticsearch as a service: https://cloud.elastic.co
     * The cloudId can be found from the cloud console.
     *
     * @param cloudId The cloud ID to decode.
     * @return A Node running on https://address:443
     * @throws IllegalArgumentException if the decoded payload is not of the
     *         expected {@code host$clusterId[$kibanaId]} form
     */
    public static String decodeCloudId(String cloudId) {
        // 1. Ignore anything before `:` (the human-readable cluster name).
        String id = cloudId.substring(cloudId.indexOf(':') + 1);
        // 2. base64 decode - decode bytes as UTF-8 explicitly instead of relying
        //    on the platform default charset.
        String decoded = new String(Base64.getDecoder().decode(id), StandardCharsets.UTF_8);
        // 3. separate based on `$`
        String[] words = decoded.split("\\$");
        if (words.length < 2) {
            // Fail with a clear message rather than an ArrayIndexOutOfBoundsException.
            throw new IllegalArgumentException(
                "cloudId did not decode to the expected host$clusterId form");
        }
        // 4. form the URL: https://<clusterId>.<host>:443
        return "https://" + words[1] + "." + words[0] + ":443";
    }
}
public class CPDefinitionPersistenceImpl {
    /**
     * Caches the cp definitions in the entity cache if it is enabled.
     *
     * @param cpDefinitions the cp definitions
     */
    @Override
    public void cacheResult(List<CPDefinition> cpDefinitions) {
        for (CPDefinition cpDefinition : cpDefinitions) {
            // Only cache entities not already present; otherwise refresh the
            // entity's snapshot of original values.
            if (entityCache.getResult(CPDefinitionModelImpl.ENTITY_CACHE_ENABLED, CPDefinitionImpl.class,
                    cpDefinition.getPrimaryKey()) == null) {
                cacheResult(cpDefinition);
            } else {
                cpDefinition.resetOriginalValues();
            }
        }
    }
}
public class ArrowConverter {
    /**
     * Convert a set of input strings to arrow columns.
     *
     * <p>Convenience wrapper: treats the single record as a one-element batch
     * and delegates to the batch overload.
     *
     * @param bufferAllocator the buffer allocator to use
     * @param schema the schema to use
     * @param dataVecRecord the collection of input strings to process
     * @return the created vectors
     */
    public static List<FieldVector> toArrowColumnsStringSingle(final BufferAllocator bufferAllocator,
            final Schema schema, List<String> dataVecRecord) {
        return toArrowColumnsString(bufferAllocator, schema, Arrays.asList(dataVecRecord));
    }
}
public class DefaultGroovyMethods { /** * Bitwise OR together two BitSets . Called when the ' | ' operator is used
* between two bit sets .
* @ param left a BitSet
* @ param right another BitSet to bitwise AND
* @ return the bitwise OR of both BitSets
* @ since 1.5.0 */
public static BitSet or ( BitSet left , BitSet right ) { } } | BitSet result = ( BitSet ) left . clone ( ) ; result . or ( right ) ; return result ; |
public class SearchApi {
    /**
     * Search on a string. Search for entities that match a given sub-string. ---
     * This route is cached for up to 3600 seconds. SSO Scope:
     * esi-search.search_structures.v1
     *
     * @param categories     Type of entities to search for (required)
     * @param characterId    An EVE character ID (required)
     * @param search         The string to search on (required)
     * @param acceptLanguage Language to use in the response (optional, default to en-us)
     * @param datasource     The server name you would like data from (optional, default to tranquility)
     * @param ifNoneMatch    ETag from a previous request. A 304 will be returned if this
     *                       matches the current ETag (optional)
     * @param language       Language to use in the response, takes precedence over
     *                       Accept-Language (optional, default to en-us)
     * @param strict         Whether the search should be a strict match (optional, default to false)
     * @param token          Access token to use if unable to set a header (optional)
     * @return ApiResponse&lt;CharacterSearchResponse&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot
     *                      deserialize the response body
     */
    public ApiResponse<CharacterSearchResponse> getCharactersCharacterIdSearchWithHttpInfo(List<String> categories,
            Integer characterId, String search, String acceptLanguage, String datasource, String ifNoneMatch,
            String language, Boolean strict, String token) throws ApiException {
        // Validate required params and build the HTTP call (last arg: no progress listener).
        com.squareup.okhttp.Call call = getCharactersCharacterIdSearchValidateBeforeCall(categories, characterId,
            search, acceptLanguage, datasource, ifNoneMatch, language, strict, token, null);
        // Capture the generic response type for JSON deserialization.
        Type localVarReturnType = new TypeToken<CharacterSearchResponse>() { }.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class PinotAuditCountHttpClient {
    /**
     * A thread-safe method which fetches a tier-to-count mapping.
     *
     * The returned json object from Pinot contains below information
     * "aggregationResults": [
     *   "groupByResult": [
     *     "value": "172765137.00000",
     *     "group": [
     *       "kafka-08-tracking-local"
     * "exceptions": [
     *
     * @param datasetName name of dataset
     * @param start time start point in milliseconds
     * @param end time end point in milliseconds
     * @return A tier to record count mapping when succeeded. Otherwise a null value is returned
     * @throws IOException on a non-2xx HTTP status or when the response carries no aggregation results
     */
    public Map<String, Long> fetch(String datasetName, long start, long end) throws IOException {
        Map<String, Long> map = new HashMap<>();
        // Pinot SQL aggregation over the audit table, grouped by tier.
        String query = "select tier, sum(count) from kafkaAudit where "
            + "eventType=\"" + datasetName + "\" and "
            + "beginTimestamp >= \"" + start + "\" and "
            + "beginTimestamp < \"" + end + "\" group by tier";
        String fullURL = targetUrl + URLEncoder.encode(query, Charsets.UTF_8.toString());
        HttpGet req = new HttpGet(fullURL);
        String rst = null;
        HttpEntity entity = null;
        log.info("Full url for {} is {}", datasetName, fullURL);
        try {
            CloseableHttpResponse response = httpClient.execute(req, HttpClientContext.create());
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode < 200 || statusCode >= 300) {
                throw new IOException(String.format("status code: %d, reason: %s", statusCode,
                    response.getStatusLine().getReasonPhrase()));
            }
            entity = response.getEntity();
            rst = EntityUtils.toString(entity);
        } finally {
            // Fully consume the entity so the underlying connection can be reused.
            if (entity != null) {
                EntityUtils.consume(entity);
            }
        }
        JsonObject all = PARSER.parse(rst).getAsJsonObject();
        JsonArray aggregationResults = all.getAsJsonArray("aggregationResults");
        if (aggregationResults == null || aggregationResults.size() == 0) {
            log.error(all.toString());
            throw new IOException("No aggregation results " + all.toString());
        }
        JsonObject aggregation = (JsonObject) aggregationResults.get(0);
        JsonArray groupByResult = aggregation.getAsJsonArray("groupByResult");
        if (groupByResult == null || groupByResult.size() == 0) {
            log.error(aggregation.toString());
            throw new IOException("No aggregation results " + aggregation.toString());
        }
        log.info("Audit count for {} is {}", datasetName, groupByResult);
        for (JsonElement ele : groupByResult) {
            JsonObject record = (JsonObject) ele;
            // "value" arrives as a decimal string (e.g. "172765137.00000");
            // parse as double then truncate to long.
            map.put(record.getAsJsonArray("group").get(0).getAsString(),
                (long) Double.parseDouble(record.get("value").getAsString()));
        }
        return map;
    }
}
public class ForeignKey {
    /**
     * TableA as property name in TableB.
     *
     * <p>Derives the reverse-relationship property name: for direct relations it
     * is based on tableA's class name (prefixed with the FK column names when
     * several relations exist between the same tables); for relations through a
     * join table (tableAB) it is derived from that table's first foreign key.
     * The result is pluralized unless tableA's multiplicity is "one".
     */
    public String getReverseRelationshipName() {
        String ret = "";
        if (tableAB == null) {
            if (multipleRelationsBetweenTables) // foreignKey will be non-null since it applies only to one-to-X relation
            {
                // Disambiguate with the concatenated FK column names.
                ret = concatenateColumnNames(this);
                ret = ret.substring(1) + "_"; // remove leading _ and add trailing _ .
            }
            ret += StringUtility.firstLetterToLower(tableA.getClassName());
        } else {
            // Many-to-many via join table: name comes from the join table's first FK.
            ForeignKey[] fks = tableAB.getForeignKeys().values().toArray(new ForeignKey[2]);
            ret = ForeignKey.concatenateColumnNames(fks[0]).substring(1);
        }
        String multiplicity = getTableAMultiplicity();
        if (!multiplicity.equals("one"))
            ret = StringUtility.englishPluralize(ret);
        return ret;
    }
}
public class Participant {
    /**
     * Create a ParticipantDeleter to execute delete.
     *
     * @param pathAccountSid The SID of the Account that created the resources to
     *                       delete
     * @param pathConferenceSid The SID of the conference with the participants to
     *                          delete
     * @param pathCallSid The Call SID of the resources to delete
     * @return ParticipantDeleter capable of executing the delete
     */
    public static ParticipantDeleter deleter(final String pathAccountSid, final String pathConferenceSid,
            final String pathCallSid) {
        // Simple static factory; the deleter performs the actual API call later.
        return new ParticipantDeleter(pathAccountSid, pathConferenceSid, pathCallSid);
    }
}
public class StorableIntrospector {
    /**
     * Create a representation of the signature which includes the method name.
     * This uniquely identifies the method.
     *
     * @param m method to describe
     * @return "name:descriptor", e.g. "get:()Ljava/lang/Object;"
     */
    private static String createSig(Method m) {
        return m.getName() + ':' + MethodDesc.forMethod(m).getDescriptor();
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the EClass for IfcExternallyDefinedTextFont from the
     * registered Ifc4 package (classifier index 250) and caches it.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcExternallyDefinedTextFont() {
        if (ifcExternallyDefinedTextFontEClass == null) {
            ifcExternallyDefinedTextFontEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(250);
        }
        return ifcExternallyDefinedTextFontEClass;
    }
}
public class StyleUtil { /** * Creates a mark with the specified parameters .
* @ param wellKnownName the well known name
* @ param fill the fill
* @ param stroke the stroke
* @ return the mark */
public static MarkInfo createMark ( String wellKnownName , FillInfo fill , StrokeInfo stroke ) { } } | MarkInfo mark = new MarkInfo ( ) ; mark . setFill ( fill ) ; mark . setStroke ( stroke ) ; WellKnownNameInfo wellKnownNameInfo = new WellKnownNameInfo ( ) ; wellKnownNameInfo . setWellKnownName ( wellKnownName ) ; mark . setWellKnownName ( wellKnownNameInfo ) ; return mark ; |
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the EClass for IfcSpaceProgram from the registered
     * Ifc2x3tc1 package (classifier index 524) and caches it.
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcSpaceProgram() {
        if (ifcSpaceProgramEClass == null) {
            ifcSpaceProgramEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(524);
        }
        return ifcSpaceProgramEClass;
    }
}
public class PCA {
    /**
     * Takes a set of data on each row, with the same number of features as the constructing data
     * and returns the data in the coordinates of the basis set about the mean.
     *
     * @param data Data of the same features used to construct the PCA object
     * @return The record in terms of the principal component vectors, you can set unused ones to zero.
     */
    public INDArray convertToComponents(INDArray data) {
        // Center the samples about the training mean (assumes one sample per row,
        // features along columns - TODO confirm against the constructor's layout).
        INDArray dx = data.subRowVector(mean);
        // Project onto the eigenvector basis; the tensor contraction pairs the
        // feature axes, and transposei restores samples-as-rows orientation.
        return Nd4j.tensorMmul(eigenvectors.transpose(), dx, new int[][] { { 1 }, { 1 } }).transposei();
    }
}
public class Range { /** * Returns the smallest range that includes this range and
* the { @ code value } .
* See { @ link # extend ( Range ) } for more details , as this method is
* equivalent to { @ code extend ( Range . create ( value , value ) ) } . < / p >
* @ param value a non - { @ code null } { @ code T } reference
* @ return the extension of this range and the value .
* @ throws NullPointerException if { @ code value } was { @ code null } */
public Range < T > extend ( T value ) { } } | if ( value == null ) throw new IllegalArgumentException ( "value must not be null" ) ; return extend ( value , value ) ; |
public class KeyStoreSecurity {
    /**
     * Creates a new Message Authentication Code.
     *
     * <p>The MAC algorithm (e.g. HmacSHA256) is taken from the key stored under
     * the alias, not from the alias string itself.
     *
     * @param alias name under which the secret key was registered in {@code keys}
     * @return Mac implementation, initialized with the looked-up key
     * @throws RuntimeException wrapping any algorithm/key failure
     */
    public Mac getMac(final @NotNull String alias) {
        try {
            final Key secretKey = keys.get(alias);
            // NOTE(review): an unknown alias makes secretKey null and this throws
            // NullPointerException from getAlgorithm() - confirm callers guarantee presence.
            Mac mac = Mac.getInstance(secretKey.getAlgorithm());
            mac.init(secretKey);
            return mac;
        } catch (NoSuchAlgorithmException | InvalidKeyException e) {
            throw new RuntimeException(e);
        }
    }
}
public class JavacTypes {
    /**
     * Returns an object cast to the specified type.
     *
     * @throws NullPointerException if the object is {@code null}
     * @throws IllegalArgumentException if the object is of the wrong type
     */
    private static <T> T cast(Class<T> clazz, Object o) {
        if (clazz.isInstance(o)) {
            return clazz.cast(o);
        }
        // For a null argument, o.toString() raises the documented NullPointerException.
        throw new IllegalArgumentException(o.toString());
    }
}
public class Update {
    /**
     * Builds an append-update clause:
     * <pre>
     * { $append : { path : value } }
     * </pre>
     *
     * @param field the document path to append to
     * @param value the literal value to append
     * @return the constructed {@code AppendInsert} operation
     */
    public static AppendInsert append(String field, AppendInsertLiteral value) {
        return new AppendInsert("$append", field).more(value);
    }
}
public class BProgram {
    /**
     * Runs the passed code in the passed scope.
     *
     * <p>Rhino errors and wrapped Java exceptions are translated into the
     * BPjs exception hierarchy before being rethrown.
     *
     * @param script Code to evaluate
     * @param scriptName For error reporting purposes.
     * @return Result of code evaluation.
     */
    protected Object evaluate(String script, String scriptName) {
        try {
            Context curCtx = Context.getCurrentContext();
            curCtx.setLanguageVersion(Context.VERSION_1_8);
            return curCtx.evaluateString(programScope, script, scriptName, 1, null);
        } catch (EcmaError rerr) {
            // JavaScript-level error (ReferenceError, TypeError, ...).
            throw new BPjsCodeEvaluationException(rerr);
        } catch (WrappedException wrapped) {
            // Rhino wrapped a Java exception; unwrap and translate known cases.
            try {
                throw wrapped.getCause();
            } catch (BPjsException be) {
                throw be;
            } catch (IllegalStateException ise) {
                String msg = ise.getMessage();
                // Rhino raises this specific IllegalStateException when a continuation
                // is captured outside continuation-enabled execution - i.e. bp.sync
                // was invoked outside a b-thread.
                if (msg.contains("Cannot capture continuation")
                        && msg.contains("executeScriptWithContinuations or callFunctionWithContinuations")) {
                    throw new BPjsCodeEvaluationException("bp.sync called outside of a b-thread");
                } else {
                    throw ise;
                }
            } catch (Throwable generalException) {
                throw new BPjsRuntimeException("(Wrapped) Exception evaluating BProgram code: "
                    + generalException.getMessage(), generalException);
            }
        } catch (EvaluatorException evalExp) {
            // Syntax-level failure in the evaluated script.
            throw new BPjsCodeEvaluationException(evalExp);
        } catch (Exception exp) {
            throw new BPjsRuntimeException("Error evaluating BProgram code: " + exp.getMessage(), exp);
        }
    }
}
public class ThemeManager { /** * Apply theme from intent . Only system use , don ' t call it ! */
public static void applyTheme ( Activity activity , boolean force ) { } } | if ( force || ThemeManager . hasSpecifiedTheme ( activity ) ) { activity . setTheme ( ThemeManager . getTheme ( activity ) ) ; } |
public class HistogramAggregationIterator {
    /**
     * Puts the next data point of an iterator in the internal buffer.
     *
     * @param i The index in {@link #iterators} of the iterator.
     * @param dp The last data point returned by that iterator.
     */
    private void putDataPoint(final int i, final HistogramDataPoint dp) {
        timestamps[i] = dp.timestamp();
        // Clone so the buffered value is decoupled from the iterator's mutable data point.
        values[i] = dp.clone();
    }
}
public class AbstractAnnotationMetadataBuilder {
    /**
     * Build the meta data for the given method element excluding any class metadata.
     *
     * @param element The element
     * @return The {@link AnnotationMetadata}
     */
    public AnnotationMetadata buildForMethod(T element) {
        // Prefer metadata that was mutated earlier in the build, if any was recorded.
        final AnnotationMetadata existing = MUTATED_ANNOTATION_METADATA.get(element);
        if (existing != null) {
            return existing;
        } else {
            DefaultAnnotationMetadata annotationMetadata = new DefaultAnnotationMetadata();
            // No parent element, and both inherited/declared-only flags off.
            return buildInternal(null, element, annotationMetadata, false, false);
        }
    }
}
public class PojoTask { /** * Sets the < code > context < / code > URL from which a relative
* < code > location < / code > URL should be evaluated . If < code > location < / code >
* is not relative then < code > context < / code > has no effect . The default is
* the location of the < code > PojoTask . class < / code > file . This method may be
* called at most once .
* @ param context A URL in < code > String < / code > form */
public void setContext ( final String context ) { } } | // Assertions .
if ( context == null ) { String msg = "Argument 'context' cannot be null." ; throw new IllegalArgumentException ( msg ) ; } if ( this . context != null ) { String msg = "Property 'context' may only be set once." ; throw new RuntimeException ( msg ) ; } this . context = context ; |
public class ModelMigration {
    /**
     * Computes (and caches) the diff between the already-migrated model and the
     * model currently defined by the server's entity beans.
     *
     * @return a {@link io.ebeaninternal.dbmigration.model.ModelDiff} object.
     */
    public ModelDiff diff() {
        // Return the cached result when the diff was already computed.
        if (diff != null) return diff;
        setOffline();
        setDefaults();
        try {
            // Model as described by the existing migration scripts.
            migrationModel = new MigrationModel(server);
            ModelContainer migrated = migrationModel.read();
            // Model as currently defined by the server.
            currentModel = new CurrentModel(server, constraintNaming);
            ModelContainer current = currentModel.read();
            diff = new ModelDiff(migrated);
            diff.compareTo(current);
        } finally {
            // Only reset the offline DB state when we were the ones to set it.
            if (!online) {
                DbOffline.reset();
            }
        }
        return diff;
    }
}
public class JdbcTemp { /** * this method can used for insert / update / delete for database
* In insertParams the parameter type only supports : String Integer Float or
* Long Double Bye Short if you need operate other types , you must use JDBC
* directly replacing this method .
* @ param insertParams
* the parameter that will be insert into sql .
* @ param sql
* the standard sql sentence .
* @ throws Exception */
public void operate ( Collection insertParams , String sql ) throws Exception { } } | Debug . logVerbose ( "[JdonFramework]--> enter getSingleObject " , module ) ; Connection c = null ; PreparedStatement ps = null ; try { c = dataSource . getConnection ( ) ; ps = c . prepareStatement ( sql ) ; Debug . logVerbose ( sql , module ) ; jdbcUtil . setQueryParams ( insertParams , ps ) ; ps . executeUpdate ( ) ; } catch ( SQLException se ) { throw new SQLException ( "SQLException: " + se . getMessage ( ) ) ; } catch ( Exception ex ) { Debug . logError ( ex , module ) ; throw new Exception ( ex ) ; } finally { if ( ps != null ) try { ps . close ( ) ; } catch ( SQLException quiet ) { } if ( c != null ) try { c . close ( ) ; } catch ( SQLException quiet ) { } } |
public class ProductPartitionTreeImpl { /** * Returns a new empty tree .
* @ param adGroupId the ID of the ad group
* @ param biddingStrategyConfig the bidding strategy configuration of the ad group */
private static ProductPartitionTreeImpl createEmptyAdGroupTree ( Long adGroupId , BiddingStrategyConfiguration biddingStrategyConfig ) { } } | Preconditions . checkNotNull ( adGroupId , "Null ad group ID" ) ; Preconditions . checkNotNull ( biddingStrategyConfig , "Null bidding strategy configuration" ) ; ProductPartitionNode rootNode = new ProductPartitionNode ( null , null , - 1L , new ProductDimensionComparator ( ) ) ; return new ProductPartitionTreeImpl ( adGroupId , biddingStrategyConfig , rootNode ) ; |
public class ServerService { /** * Modify servers by filter
* @ param serverFilter server filter
* @ param modifyServerConfig server config
* @ return OperationFuture wrapper for list of Servers */
public OperationFuture < List < Server > > modify ( ServerFilter serverFilter , ModifyServerConfig modifyServerConfig ) { } } | List < Server > serverList = find ( serverFilter ) . stream ( ) . map ( ServerMetadata :: asRefById ) . collect ( toList ( ) ) ; return modify ( serverList , modifyServerConfig ) ; |
public class Transforms {
    /**
     * Inverse hyperbolic tangent (atanh) function, element-wise.
     *
     * @param in   the input array
     * @param copy whether to return a new array ({@code true}) or operate in place ({@code false})
     * @return the array holding the result
     */
    public static INDArray atanh(INDArray in, boolean copy) {
        return Nd4j.getExecutioner().exec(new ATanh(in, (copy ? in.ulike() : in)));
    }
}
public class DruidRunner { /** * Stops the server and removes the index
* @ throws Exception */
public void stop ( ) throws Exception { } } | DruidIndices . getInstance ( ) . remove ( String . valueOf ( port ) ) ; server . stop ( ) ; |
public class ControllableQueue { /** * Adds a new node into the queue .
* @ param delay the delay in milliseconds
* @ param value the value */
public void add ( long delay , T value ) { } } | mQueue . add ( new DelayNode < > ( value , delay + mPastTime ) ) ; |
public class FeaturesImpl {
    /**
     * Gets phraselist feature info.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param phraselistId The ID of the feature to be retrieved.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PhraseListFeatureInfo object
     */
    public Observable<ServiceResponse<PhraseListFeatureInfo>> getPhraseListWithServiceResponseAsync(UUID appId, String versionId, int phraselistId) {
        // Validate required client configuration and parameters up front.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (appId == null) {
            throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
        }
        if (versionId == null) {
            throw new IllegalArgumentException("Parameter versionId is required and cannot be null.");
        }
        // Substitute the configured endpoint into the parameterized host template.
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        return service.getPhraseList(appId, versionId, phraselistId, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PhraseListFeatureInfo>>>() {
                @Override
                public Observable<ServiceResponse<PhraseListFeatureInfo>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed service response.
                        ServiceResponse<PhraseListFeatureInfo> clientResponse = getPhraseListDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/validation failures through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class BackendUser { /** * Synchronously sign up using credentials provided via constructor or setters .
* @ return boolean indicating sign up success */
public boolean signUp ( ) { } } | SignUpCredentials creds = new SignUpCredentials ( getUsername ( ) , getEmailAddress ( ) , getPassword ( ) ) ; SignUpResponse response = getAM ( ) . signUp ( creds ) ; if ( response . getStatus ( ) . isSuccess ( ) ) { this . initFrom ( response . get ( ) ) ; return true ; } else return false ; |
public class ResourceARNDetailMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ResourceARNDetail resourceARNDetail , ProtocolMarshaller protocolMarshaller ) { } } | if ( resourceARNDetail == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resourceARNDetail . getARN ( ) , ARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StringHelper {
    /**
     * Generates a root alias by truncating the "root name" defined by
     * the incoming description and removing/modifying any non-valid
     * alias characters.
     *
     * @param description The root name from which to generate a root alias.
     * @return The generated root alias.
     */
    private static String generateAliasRoot(String description) {
        String result = truncate(unqualifyEntityName(description), ALIAS_TRUNCATE_LENGTH)
                // Important to use Locale.ENGLISH. See HHH-8579. #toLowerCase() uses the default Locale. Certain DBs
                // do not like non-ascii characters in aliases, etc., so ensure consistency/portability here.
                .toLowerCase(Locale.ENGLISH)
                .replace('/', '_') // entityNames may now include slashes for the representations
                .replace('$', '_'); // classname may be an inner class
        result = cleanAlias(result);
        // A trailing digit could clash with numeric alias suffixes, so pad with 'x'.
        if (Character.isDigit(result.charAt(result.length() - 1))) {
            return result + "x"; // ick!
        } else {
            return result;
        }
    }
}
public class CmsSetupXmlHelper { /** * Returns a file from a given filename . < p >
* @ param xmlFilename the file name
* @ return the file */
private File getFile ( String xmlFilename ) { } } | File file = new File ( m_basePath + xmlFilename ) ; if ( ! file . exists ( ) || ! file . canRead ( ) ) { file = new File ( xmlFilename ) ; } return file ; |
public class StreamRemoteConnector {
    /**
     * Reads the next available byte from the input buffer provided, waiting if data is not
     * available.
     *
     * @param inputBuffer the buffer to read from
     * @return one byte of data from the stream
     * @throws IOException if there are problems reading data.
     */
    private byte nextByte(ByteBuffer inputBuffer) throws IOException {
        // Refill from the underlying stream only when the buffer is empty;
        // blocks until at least one byte is available.
        getAtLeastBytes(inputBuffer, 1, ReadMode.ONLY_WHEN_EMPTY);
        return inputBuffer.get();
    }
}
public class LogbackHelper { /** * Creates a new { @ link ConsoleAppender } to { @ code System . out } with the specified name and log pattern .
* @ see # buildLogPattern ( RootLoggerConfig ) */
public ConsoleAppender < ILoggingEvent > newConsoleAppender ( Context loggerContext , String name , String logPattern ) { } } | PatternLayoutEncoder consoleEncoder = new PatternLayoutEncoder ( ) ; consoleEncoder . setContext ( loggerContext ) ; consoleEncoder . setPattern ( logPattern ) ; consoleEncoder . start ( ) ; ConsoleAppender < ILoggingEvent > consoleAppender = new ConsoleAppender < > ( ) ; consoleAppender . setContext ( loggerContext ) ; consoleAppender . setEncoder ( consoleEncoder ) ; consoleAppender . setName ( name ) ; consoleAppender . setTarget ( "System.out" ) ; consoleAppender . start ( ) ; return consoleAppender ; |
public class Blueprint {
    /**
     * Serializes the blueprint into its binary representation.
     *
     * @param description human-readable description of the blueprint
     * @param rootDir     root directory whose id is persisted
     * @param icon        optional icon blob; may be {@code null}
     * @return the serialized bytes
     */
    private static byte[] persist(String description, Directory rootDir, Blob icon) {
        ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
        DataOutputStream dataOutputStream = new DataOutputStream(arrayOutputStream);
        try {
            // Always write the current binary version first.
            dataOutputStream.writeByte(CURRENT_BINARY_VERSION);
            // For compatibility reasons the order of the fields must never
            // change. New fields may be appended; bump the version when doing so.
            dataOutputStream.writeUTF(rootDir.id());
            dataOutputStream.writeUTF(description);
            dataOutputStream.writeUTF(icon != null ? icon.id() : "");
        } catch (IOException e) {
            // In-memory streams should not fail; treat any IOException as fatal.
            throw new RuntimeException(e);
        }
        return arrayOutputStream.toByteArray();
    }
}
public class JDBC4Connection { /** * Check if the createStatement ( ) options are supported
* See http : / / docs . oracle . com / javase / 7 / docs / api / index . html ? java / sql / DatabaseMetaData . html
* The following flags are supported :
* - The type must either be TYPE _ SCROLL _ INSENSITIVE or TYPE _ FORWARD _ ONLY .
* - The concurrency must be CONCUR _ READ _ ONLY .
* - The holdability must be CLOSE _ CURSORS _ AT _ COMMIT .
* @ param resultSetType JDBC result set type option
* @ param resultSetConcurrency JDBC result set concurrency option
* @ param resultSetHoldability JDBC result set holdability option
* @ throws SQLException if not supported */
private static void checkCreateStatementSupported ( int resultSetType , int resultSetConcurrency , int resultSetHoldability ) throws SQLException { } } | if ( ( ( resultSetType != ResultSet . TYPE_SCROLL_INSENSITIVE && resultSetType != ResultSet . TYPE_FORWARD_ONLY ) ) || resultSetConcurrency != ResultSet . CONCUR_READ_ONLY || resultSetHoldability != ResultSet . CLOSE_CURSORS_AT_COMMIT ) { throw SQLError . noSupport ( ) ; } |
public class BaseTableLayout { /** * Turns on debug lines . */
public BaseTableLayout < C , T > debug ( Debug debug ) { } } | this . debug = debug ; if ( debug == Debug . none ) toolkit . clearDebugRectangles ( this ) ; else invalidate ( ) ; return this ; |
public class KernelResolverRepository { /** * Find the preferred version of a feature from the list of features
* The decision is made by consulting { @ link # symbolicNameToPreferredVersion } to find out whether the user has configured a preferred version . If so , look for a feature with
* that version .
* If no preferred version has been configured for this symbolic name , or if the preferred version cannot be found in the list , return the latest version .
* @ param symbolicName the symbolic name of the feature
* @ param featureList the list of features , which should all have the same symbolic name
* @ return the best feature from the list */
private ProvisioningFeatureDefinition getPreferredVersion ( String symbolicName , List < ProvisioningFeatureDefinition > featureList ) { } } | Version preferredVersion = symbolicNameToPreferredVersion . get ( symbolicName ) ; ProvisioningFeatureDefinition result = null ; if ( preferredVersion != null ) { for ( ProvisioningFeatureDefinition feature : featureList ) { if ( preferredVersion . equals ( feature . getVersion ( ) ) ) { result = feature ; break ; } } } if ( result == null ) { result = featureList . iterator ( ) . next ( ) ; } return result ; |
public class SpendMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Spend spend , ProtocolMarshaller protocolMarshaller ) { } } | if ( spend == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( spend . getAmount ( ) , AMOUNT_BINDING ) ; protocolMarshaller . marshall ( spend . getUnit ( ) , UNIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CorrelationDistance { /** * Pearson correlation distance between the two arrays of type int . */
public static double pearson ( int [ ] x , int [ ] y ) { } } | if ( x . length != y . length ) throw new IllegalArgumentException ( String . format ( "Arrays have different length: x[%d], y[%d]" , x . length , y . length ) ) ; return 1 - Math . cor ( x , y ) ; |
public class MyfacesLogRecord {
    /**
     * Private method to infer the caller's class and method names by walking
     * the current stack trace past the logging infrastructure frames.
     */
    private void _inferCaller() {
        _needToInferCaller = false;
        // Get the stack trace.
        StackTraceElement stack[] = (new Throwable()).getStackTrace();
        // First, search back to a method in the Logger class.
        int i = 0;
        while (i < stack.length) {
            StackTraceElement frame = stack[i];
            String cname = frame.getClassName();
            if (cname.equals(_JDK_LOG_CLASS) || cname.equals(_MYFACES_LOG_CLASS)) {
                break;
            }
            i++;
        }
        // Now search for the first frame before the "Logger" class.
        while (i < stack.length) {
            StackTraceElement frame = stack[i];
            String cname = frame.getClassName();
            if (cname.equals(_JDK_LOG_CLASS) || cname.equals(_MYFACES_LOG_CLASS)) {
                // Still inside the logging infrastructure; keep walking.
                i++;
                continue;
            }
            String mname = frame.getMethodName();
            if ("log".equals(mname) || "_log".equals(mname)) {
                // Skip facade methods that merely forward the log call.
                i++;
                continue;
            }
            // We've found the relevant frame.
            setSourceClassName(cname);
            setSourceMethodName(mname);
            return;
        }
        // Forcibly set both to null, so the LogRecord superclass
        // doesn't try to detect them on its own
        setSourceClassName(null);
        setSourceMethodName(null);
    }
}
public class PropertyHelper {
    /**
     * Converts a string value to the given target class.
     * Supported targets: the primitives int, long, float, double, boolean,
     * short and byte; any other target yields the string itself unchanged.
     * (double, short and byte were previously documented but not implemented.)
     *
     * @param value the textual value to convert
     * @param clazz the target class
     * @return the converted value, or {@code value} itself for unsupported targets
     * @throws NumberFormatException if the value cannot be parsed as the numeric target
     */
    public static Object convertToClass(String value, Class<?> clazz) {
        Object propertyValue;
        if (clazz.isAssignableFrom(int.class)) {
            propertyValue = Integer.parseInt(value);
        } else if (clazz.isAssignableFrom(long.class)) {
            propertyValue = Long.parseLong(value);
        } else if (clazz.isAssignableFrom(float.class)) {
            propertyValue = Float.parseFloat(value);
        } else if (clazz.isAssignableFrom(double.class)) {
            propertyValue = Double.parseDouble(value);
        } else if (clazz.isAssignableFrom(short.class)) {
            propertyValue = Short.parseShort(value);
        } else if (clazz.isAssignableFrom(byte.class)) {
            propertyValue = Byte.parseByte(value);
        } else if (clazz.isAssignableFrom(boolean.class)) {
            propertyValue = Boolean.parseBoolean(value);
        } else {
            propertyValue = value;
        }
        return propertyValue;
    }
}
public class SpeechToText {
    /**
     * Reset a custom acoustic model.
     * Resets a custom acoustic model by removing all audio resources from the model. Resetting a custom acoustic model
     * initializes the model to its state when it was first created. Metadata such as the name and language of the model
     * are preserved, but the model's audio resources are removed and must be re-created. You must use credentials for the
     * instance of the service that owns a model to reset it.
     *
     * **See also:** [Resetting a custom acoustic
     * model](https://cloud.ibm.com/docs/services/speech-to-text/acoustic-models.html#resetModel-acoustic).
     *
     * @param resetAcousticModelOptions the {@link ResetAcousticModelOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of Void
     */
    public ServiceCall<Void> resetAcousticModel(ResetAcousticModelOptions resetAcousticModelOptions) {
        Validator.notNull(resetAcousticModelOptions, "resetAcousticModelOptions cannot be null");
        // Build POST v1/acoustic_customizations/{customizationId}/reset.
        String[] pathSegments = { "v1/acoustic_customizations", "reset" };
        String[] pathParameters = { resetAcousticModelOptions.customizationId() };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
        // Attach SDK analytics headers for this operation.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("speech_to_text", "v1", "resetAcousticModel");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        return createServiceCall(builder.build(), ResponseConverterUtils.getVoid());
    }
}
public class GridPanel { /** * returns if a grid contains a certain element
* @ param searchElement the searchElement of the grid element on which the search is done
* @ return true or false */
public boolean isRowPresent ( String searchElement ) { } } | ready ( ) ; boolean found ; GridCell cell = getCell ( searchElement ) ; // scrollTop ( ) ; / / make sure always start from top then scroll down till the end of the page
// do {
// if the row is not in visible ( need to scroll down - errors when used BufferView in grid )
found = cell . isElementPresent ( ) ; // } while ( ! found & & scrollPageDown ( ) ) ;
return found ; |
public class ListUtils { /** * Applies a binary function between each element of the given list .
* @ param list list of elements
* @ param init initial value for the accumulator
* @ param f accumulator expression to apply
* @ param < Accumulator > binary function
* @ param < Value > element type
* @ return an accumulated / aggregated value */
public static < Accumulator , Value > Accumulator reduce ( List < Value > list , Accumulator init , ReduceClosure < Accumulator , Value > f ) { } } | Accumulator accumulator = init ; for ( Value value : list ) { accumulator = f . eval ( accumulator , value ) ; } return accumulator ; |
public class EntityStatisticsProcessor {
    /**
     * Counts additional occurrences of a property as qualifier property of
     * statements.
     *
     * @param usageStatistics
     *            statistics object where count is stored
     * @param property
     *            the property to count
     * @param count
     *            the number of times to count the property
     */
    private void countPropertyQualifier(UsageStatistics usageStatistics, PropertyIdValue property, int count) {
        // Ensures the counters for this property exist before incrementing.
        addPropertyCounters(usageStatistics, property);
        usageStatistics.propertyCountsQualifier.put(property,
                usageStatistics.propertyCountsQualifier.get(property) + count);
    }
}
public class Matrix {
    /**
     * Rotates matrix m by angle a (in degrees) around the axis (x, y, z),
     * writing a 4x4 column-major rotation matrix into {@code rm}.
     *
     * @param rm returns the result
     * @param rmOffset index into rm where the result matrix starts
     * @param a angle to rotate in degrees
     * @param x x component of the rotation axis (javadoc previously said "scale factor")
     * @param y y component of the rotation axis
     * @param z z component of the rotation axis
     */
    public static void setRotateM(float[] rm, int rmOffset, float a, float x, float y, float z) {
        // Fill in the translation column and bottom row common to all cases.
        rm[rmOffset + 3] = 0;
        rm[rmOffset + 7] = 0;
        rm[rmOffset + 11] = 0;
        rm[rmOffset + 12] = 0;
        rm[rmOffset + 13] = 0;
        rm[rmOffset + 14] = 0;
        rm[rmOffset + 15] = 1;
        // Convert degrees to radians.
        a *= (float) (Math.PI / 180.0f);
        float s = (float) Math.sin(a);
        float c = (float) Math.cos(a);
        if (1.0f == x && 0.0f == y && 0.0f == z) {
            // Fast path: rotation about the X axis.
            rm[rmOffset + 5] = c;
            rm[rmOffset + 10] = c;
            rm[rmOffset + 6] = s;
            rm[rmOffset + 9] = -s;
            rm[rmOffset + 1] = 0;
            rm[rmOffset + 2] = 0;
            rm[rmOffset + 4] = 0;
            rm[rmOffset + 8] = 0;
            rm[rmOffset + 0] = 1;
        } else if (0.0f == x && 1.0f == y && 0.0f == z) {
            // Fast path: rotation about the Y axis.
            rm[rmOffset + 0] = c;
            rm[rmOffset + 10] = c;
            rm[rmOffset + 8] = s;
            rm[rmOffset + 2] = -s;
            rm[rmOffset + 1] = 0;
            rm[rmOffset + 4] = 0;
            rm[rmOffset + 6] = 0;
            rm[rmOffset + 9] = 0;
            rm[rmOffset + 5] = 1;
        } else if (0.0f == x && 0.0f == y && 1.0f == z) {
            // Fast path: rotation about the Z axis.
            rm[rmOffset + 0] = c;
            rm[rmOffset + 5] = c;
            rm[rmOffset + 1] = s;
            rm[rmOffset + 4] = -s;
            rm[rmOffset + 2] = 0;
            rm[rmOffset + 6] = 0;
            rm[rmOffset + 8] = 0;
            rm[rmOffset + 9] = 0;
            rm[rmOffset + 10] = 1;
        } else {
            // General case: normalize the axis, then build the rotation matrix
            // from the axis-angle form.
            float len = length(x, y, z);
            if (1.0f != len) {
                float recipLen = 1.0f / len;
                x *= recipLen;
                y *= recipLen;
                z *= recipLen;
            }
            float nc = 1.0f - c;
            float xy = x * y;
            float yz = y * z;
            float zx = z * x;
            float xs = x * s;
            float ys = y * s;
            float zs = z * s;
            rm[rmOffset + 0] = x * x * nc + c;
            rm[rmOffset + 4] = xy * nc - zs;
            rm[rmOffset + 8] = zx * nc + ys;
            rm[rmOffset + 1] = xy * nc + zs;
            rm[rmOffset + 5] = y * y * nc + c;
            rm[rmOffset + 9] = yz * nc - xs;
            rm[rmOffset + 2] = zx * nc - ys;
            rm[rmOffset + 6] = yz * nc + xs;
            rm[rmOffset + 10] = z * z * nc + c;
        }
    }
}
public class ErrorHandler {
    /**
     * Invoked on a log writer error event.
     * The default behavior used here prints the logging source and the error message to
     * the system standard error stream (System.err). If an exception object is given, the
     * exception and the last method calls of the log writer leading to the error are also
     * printed.<br>
     * Only predefined maximum invocations are handled, subsequent calls are ignored.
     *
     * @param source the log writer the error originated from
     * @param msg message describing the error
     * @param e exception related to the error, may be <code>null</code>
     */
    public synchronized void error(LogWriter source, String msg, Exception e) {
        // Stop reporting after the configured number of invocations.
        if (invocations >= maxInvocations)
            return;
        ++invocations;
        String out = source + ": " + msg;
        StackTraceElement[] trace = null;
        if (e != null) {
            out += " (" + e.getMessage() + ")";
            trace = e.getStackTrace();
        }
        // Lock System.err so the message and trace are printed contiguously.
        synchronized (System.err) {
            System.err.println(out);
            final String srcName = source.getClass().getName();
            if (trace != null)
                // Print frames up to and including the first frame inside the
                // log writer class itself; deeper frames are omitted.
                for (int i = 0; i < trace.length; ++i) {
                    System.err.println("\t- " + trace[i]);
                    if (trace[i].getClassName().equals(srcName))
                        break;
                }
        }
    }
}
public class Bundler { /** * Inserts a SparceArray of Parcelable values into the mapping of this
* Bundle , replacing any existing value for the given key . Either key
* or value may be null .
* @ param key a String , or null
* @ param value a SparseArray of Parcelable objects , or null
* @ return this */
public Bundler putSparseParcelableArray ( String key , SparseArray < ? extends Parcelable > value ) { } } | bundle . putSparseParcelableArray ( key , value ) ; return this ; |
public class Utils { /** * Add element to start of list */
public static < T > List < T > addFirst ( List < T > to , T what ) { } } | List < T > data = safeList ( to ) ; data . add ( 0 , what ) ; return data ; |
public class MutableDateTime {
    /**
     * Set the time from another instant.
     * The date part of this object will be unaffected.
     *
     * @param instant an instant to copy the time from, date part ignored
     * @throws IllegalArgumentException if the object is invalid
     */
    public void setTime(final ReadableInstant instant) {
        long instantMillis = DateTimeUtils.getInstantMillis(instant);
        Chronology instantChrono = DateTimeUtils.getInstantChronology(instant);
        DateTimeZone zone = instantChrono.getZone();
        if (zone != null) {
            // Shift to UTC while keeping the same local wall-clock time, so the
            // time-of-day copied below matches what the source zone displayed.
            instantMillis = zone.getMillisKeepLocal(DateTimeZone.UTC, instantMillis);
        }
        setTime(instantMillis);
    }
}
public class ProcessEngineDriver {
    /**
     * Starting a regular process.
     *
     * @param processId ID of the process to be started
     * @param masterRequestId master request correlation id; generated when null
     * @param ownerType type of the owning entity (e.g. DOCUMENT)
     * @param ownerId id of the owning entity
     * @param vars Input parameter bindings for the process instance to be created
     * @param secondaryOwnerType type of the secondary owner, may be null
     * @param secondaryOwnerId id of the secondary owner
     * @param headers request headers bound into the process, may be null
     * @return Process instance ID
     */
    public Long startProcess(Long processId, String masterRequestId, String ownerType, Long ownerId,
            Map<String, String> vars, String secondaryOwnerType, Long secondaryOwnerId,
            Map<String, String> headers) throws Exception {
        Process procdef = getProcessDefinition(processId);
        // Fall back to the configured default when the definition specifies none.
        int performance_level = procdef.getPerformanceLevel();
        if (performance_level <= 0)
            performance_level = default_performance_level_regular;
        EngineDataAccess edao = EngineDataAccessCache.getInstance(false, performance_level);
        InternalMessenger msgBroker = MessengerFactory.newInternalMessenger();
        // do not set internal messenger with cache options, as this engine does not process it directly - Unless PL 9
        if (performance_level >= 9)
            msgBroker.setCacheOption(InternalMessenger.CACHE_ONLY);
        if (masterRequestId == null)
            masterRequestId = genMasterRequestId();
        ProcessExecutor engine = new ProcessExecutor(edao, msgBroker, false);
        ProcessInstance processInst = engine.createProcessInstance(processId, ownerType, ownerId,
                secondaryOwnerType, secondaryOwnerId, masterRequestId, vars);
        // Link the owning document (if any) back to the new process instance.
        if (ownerType.equals(OwnerType.DOCUMENT) && ownerId != 0L) {
            setOwnerDocumentProcessInstanceId(engine, ownerId, processInst.getId(), masterRequestId);
            bindRequestVariable(procdef, ownerId, engine, processInst);
        }
        if (headers != null) {
            bindRequestHeadersVariable(procdef, headers, engine, processInst);
        }
        // Delay for ensuring document content is available for the processing thread.
        // It is also needed to ensure the message is really sent, instead of cached.
        int delay = PropertyManager.getIntegerProperty(PropertyNames.MDW_PROCESS_LAUNCH_DELAY, 2);
        engine.startProcessInstance(processInst, delay);
        return processInst.getId();
    }
}
public class SslRefInfoImpl {
    /**
     * {@inheritDoc}
     *
     * Loads the public keys of all trusted certificate entries in the
     * configured SSL trust store, keyed by certificate alias.
     *
     * @throws SocialLoginException if the keystore service is unavailable or a
     *             certificate/keystore cannot be read
     */
    @Override
    public HashMap<String, PublicKey> getPublicKeys() throws SocialLoginException {
        // Lazily initialize the JSSE helper on first use.
        if (this.jsseHelper == null) {
            init();
        }
        // TODO due to dynamic changes on keyStore, we have to load the public
        // keys everytime.
        HashMap<String, PublicKey> publicKeys = new HashMap<String, PublicKey>();
        if (this.sslTrustStoreName != null) {
            KeyStoreService keyStoreService = keyStoreServiceRef.getService();
            if (keyStoreService == null) {
                throw new SocialLoginException("KEYSTORE_SERVICE_NOT_FOUND", null, new Object[0]);
            }
            Collection<String> names = null;
            try {
                names = keyStoreService.getTrustedCertEntriesInKeyStore(sslTrustStoreName);
            } catch (KeyStoreException e) {
                throw new SocialLoginException("ERROR_LOADING_KEYSTORE_CERTIFICATES", e,
                        new Object[] { sslTrustStoreName, e.getLocalizedMessage() });
            }
            Iterator<String> aliasNames = names.iterator();
            while (aliasNames.hasNext()) {
                String aliasName = aliasNames.next();
                PublicKey publicKey = null;
                try {
                    // Resolve each trusted certificate's public key by alias.
                    publicKey = keyStoreService.getCertificateFromKeyStore(sslTrustStoreName, aliasName).getPublicKey();
                } catch (GeneralSecurityException e) {
                    throw new SocialLoginException("ERROR_LOADING_CERTIFICATE", e,
                            new Object[] { aliasName, sslTrustStoreName, e.getLocalizedMessage() });
                }
                publicKeys.put(aliasName, publicKey);
            }
        }
        return publicKeys;
    }
}
public class BigDecimalMath { /** * Calculates the arc hyperbolic cotangens ( inverse hyperbolic cotangens ) of { @ link BigDecimal } x .
* < p > See : < a href = " https : / / en . wikipedia . org / wiki / Hyperbolic _ function " > Wikipedia : Hyperbolic function < / a > < / p >
* @ param x the { @ link BigDecimal } to calculate the arc hyperbolic cotangens for
* @ param mathContext the { @ link MathContext } used for the result
* @ return the calculated arc hyperbolic cotangens { @ link BigDecimal } with the precision specified in the < code > mathContext < / code >
* @ throws UnsupportedOperationException if the { @ link MathContext } has unlimited precision */
public static BigDecimal acoth ( BigDecimal x , MathContext mathContext ) { } } | checkMathContext ( mathContext ) ; MathContext mc = new MathContext ( mathContext . getPrecision ( ) + 6 , mathContext . getRoundingMode ( ) ) ; BigDecimal result = log ( x . add ( ONE , mc ) . divide ( x . subtract ( ONE , mc ) , mc ) , mc ) . divide ( TWO , mc ) ; return round ( result , mathContext ) ; |
public class AppServicePlansInner { /** * Get the send key name and value of a Hybrid Connection .
* Get the send key name and value of a Hybrid Connection .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service plan .
* @ param namespaceName The name of the Service Bus namespace .
* @ param relayName The name of the Service Bus relay .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the HybridConnectionKeyInner object if successful . */
public HybridConnectionKeyInner listHybridConnectionKeys ( String resourceGroupName , String name , String namespaceName , String relayName ) { } } | return listHybridConnectionKeysWithServiceResponseAsync ( resourceGroupName , name , namespaceName , relayName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class PropertiesManager { /** * Retrieve the value of the given property as an integer . If the current
* value of the specified property cannot be converted to an integer , the
* default value will be retrieved .
* @ param property
* the property to retrieve
* @ return the integer value of the given property or the default value if
* the current value is not a valid integer
* @ throws NumberFormatException
* if both the current and default values are not integers */
public int getIntegerPropertyFallback ( T property ) throws NumberFormatException { } } | try { return getIntegerProperty ( property ) ; } catch ( NumberFormatException e ) { return Integer . parseInt ( getDefaultProperty ( property ) ) ; } |
public class AbstractQuery { /** * Returns the providers and their ordering to be considered . This information typically must be interpreted by the
* singleton SPI implementations , which are backing the singleton accessors .
* If the list returned is empty , the default provider list ,
* determined by methods like { @ code getDefaultProviderNames ( ) } should be used .
* @ return the ordered providers , never null . */
public List < String > getProviderNames ( ) { } } | @ SuppressWarnings ( "unchecked" ) List < String > result = get ( KEY_QUERY_PROVIDERS , List . class ) ; if ( result == null ) { return Collections . emptyList ( ) ; } return result ; |
public class StringSerializer { /** * ( non - Javadoc )
* @ see Serializer # deserialize ( byte [ ] , java . lang . Class ) */
@ SuppressWarnings ( "unchecked" ) @ Override public < T > T deserialize ( byte [ ] content , Class < T > clazz ) throws SerializerException { } } | validateString ( clazz ) ; return ( T ) new String ( content , Charsets . UTF_8 ) ; |
public class LoggerFactory { /** * Resort to reflection to make any FileHandler that is currently active roll - over
* the current logfile and start a new one , e . g . this may be useful before a big
* batch - job starts to have a clean start of log - entries .
* @ throws IllegalStateException If there is a problem calling the rotate - method of the { @ link FileHandler } */
@ SuppressForbidden ( reason = "Could not do this without reflection" ) public static void rolloverLogfile ( ) { } } | Logger log = Logger . getLogger ( "" ) ; // NOSONAR - local logger used on purpose here
for ( Handler handler : log . getHandlers ( ) ) { if ( handler instanceof FileHandler ) { try { Method m = FileHandler . class . getDeclaredMethod ( "rotate" ) ; m . setAccessible ( true ) ; if ( ! Level . OFF . equals ( handler . getLevel ( ) ) ) { // Assume not closed .
m . invoke ( handler ) ; } } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e ) { throw new IllegalStateException ( e ) ; } } } |
public class ProvisionerImpl { /** * Set the start level of the framework , and listen for framework events to
* ensure we wait until the start level operation is complete before
* continuing ( due to timing , this translates into waiting until the next
* start level event is fired . . we don ' t necessarily know that it ' s ours . . ) .
* @ param level
* StartLevel to change to
* @ return BundleStartStatus containing any exceptions encountered during
* the StartLevel change operation . */
protected BundleStartStatus setFrameworkStartLevel ( int level ) { } } | BundleStartStatus startStatus = null ; if ( frameworkStartLevel != null ) { // The framework listener passed as a parameter will be notified with a
// FrameworkEvent ( START _ LEVEL _ CHANGED or ERROR ) when the setStartLevel
// operation completes .
StartLevelFrameworkListener slfw = new StartLevelFrameworkListener ( true ) ; frameworkStartLevel . setStartLevel ( level , slfw ) ; slfw . waitForLevel ( ) ; startStatus = slfw . getStatus ( ) ; } else startStatus = new BundleStartStatus ( ) ; return startStatus ; |
public class JQMRadioset { /** * Returns the value of the button that has the given id
* @ return the value of the button with the given id */
private String getValueForId ( String id ) { } } | for ( int k = 0 ; k < fieldset . getWidgetCount ( ) ; k ++ ) { Widget widget = fieldset . getWidget ( k ) ; if ( id . equals ( widget . getElement ( ) . getAttribute ( "id" ) ) ) return widget . getElement ( ) . getAttribute ( "value" ) ; } return null ; |
public class HtmlGL20 { /** * The content of non - VBO buffers may be changed between the glVertexAttribPointer call
* and the glDrawXxx call . Thus , we need to defer copying them to a VBO buffer until just
* before the actual glDrawXxx call . */
protected void prepareDraw ( ) { } } | VertexAttribArrayState previousNio = null ; int previousElementSize = 0 ; if ( useNioBuffer == 0 && enabledArrays == previouslyEnabledArrays ) { return ; } for ( int i = 0 ; i < VERTEX_ATTRIB_ARRAY_COUNT ; i ++ ) { int mask = 1 << i ; int enabled = enabledArrays & mask ; if ( enabled != ( previouslyEnabledArrays & mask ) ) { if ( enabled != 0 ) { gl . enableVertexAttribArray ( i ) ; } else { gl . disableVertexAttribArray ( i ) ; } } if ( enabled != 0 && ( useNioBuffer & mask ) != 0 ) { VertexAttribArrayState data = vertexAttribArrayState [ i ] ; if ( previousNio != null && previousNio . nioBuffer == data . nioBuffer && previousNio . nioBufferLimit >= data . nioBufferLimit ) { if ( boundArrayBuffer != previousNio . webGlBuffer ) { gl . bindBuffer ( ARRAY_BUFFER , previousNio . webGlBuffer ) ; boundArrayBuffer = data . webGlBuffer ; } gl . vertexAttribPointer ( i , data . size , data . type , data . normalize , data . stride , data . nioBufferPosition * previousElementSize ) ; } else { if ( boundArrayBuffer != data . webGlBuffer ) { gl . bindBuffer ( ARRAY_BUFFER , data . webGlBuffer ) ; boundArrayBuffer = data . webGlBuffer ; } int elementSize = getElementSize ( data . nioBuffer ) ; int savePosition = data . nioBuffer . position ( ) ; if ( data . nioBufferPosition * elementSize < data . stride ) { data . nioBuffer . position ( 0 ) ; gl . bufferData ( ARRAY_BUFFER , getTypedArray ( data . nioBuffer , data . type , data . nioBufferLimit * elementSize ) , STREAM_DRAW ) ; gl . vertexAttribPointer ( i , data . size , data . type , data . normalize , data . stride , data . nioBufferPosition * elementSize ) ; previousNio = data ; previousElementSize = elementSize ; } else { data . nioBuffer . position ( data . nioBufferPosition ) ; gl . bufferData ( ARRAY_BUFFER , getTypedArray ( data . nioBuffer , data . type , ( data . nioBufferLimit - data . nioBufferPosition ) * elementSize ) , STREAM_DRAW ) ; gl . vertexAttribPointer ( i , data . 
size , data . type , data . normalize , data . stride , 0 ) ; } data . nioBuffer . position ( savePosition ) ; } } } previouslyEnabledArrays = enabledArrays ; |
public class AuthorizationImpl { /** * Converts an < code > IGroupMember < / code > into an < code > IAuthorizationPrincipal < / code > .
* @ return org . apereo . portal . security . IAuthorizationPrincipal
* @ param groupMember org . apereo . portal . groups . IGroupMember */
@ Override public IAuthorizationPrincipal newPrincipal ( IGroupMember groupMember ) throws GroupsException { } } | String key = groupMember . getKey ( ) ; Class type = groupMember . getType ( ) ; logger . debug ( "AuthorizationImpl.newPrincipal(): for {} ({})" , type , key ) ; return newPrincipal ( key , type ) ; |
public class Configuration { /** * Override multiple numeric bindings , both begin and end are inclusive
* @ param beginTotal inclusive start of range
* @ param endTotal inclusive end of range
* @ param beginDecimal inclusive start of range
* @ param endDecimal inclusive end of range
* @ param javaType java type */
public void registerNumeric ( int beginTotal , int endTotal , int beginDecimal , int endDecimal , Class < ? > javaType ) { } } | for ( int total = beginTotal ; total <= endTotal ; total ++ ) { for ( int decimal = beginDecimal ; decimal <= endDecimal ; decimal ++ ) { registerNumeric ( total , decimal , javaType ) ; } } |
public class TableInfo { /** * Returns a builder for a { @ code TableInfo } object given table identity and definition . Use
* { @ link StandardTableDefinition } to create simple BigQuery table . Use { @ link ViewDefinition } to
* create a BigQuery view . Use { @ link ExternalTableDefinition } to create a BigQuery table backed
* by external data . */
public static Builder newBuilder ( TableId tableId , TableDefinition definition ) { } } | return new BuilderImpl ( ) . setTableId ( tableId ) . setDefinition ( definition ) ; |
public class DefaultLoginWebflowConfigurer { /** * Create authentication warning messages view .
* @ param flow the flow */
protected void createAuthenticationWarningMessagesView ( final Flow flow ) { } } | val state = createViewState ( flow , CasWebflowConstants . STATE_ID_SHOW_AUTHN_WARNING_MSGS , "casLoginMessageView" ) ; val setAction = createSetAction ( "requestScope.messages" , "messageContext.allMessages" ) ; state . getEntryActionList ( ) . add ( setAction ) ; createTransitionForState ( state , CasWebflowConstants . TRANSITION_ID_PROCEED , CasWebflowConstants . STATE_ID_PROCEED_FROM_AUTHENTICATION_WARNINGS_VIEW ) ; val proceedAction = createActionState ( flow , CasWebflowConstants . STATE_ID_PROCEED_FROM_AUTHENTICATION_WARNINGS_VIEW ) ; proceedAction . getActionList ( ) . add ( createEvaluateAction ( CasWebflowConstants . ACTION_ID_SEND_TICKET_GRANTING_TICKET ) ) ; createStateDefaultTransition ( proceedAction , CasWebflowConstants . STATE_ID_SERVICE_CHECK ) ; |
public class Job { /** * / * package */
final Result runJob ( ) { } } | try { // daily jobs check the requirements manually
if ( this instanceof DailyJob || meetsRequirements ( true ) ) { mResult = onRunJob ( getParams ( ) ) ; } else { mResult = getParams ( ) . isPeriodic ( ) ? Result . FAILURE : Result . RESCHEDULE ; } return mResult ; } finally { mFinishedTimeStamp = System . currentTimeMillis ( ) ; } |
public class HybridJPAObjectFactory { /** * Checks access to the specified JPA reference and returns true if the
* current call to { @ link # getObjectInstance } is in the context of a
* Stateful Session bean . < p >
* By default , this method will return what is stored in the info object as
* passed by the isSFSB parameter , which will be the correct answer for EJB
* modules and WAR modules that do not contain EJBs . < p >
* This method is overridden here to support the EJBs in War module scenario .
* In this scenario , the type of EJB using a persistence context cannot
* be determined until the time of injection or lookup . At that time ,
* the EJB type may be determined from the CMD on the thread .
* @ param info the information associated with the current object creation
* @ return true if the object is being created in a Stateful bean context
* @ throws InjectionException if an invalid access is detected */
@ Override protected boolean checkSFSBAccess ( JPAJndiLookupInfo info , boolean isSFSB ) throws InjectionException { } } | ComponentMetaData cmd = ComponentMetaDataAccessorImpl . getComponentMetaDataAccessor ( ) . getComponentMetaData ( ) ; final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "checkSFSBAccess: " + info + ", " + ( cmd == null ? null : cmd . getJ2EEName ( ) ) ) ; if ( cmd instanceof BeanMetaData ) { BeanMetaData bmd = ( BeanMetaData ) cmd ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "stateful session bean" ) ; isSFSB = bmd . isStatefulSessionBean ( ) ; String refName = info . getReferenceName ( ) ; if ( isSFSB && ! bmd . ivPersistenceRefNames . contains ( refName ) ) // F743-30682
{ Tr . error ( tc , "PERSISTENCE_REF_DEPENDENCY_NOT_DECLARED_CNTR0315E" , bmd . j2eeName . getComponent ( ) , bmd . j2eeName . getModule ( ) , bmd . j2eeName . getApplication ( ) , refName ) ; String msg = Tr . formatMessage ( tc , "PERSISTENCE_REF_DEPENDENCY_NOT_DECLARED_CNTR0315E" , bmd . j2eeName . getComponent ( ) , bmd . j2eeName . getModule ( ) , bmd . j2eeName . getApplication ( ) , refName ) ; throw new InjectionException ( msg ) ; } } else { // even though the resource ref may have been defined in a stateful bean , the lookup
// was not within the context of a stateful bean . if this is a lookup of an ExPC , it
// should not be allowed ( super will verify ) .
isSFSB = false ; } boolean result = super . checkSFSBAccess ( info , isSFSB ) ; // F743-30682
if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "checkSFSBAccess: " + result ) ; return result ; |
public class AssociateMemberAccountRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( AssociateMemberAccountRequest associateMemberAccountRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( associateMemberAccountRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( associateMemberAccountRequest . getMemberAccountId ( ) , MEMBERACCOUNTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AbstractFramedChannel { /** * Flushes all ready stream sink conduits to the channel .
* Frames will be batched up , to allow them all to be written out via a gathering
* write . The { @ link # framePriority } implementation will be invoked to decide which
* frames are eligible for sending and in what order . */
protected synchronized void flushSenders ( ) { } } | if ( flushingSenders ) { throw UndertowMessages . MESSAGES . recursiveCallToFlushingSenders ( ) ; } flushingSenders = true ; try { int toSend = 0 ; while ( ! newFrames . isEmpty ( ) ) { S frame = newFrames . poll ( ) ; frame . preWrite ( ) ; if ( framePriority . insertFrame ( frame , pendingFrames ) ) { if ( ! heldFrames . isEmpty ( ) ) { framePriority . frameAdded ( frame , pendingFrames , heldFrames ) ; } } else { heldFrames . add ( frame ) ; } } boolean finalFrame = false ; ListIterator < S > it = pendingFrames . listIterator ( ) ; while ( it . hasNext ( ) ) { S sender = it . next ( ) ; if ( sender . isReadyForFlush ( ) ) { ++ toSend ; } else { break ; } if ( sender . isLastFrame ( ) ) { finalFrame = true ; } } if ( toSend == 0 ) { // if there is nothing to send we just attempt a flush on the underlying channel
try { if ( channel . getSinkChannel ( ) . flush ( ) ) { channel . getSinkChannel ( ) . suspendWrites ( ) ; } } catch ( Throwable e ) { safeClose ( channel ) ; markWritesBroken ( e ) ; } return ; } ByteBuffer [ ] data = new ByteBuffer [ toSend * 3 ] ; int j = 0 ; it = pendingFrames . listIterator ( ) ; try { while ( j < toSend ) { S next = it . next ( ) ; // todo : rather than adding empty buffers just store the offsets
SendFrameHeader frameHeader = next . getFrameHeader ( ) ; PooledByteBuffer frameHeaderByteBuffer = frameHeader . getByteBuffer ( ) ; ByteBuffer frameTrailerBuffer = frameHeader . getTrailer ( ) ; data [ j * 3 ] = frameHeaderByteBuffer != null ? frameHeaderByteBuffer . getBuffer ( ) : Buffers . EMPTY_BYTE_BUFFER ; data [ ( j * 3 ) + 1 ] = next . getBuffer ( ) == null ? Buffers . EMPTY_BYTE_BUFFER : next . getBuffer ( ) ; data [ ( j * 3 ) + 2 ] = frameTrailerBuffer != null ? frameTrailerBuffer : Buffers . EMPTY_BYTE_BUFFER ; ++ j ; } long toWrite = Buffers . remaining ( data ) ; long res ; do { res = channel . getSinkChannel ( ) . write ( data ) ; toWrite -= res ; } while ( res > 0 && toWrite > 0 ) ; int max = toSend ; while ( max > 0 ) { S sinkChannel = pendingFrames . get ( 0 ) ; PooledByteBuffer frameHeaderByteBuffer = sinkChannel . getFrameHeader ( ) . getByteBuffer ( ) ; ByteBuffer frameTrailerBuffer = sinkChannel . getFrameHeader ( ) . getTrailer ( ) ; if ( frameHeaderByteBuffer != null && frameHeaderByteBuffer . getBuffer ( ) . hasRemaining ( ) || sinkChannel . getBuffer ( ) != null && sinkChannel . getBuffer ( ) . hasRemaining ( ) || frameTrailerBuffer != null && frameTrailerBuffer . hasRemaining ( ) ) { break ; } sinkChannel . flushComplete ( ) ; pendingFrames . remove ( sinkChannel ) ; max -- ; } if ( ! pendingFrames . isEmpty ( ) || ! channel . getSinkChannel ( ) . flush ( ) ) { channel . getSinkChannel ( ) . resumeWrites ( ) ; } else { channel . getSinkChannel ( ) . suspendWrites ( ) ; } if ( pendingFrames . isEmpty ( ) && finalFrame ) { // all data has been sent . Close gracefully
channel . getSinkChannel ( ) . shutdownWrites ( ) ; if ( ! channel . getSinkChannel ( ) . flush ( ) ) { channel . getSinkChannel ( ) . setWriteListener ( ChannelListeners . flushingChannelListener ( null , null ) ) ; channel . getSinkChannel ( ) . resumeWrites ( ) ; } } } catch ( IOException | RuntimeException | Error e ) { safeClose ( channel ) ; markWritesBroken ( e ) ; } } finally { flushingSenders = false ; if ( ! newFrames . isEmpty ( ) ) { runInIoThread ( new Runnable ( ) { @ Override public void run ( ) { flushSenders ( ) ; } } ) ; } } |
public class AbstractCounterFactory { /** * { @ inheritDoc } */
@ Override public ICounter getCounter ( String name ) { } } | try { return counterCache . get ( name ) ; } catch ( ExecutionException e ) { throw new RuntimeException ( e ) ; } |
public class VaultsInner { /** * Creates or updates a Recovery Services vault .
* @ param resourceGroupName The name of the resource group where the recovery services vault is present .
* @ param vaultName The name of the recovery services vault .
* @ param vault Recovery Services Vault to be created .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the VaultInner object if successful . */
public VaultInner createOrUpdate ( String resourceGroupName , String vaultName , VaultInner vault ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , vaultName , vault ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class SmbComTransactionResponse { /** * { @ inheritDoc }
* @ see jcifs . internal . smb1 . ServerMessageBlock # decode ( byte [ ] , int ) */
@ Override public int decode ( byte [ ] buffer , int bufferIndex ) throws SMBProtocolDecodingException { } } | int len = super . decode ( buffer , bufferIndex ) ; if ( this . byteCount == 0 ) { // otherwise hasMore may not be correctly set
readBytesWireFormat ( buffer , len + bufferIndex ) ; } nextElement ( ) ; return len ; |
public class HandlerSocketCache { /** * Opens a connection and a session to a HandlerSocket server . Specify the table ' s index to
* hit as well as the columns you want . A common index is PRIMARY - for the primary
* key . You may also have a multi - column key named NAME _ EMAIL with columns ( ` name ` , ` email ` ) . */
private IndexSession getSession ( String index , String ... columns ) throws PersistenceException { } } | IndexSession session = null ; String sessionKey = index + Arrays . hashCode ( columns ) ; // both index and columns play a role in opening a session
try { if ( hsClient == null ) { hsClient = new HSClientImpl ( handlerSocketHost , port , poolSize ) ; } session = indexSessions . get ( sessionKey ) ; if ( session == null ) { session = hsClient . openIndexSession ( database , this . getSqlNameForClassName ( getEntityClassName ( ) ) , index , columns ) ; indexSessions . put ( sessionKey , session ) ; } return session ; } catch ( Exception e ) { throw new PersistenceException ( e ) ; } |
public class IntStreamEx { /** * Returns a sequential { @ link IntStreamEx } with the specified range of the
* specified array as its source .
* @ param array the array , assumed to be unmodified during use
* @ param startInclusive the first index to cover , inclusive
* @ param endExclusive index immediately past the last index to cover
* @ return an { @ code IntStreamEx } for the array range
* @ throws ArrayIndexOutOfBoundsException if { @ code startInclusive } is
* negative , { @ code endExclusive } is less than
* { @ code startInclusive } , or { @ code endExclusive } is greater than
* the array size
* @ since 0.1.1
* @ see Arrays # stream ( int [ ] , int , int ) */
public static IntStreamEx of ( int [ ] array , int startInclusive , int endExclusive ) { } } | return of ( Arrays . spliterator ( array , startInclusive , endExclusive ) ) ; |
public class JsAdminFactoryImpl { @ Override public BaseDestinationDefinition createBaseDestinationDefinition ( DestinationType type , String name ) { } } | return new BaseDestinationDefinitionImpl ( type , name ) ; |
public class URIUtils { /** * Helper method for modifying URI port
* @ param uri
* @ param newPort
* @ return */
public static String modifyURIPort ( String uri , int newPort ) { } } | try { URI uriObj = new URI ( uri ) ; return uriToString ( URLUtils . modifyURIPort ( uriObj , newPort ) ) ; } catch ( URISyntaxException e ) { try { return ( new NetworkInterfaceURI ( uri ) ) . modifyURIPort ( newPort ) ; } catch ( IllegalArgumentException ne ) { throw new IllegalArgumentException ( ne . getMessage ( ) , ne ) ; } } |
public class GVRPicker { /** * Gets the pick ray in world coordinates .
* World coordinates are defined as the coordinate system at the
* root of the scene graph before any camera viewing transformation
* is applied .
* You can get the pick ray relative to the scene object that
* owns the picker ( or the camera if no owner ) by calling
* { @ link # getPickRay ( ) }
* @ param origin world coordinate origin of the pick ray
* @ param direction world coordinate direction of the pick ray
* @ see # getPickRay ( )
* @ see # setPickRay ( float , float , float , float , float , float ) */
public final void getWorldPickRay ( Vector3f origin , Vector3f direction ) { } } | GVRSceneObject owner = getOwnerObject ( ) ; if ( owner == null ) // should never come here , picker always
{ // owned by GVRGearCursorController pivot
owner = mScene . getMainCameraRig ( ) . getHeadTransformObject ( ) ; } Matrix4f mtx = owner . getTransform ( ) . getModelMatrix4f ( ) ; origin . set ( mRayOrigin ) ; direction . set ( mRayDirection ) ; origin . mulPosition ( mtx ) ; // get ray in world coordinates
direction . mulDirection ( mtx ) ; direction . normalize ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.