signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class UpdateAcceleratorRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateAcceleratorRequest updateAcceleratorRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( updateAcceleratorRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateAcceleratorRequest . getAcceleratorArn ( ) , ACCELERATORARN_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorRequest . getIpAddressType ( ) , IPADDRESSTYPE_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorRequest . getEnabled ( ) , ENABLED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class TagSetMappingsToNAF { /** * Mapping between EAGLES PAROLE Ancora tagset and NAF .
* @ param postag
* the postag
* @ return the mapping to NAF pos tagset */
private static String mapSpanishAncoraTagSetToNAF ( final String postag ) { } }
|
if ( postag . equalsIgnoreCase ( "RG" ) || postag . equalsIgnoreCase ( "RN" ) ) { return "A" ; // adverb
} else if ( postag . equalsIgnoreCase ( "CC" ) || postag . equalsIgnoreCase ( "CS" ) ) { return "C" ; // conjunction
} else if ( postag . startsWith ( "D" ) ) { return "D" ; // det predeterminer
} else if ( postag . startsWith ( "A" ) ) { return "G" ; // adjective
} else if ( postag . startsWith ( "NC" ) ) { return "N" ; // common noun
} else if ( postag . startsWith ( "NP" ) ) { return "R" ; // proper noun
} else if ( postag . startsWith ( "SP" ) ) { return "P" ; // preposition
} else if ( postag . startsWith ( "P" ) ) { return "Q" ; // pronoun
} else if ( postag . startsWith ( "V" ) ) { return "V" ; // verb
} else { return "O" ; // other
}
|
public class CommerceNotificationTemplateUtil { /** * Removes all the commerce notification templates where groupId = & # 63 ; and type = & # 63 ; and enabled = & # 63 ; from the database .
* @ param groupId the group ID
* @ param type the type
* @ param enabled the enabled */
public static void removeByG_T_E ( long groupId , String type , boolean enabled ) { } }
|
getPersistence ( ) . removeByG_T_E ( groupId , type , enabled ) ;
|
public class RetryPolicy { /** * Sets the max number of execution attempts to perform . { @ code - 1 } indicates no limit . This method has the same
* effect as setting 1 more than { @ link # withMaxRetries ( int ) } . For example , 2 retries equal 3 attempts .
* @ throws IllegalArgumentException if { @ code maxAttempts } is 0 or less than - 1
* @ see # withMaxRetries ( int ) */
public RetryPolicy < R > withMaxAttempts ( int maxAttempts ) { } }
|
Assert . isTrue ( maxAttempts != 0 , "maxAttempts cannot be 0" ) ; Assert . isTrue ( maxAttempts >= - 1 , "maxAttempts cannot be less than -1" ) ; this . maxRetries = maxAttempts == - 1 ? - 1 : maxAttempts - 1 ; return this ;
|
public class DNSSEC { /** * Creates a byte array containing the concatenation of the fields of the
* SIG ( 0 ) record and the message to be signed . This does not perform
* a cryptographic digest .
* @ param sig The SIG record used to sign the rrset .
* @ param msg The message to be signed .
* @ param previous If this is a response , the signature from the query .
* @ return The data to be cryptographically signed . */
public static byte [ ] digestMessage ( SIGRecord sig , Message msg , byte [ ] previous ) { } }
|
DNSOutput out = new DNSOutput ( ) ; digestSIG ( out , sig ) ; if ( previous != null ) out . writeByteArray ( previous ) ; msg . toWire ( out ) ; return out . toByteArray ( ) ;
|
public class CQLStatementCache { /** * Create a prepared statement for the given query / table combo . */
private PreparedStatement prepareQuery ( String tableName , Query query ) { } }
|
// All queries start with SELECT * FROM < keyspace > . < table >
StringBuilder cql = new StringBuilder ( "SELECT * FROM " ) ; cql . append ( m_keyspace ) ; cql . append ( "." ) ; cql . append ( tableName ) ; switch ( query ) { case SELECT_1_ROW_1_COLUMN : cql . append ( " WHERE key = ? AND column1 = ?;" ) ; break ; case SELECT_1_ROW_COLUMN_RANGE : cql . append ( " WHERE key = ? AND column1 >= ? AND column1 < ? LIMIT ?;" ) ; break ; case SELECT_1_ROW_COLUMN_SET : cql . append ( " WHERE key = ? AND column1 IN ?;" ) ; break ; case SELECT_1_ROW_ALL_COLUMNS : cql . append ( " WHERE key = ? LIMIT ?;" ) ; break ; case SELECT_1_ROW_UPPER_COLUMNS : cql . append ( " WHERE key = ? AND column1 >= ? LIMIT ?;" ) ; break ; case SELECT_1_ROW_LOWER_COLUMNS : cql . append ( " WHERE key = ? AND column1 <= ? LIMIT ?;" ) ; break ; case SELECT_ROWS_RANGE : // unfortunately I didn ' t find how to get first column of each row in CQL .
cql . append ( " ;" ) ; break ; default : throw new RuntimeException ( "Not supported: " + query ) ; } m_logger . debug ( "Preparing query {}: {}" , query , cql ) ; return ( ( CQLService ) DBService . instance ( m_tenant ) ) . getSession ( ) . prepare ( cql . toString ( ) ) ;
|
public class ApiOvhEmaildomain { /** * Get this object properties
* REST : GET / email / domain / delegatedAccount / { email } / responder
* @ param email [ required ] Email */
public OvhResponderAccount delegatedAccount_email_responder_GET ( String email ) throws IOException { } }
|
String qPath = "/email/domain/delegatedAccount/{email}/responder" ; StringBuilder sb = path ( qPath , email ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhResponderAccount . class ) ;
|
public class AbstractCommandLineRunner { /** * Creates inputs from a list of source files , zips and json files .
* < p > Can be overridden by subclasses who want to pull files from different places .
* @ param files A list of flag entries indicates js and zip file names
* @ param jsonFiles A list of json encoded files .
* @ param allowStdIn Whether ' - ' is allowed appear as a filename to represent stdin . If true , ' - '
* is only allowed to appear once .
* @ param jsModuleSpecs A list js module specs .
* @ return An array of inputs */
@ GwtIncompatible ( "Unnecessary" ) protected List < SourceFile > createInputs ( List < FlagEntry < JsSourceType > > files , List < JsonFileSpec > jsonFiles , boolean allowStdIn , List < JsModuleSpec > jsModuleSpecs ) throws IOException { } }
|
List < SourceFile > inputs = new ArrayList < > ( files . size ( ) ) ; boolean usingStdin = false ; int jsModuleIndex = 0 ; JsModuleSpec jsModuleSpec = Iterables . getFirst ( jsModuleSpecs , null ) ; int cumulatedInputFilesExpected = jsModuleSpec == null ? Integer . MAX_VALUE : jsModuleSpec . getNumInputs ( ) ; for ( int i = 0 ; i < files . size ( ) ; i ++ ) { FlagEntry < JsSourceType > file = files . get ( i ) ; String filename = file . value ; if ( file . flag == JsSourceType . JS_ZIP ) { if ( ! "-" . equals ( filename ) ) { List < SourceFile > newFiles = SourceFile . fromZipFile ( filename , inputCharset ) ; // Update the manifest maps for new zip entries .
if ( rootRelativePathsMap . containsKey ( filename ) ) { String rootFilename = rootRelativePathsMap . get ( filename ) ; for ( SourceFile zipEntry : newFiles ) { String zipEntryName = zipEntry . getName ( ) ; checkState ( zipEntryName . contains ( filename ) ) ; String zipmap = zipEntryName . replace ( filename , rootFilename ) ; rootRelativePathsMap . put ( zipEntryName , zipmap ) ; } } inputs . addAll ( newFiles ) ; if ( jsModuleSpec != null ) { jsModuleSpec . numJsFiles += newFiles . size ( ) - 1 ; } } } else if ( ! "-" . equals ( filename ) ) { SourceKind kind = file . flag == JsSourceType . WEAKDEP ? SourceKind . WEAK : SourceKind . STRONG ; SourceFile newFile = SourceFile . fromFile ( filename , inputCharset , kind ) ; inputs . add ( newFile ) ; } else { if ( ! allowStdIn ) { throw new FlagUsageException ( "Can't specify stdin." ) ; } if ( usingStdin ) { throw new FlagUsageException ( "Can't specify stdin twice." ) ; } if ( ! config . outputManifests . isEmpty ( ) ) { throw new FlagUsageException ( "Manifest files cannot be generated when the input is from stdin." ) ; } if ( ! config . outputBundles . isEmpty ( ) ) { throw new FlagUsageException ( "Bundle files cannot be generated when the input is from stdin." ) ; } this . err . println ( WAITING_FOR_INPUT_WARNING ) ; inputs . add ( SourceFile . fromInputStream ( "stdin" , this . in , inputCharset ) ) ; usingStdin = true ; } if ( i >= cumulatedInputFilesExpected - 1 ) { jsModuleIndex ++ ; if ( jsModuleIndex < jsModuleSpecs . size ( ) ) { jsModuleSpec = jsModuleSpecs . get ( jsModuleIndex ) ; cumulatedInputFilesExpected += jsModuleSpec . getNumInputs ( ) ; } } } if ( jsonFiles != null ) { for ( JsonFileSpec jsonFile : jsonFiles ) { inputs . add ( SourceFile . fromCode ( jsonFile . getPath ( ) , jsonFile . getSrc ( ) ) ) ; } } for ( JSError error : removeDuplicateZipEntries ( inputs , jsModuleSpecs ) ) { compiler . report ( error ) ; } return inputs ;
|
public class WidgetsHtmlPanel { /** * Check if the { @ link Element Element } < code > root < / code > is attached to the
* widget . If it is the case , adopt the widget . If not , check if the chidren
* are linked to a widget to adopt them . */
protected void adoptSubWidgets ( Element root ) { } }
|
GQuery children = $ ( root ) . children ( ) ; for ( Element child : children . elements ( ) ) { Widget w = $ ( child ) . widget ( ) ; if ( w != null ) { doAdopt ( w ) ; } else { adoptSubWidgets ( child ) ; } }
|
public class ApiOvhSms { /** * Add one or several sending jobs
* REST : POST / sms / { serviceName } / users / { login } / jobs
* @ param sender [ required ] The sender
* @ param _ class [ required ] [ default = phoneDisplay ] The sms class
* @ param receiversSlotId [ required ] The receivers document slot id
* @ param priority [ required ] [ default = high ] The priority of the message
* @ param validityPeriod [ required ] [ default = 2880 ] The maximum time - in minute ( s ) - before the message is dropped
* @ param senderForResponse [ required ] Set the flag to send a special sms which can be reply by the receiver ( smsResponse ) .
* @ param coding [ required ] [ default = 7bit ] The sms coding
* @ param differedPeriod [ required ] [ default = 0 ] The time - in minute ( s ) - to wait before sending the message
* @ param tag [ required ] The identifier group tag
* @ param noStopClause [ required ] Do not display STOP clause in the message , this requires that this is not an advertising message
* @ param receiversDocumentUrl [ required ] The receivers document url link in csv format
* @ param message [ required ] The sms message
* @ param receivers [ required ] The receivers list
* @ param charset [ required ] [ default = UTF - 8 ] The sms coding
* @ param serviceName [ required ] The internal name of your SMS offer
* @ param login [ required ] The sms user login */
public OvhSmsSendingReport serviceName_users_login_jobs_POST ( String serviceName , String login , OvhCharsetEnum charset , OvhClassEnum _class , OvhCodingEnum coding , Long differedPeriod , String message , Boolean noStopClause , OvhPriorityEnum priority , String [ ] receivers , String receiversDocumentUrl , String receiversSlotId , String sender , Boolean senderForResponse , String tag , Long validityPeriod ) throws IOException { } }
|
String qPath = "/sms/{serviceName}/users/{login}/jobs" ; StringBuilder sb = path ( qPath , serviceName , login ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "charset" , charset ) ; addBody ( o , "class" , _class ) ; addBody ( o , "coding" , coding ) ; addBody ( o , "differedPeriod" , differedPeriod ) ; addBody ( o , "message" , message ) ; addBody ( o , "noStopClause" , noStopClause ) ; addBody ( o , "priority" , priority ) ; addBody ( o , "receivers" , receivers ) ; addBody ( o , "receiversDocumentUrl" , receiversDocumentUrl ) ; addBody ( o , "receiversSlotId" , receiversSlotId ) ; addBody ( o , "sender" , sender ) ; addBody ( o , "senderForResponse" , senderForResponse ) ; addBody ( o , "tag" , tag ) ; addBody ( o , "validityPeriod" , validityPeriod ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhSmsSendingReport . class ) ;
|
public class JavaWriter { /** * Prints a Java escaped string */
public void printJavaString ( String s ) throws IOException { } }
|
for ( int i = 0 ; i < s . length ( ) ; i ++ ) { char ch = s . charAt ( i ) ; switch ( ch ) { case '\\' : _os . print ( "\\\\" ) ; break ; case '\n' : _os . print ( "\\n" ) ; break ; case '\r' : _os . print ( "\\r" ) ; break ; case '"' : _os . print ( "\\\"" ) ; break ; default : _os . print ( ch ) ; } }
|
public class StorageImportDispatcher { /** * Publishes any apis that were imported in the " Published " state .
* @ throws StorageException */
private void publishApis ( ) throws StorageException { } }
|
logger . info ( Messages . i18n . format ( "StorageExporter.PublishingApis" ) ) ; // $ NON - NLS - 1 $
try { for ( EntityInfo info : apisToPublish ) { logger . info ( Messages . i18n . format ( "StorageExporter.PublishingApi" , info ) ) ; // $ NON - NLS - 1 $
ApiVersionBean versionBean = storage . getApiVersion ( info . organizationId , info . id , info . version ) ; Api gatewayApi = new Api ( ) ; gatewayApi . setEndpoint ( versionBean . getEndpoint ( ) ) ; gatewayApi . setEndpointType ( versionBean . getEndpointType ( ) . toString ( ) ) ; gatewayApi . setEndpointProperties ( versionBean . getEndpointProperties ( ) ) ; gatewayApi . setOrganizationId ( versionBean . getApi ( ) . getOrganization ( ) . getId ( ) ) ; gatewayApi . setApiId ( versionBean . getApi ( ) . getId ( ) ) ; gatewayApi . setVersion ( versionBean . getVersion ( ) ) ; gatewayApi . setPublicAPI ( versionBean . isPublicAPI ( ) ) ; gatewayApi . setParsePayload ( versionBean . isParsePayload ( ) ) ; if ( versionBean . isPublicAPI ( ) ) { List < Policy > policiesToPublish = new ArrayList < > ( ) ; Iterator < PolicyBean > apiPolicies = storage . getAllPolicies ( info . organizationId , info . id , info . version , PolicyType . Api ) ; while ( apiPolicies . hasNext ( ) ) { PolicyBean apiPolicy = apiPolicies . next ( ) ; Policy policyToPublish = new Policy ( ) ; policyToPublish . setPolicyJsonConfig ( apiPolicy . getConfiguration ( ) ) ; policyToPublish . setPolicyImpl ( apiPolicy . getDefinition ( ) . getPolicyImpl ( ) ) ; policiesToPublish . add ( policyToPublish ) ; } gatewayApi . setApiPolicies ( policiesToPublish ) ; } // Publish the api to all relevant gateways
Set < ApiGatewayBean > gateways = versionBean . getGateways ( ) ; if ( gateways == null ) { throw new RuntimeException ( "No gateways specified for api!" ) ; // $ NON - NLS - 1 $
} for ( ApiGatewayBean apiGatewayBean : gateways ) { IGatewayLink gatewayLink = createGatewayLink ( apiGatewayBean . getGatewayId ( ) ) ; gatewayLink . publishApi ( gatewayApi ) ; } } } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
|
public class SARLJvmModelInferrer { /** * Create a string concatenation client from a set of Java code lines .
* @ param javaCodeLines the Java code lines .
* @ return the client . */
private static StringConcatenationClient toStringConcatenation ( final String ... javaCodeLines ) { } }
|
return new StringConcatenationClient ( ) { @ Override protected void appendTo ( StringConcatenationClient . TargetStringConcatenation builder ) { for ( final String line : javaCodeLines ) { builder . append ( line ) ; builder . newLineIfNotEmpty ( ) ; } } } ;
|
public class CommerceCountryPersistenceImpl { /** * Returns the number of commerce countries where groupId = & # 63 ; and twoLettersISOCode = & # 63 ; .
* @ param groupId the group ID
* @ param twoLettersISOCode the two letters iso code
* @ return the number of matching commerce countries */
@ Override public int countByG_Tw ( long groupId , String twoLettersISOCode ) { } }
|
FinderPath finderPath = FINDER_PATH_COUNT_BY_G_TW ; Object [ ] finderArgs = new Object [ ] { groupId , twoLettersISOCode } ; Long count = ( Long ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( count == null ) { StringBundler query = new StringBundler ( 3 ) ; query . append ( _SQL_COUNT_COMMERCECOUNTRY_WHERE ) ; query . append ( _FINDER_COLUMN_G_TW_GROUPID_2 ) ; boolean bindTwoLettersISOCode = false ; if ( twoLettersISOCode == null ) { query . append ( _FINDER_COLUMN_G_TW_TWOLETTERSISOCODE_1 ) ; } else if ( twoLettersISOCode . equals ( "" ) ) { query . append ( _FINDER_COLUMN_G_TW_TWOLETTERSISOCODE_3 ) ; } else { bindTwoLettersISOCode = true ; query . append ( _FINDER_COLUMN_G_TW_TWOLETTERSISOCODE_2 ) ; } String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; qPos . add ( groupId ) ; if ( bindTwoLettersISOCode ) { qPos . add ( twoLettersISOCode ) ; } count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( finderPath , finderArgs , count ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ;
|
public class NodeRegisterImpl { /** * { @ inheritDoc } */
public boolean isNodePresent ( String host , int port , String transport , String version ) { } }
|
if ( getNode ( host , port , transport , version ) != null ) { return true ; } return false ;
|
public class SnpAdverseDrugReactionType { /** * Gets the value of the proteinNameAndGeneSymbolAndUniprotId property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the proteinNameAndGeneSymbolAndUniprotId property .
* For example , to add a new item , do as follows :
* < pre >
* getProteinNameAndGeneSymbolAndUniprotId ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link String } { @ code > } */
public List < JAXBElement < String > > getProteinNameAndGeneSymbolAndUniprotId ( ) { } }
|
if ( proteinNameAndGeneSymbolAndUniprotId == null ) { proteinNameAndGeneSymbolAndUniprotId = new ArrayList < JAXBElement < String > > ( ) ; } return this . proteinNameAndGeneSymbolAndUniprotId ;
|
public class AbstractMetadataIdGenerator { /** * Generates a eight character [ a - z0-9 ] system unique identifier .
* @ param id identifier of variable length
* @ return hashcode */
protected String generateHashcode ( Object id ) { } }
|
return Hashing . crc32 ( ) . hashString ( id . toString ( ) , UTF_8 ) . toString ( ) ;
|
public class StickyListHeadersListView { /** * the API version */
@ SuppressLint ( "NewApi" ) private void setHeaderOffet ( int offset ) { } }
|
if ( mHeaderOffset == null || mHeaderOffset != offset ) { mHeaderOffset = offset ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . HONEYCOMB ) { mHeader . setTranslationY ( mHeaderOffset ) ; } else { MarginLayoutParams params = ( MarginLayoutParams ) mHeader . getLayoutParams ( ) ; params . topMargin = mHeaderOffset ; mHeader . setLayoutParams ( params ) ; } if ( mOnStickyHeaderOffsetChangedListener != null ) { mOnStickyHeaderOffsetChangedListener . onStickyHeaderOffsetChanged ( this , mHeader , - mHeaderOffset ) ; } }
|
public class TSNE { /** * Update the current solution on iteration .
* @ param sol Solution matrix
* @ param meta Metadata array ( gradient , momentum , learning rate )
* @ param it Iteration number , to choose momentum factor . */
protected void updateSolution ( double [ ] [ ] sol , double [ ] meta , int it ) { } }
|
final double mom = ( it < momentumSwitch && initialMomentum < finalMomentum ) ? initialMomentum : finalMomentum ; final int dim3 = dim * 3 ; for ( int i = 0 , off = 0 ; i < sol . length ; i ++ , off += dim3 ) { final double [ ] sol_i = sol [ i ] ; for ( int k = 0 ; k < dim ; k ++ ) { // Indexes in meta array
final int gradk = off + k , movk = gradk + dim , gaink = movk + dim ; // Adjust learning rate :
meta [ gaink ] = MathUtil . max ( ( ( meta [ gradk ] > 0 ) != ( meta [ movk ] > 0 ) ) ? ( meta [ gaink ] + 0.2 ) : ( meta [ gaink ] * 0.8 ) , MIN_GAIN ) ; meta [ movk ] *= mom ; // Dampening the previous momentum
meta [ movk ] -= learningRate * meta [ gradk ] * meta [ gaink ] ; // Learn
sol_i [ k ] += meta [ movk ] ; } }
|
public class OrderItem { /** * Add values for PromotionIds , return this .
* @ param promotionIds
* New values to add .
* @ return This instance . */
public OrderItem withPromotionIds ( String ... values ) { } }
|
List < String > list = getPromotionIds ( ) ; for ( String value : values ) { list . add ( value ) ; } return this ;
|
public class FieldIterator { /** * Build the field list from the concrete and base record classes . */
private void scanSharedFields ( ) { } }
|
if ( ! m_bFirstTime ) return ; m_bFirstTime = false ; m_vFieldList = new Vector < FieldSummary > ( ) ; m_iCurrentIndex = 0 ; String strClassName = m_recClassInfo . getField ( ClassInfo . CLASS_NAME ) . toString ( ) ; // Step one - Get a list of all the base classes to the origin [ 0 ] = VirtualRecord .
m_rgstrClasses = this . getBaseRecordClasses ( strClassName ) ; String strBaseSharedRecord = null ; // if ( includeSharedFields = = true )
{ // Now add all the classes from the first shared record up the chain
FileHdr recFileHdr = new FileHdr ( Record . findRecordOwner ( m_recFileHdr ) ) ; recFileHdr . setKeyArea ( FileHdr . FILE_NAME_KEY ) ; try { for ( int i = 0 ; i < m_rgstrClasses . length ; i ++ ) { recFileHdr . addNew ( ) ; recFileHdr . getField ( FileHdr . FILE_NAME ) . setString ( m_rgstrClasses [ i ] ) ; if ( recFileHdr . seek ( null ) == true ) // For shared records , include overriding record fields .
// if ( recFileHdr . getField ( FileHdr . TYPE ) . toString ( ) . indexOf ( " SHARED _ TABLE " ) ! = - 1 ) / / For shared records , include overriding record fields .
{ strBaseSharedRecord = m_rgstrClasses [ i ] ; break ; } } } catch ( DBException ex ) { ex . printStackTrace ( ) ; } finally { if ( recFileHdr != null ) recFileHdr . free ( ) ; } } for ( int i = 0 ; i < m_rgstrClasses . length ; i ++ ) { this . scanBaseFields ( m_rgstrClasses [ i ] , ( i == m_rgstrClasses . length - 1 ) ) ; if ( m_rgstrClasses [ i ] . equals ( strBaseSharedRecord ) ) break ; } if ( strBaseSharedRecord != null ) this . scanExtendedClasses ( strBaseSharedRecord ) ; if ( strBaseSharedRecord != null ) if ( ! strClassName . equals ( strBaseSharedRecord ) ) { // This class is not the base class , merge the two
this . scanRecordsFields ( m_rgstrClasses ) ; }
|
public class ParaClient { /** * Deletes multiple objects .
* @ param keys the ids of the objects to delete */
public void deleteAll ( List < String > keys ) { } }
|
if ( keys == null || keys . isEmpty ( ) ) { return ; } final int size = this . chunkSize ; IntStream . range ( 0 , getNumChunks ( keys , size ) ) . mapToObj ( i -> ( List < String > ) partitionList ( keys , i , size ) ) . forEach ( chunk -> { MultivaluedMap < String , String > ids = new MultivaluedHashMap < > ( ) ; ids . put ( "ids" , chunk ) ; invokeDelete ( "_batch" , ids ) ; } ) ;
|
public class AbstractApplication { /** * Returns a usage message , explaining all known options
* @ param options Options to show in usage .
* @ return a usage message explaining all known options */
public static String usage ( Collection < TrackedParameter > options ) { } }
|
StringBuilder usage = new StringBuilder ( 10000 ) ; if ( ! REFERENCE_VERSION . equals ( VERSION ) ) { usage . append ( "ELKI build: " ) . append ( VERSION ) . append ( NEWLINE ) . append ( NEWLINE ) ; } usage . append ( REFERENCE ) ; // Collect options
OptionUtil . formatForConsole ( usage . append ( NEWLINE ) . append ( "Parameters:" ) . append ( NEWLINE ) , FormatUtil . getConsoleWidth ( ) , options ) ; return usage . toString ( ) ;
|
public class JCloudsReader { /** * { @ inheritDoc } */
@ Override public IBucket read ( long pKey ) throws TTIOException { } }
|
// IBucket returnval = null ;
IBucket returnval = mCache . getIfPresent ( pKey ) ; if ( returnval == null ) { try { returnval = getAndprefetchBuckets ( pKey ) ; // reader . write ( returnval . getBucketKey ( ) + " , " + returnval . getClass ( ) . getName ( ) + " \ n " ) ;
// reader . flush ( ) ;
} catch ( Exception exc ) { throw new TTIOException ( exc ) ; } } return returnval ;
|
public class DServer { String [ ] polled_device ( ) { } }
|
Util . out4 . println ( "In polled_device command" ) ; final int nb_class = class_list . size ( ) ; final Vector dev_name = new Vector ( ) ; for ( int i = 0 ; i < nb_class ; i ++ ) { final DeviceClass dc = ( DeviceClass ) class_list . elementAt ( i ) ; final int nb_dev = dc . get_device_list ( ) . size ( ) ; for ( int j = 0 ; j < nb_dev ; j ++ ) { // Get DS name if it is polled
final DeviceImpl dev = dc . get_device_at ( j ) ; if ( dev . is_polled ( ) == true ) { dev_name . add ( dev . get_name ( ) ) ; } } } // Return an empty sequence if no devices are polled
if ( dev_name . size ( ) == 0 ) { Util . out4 . println ( "Return an empty sequence because no devices are polled" ) ; return new String [ 0 ] ; } // Returned device name list to caller ( sorted )
final MyComp comp = new MyComp ( ) ; Collections . sort ( dev_name , comp ) ; final int nb_dev = dev_name . size ( ) ; final String [ ] ret = new String [ nb_dev ] ; for ( int i = 0 ; i < nb_dev ; i ++ ) { ret [ i ] = ( String ) dev_name . elementAt ( i ) ; } return ret ;
|
public class FetchBuddyListImpl { /** * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . core . command . BaseCommand # execute ( ) */
@ SuppressWarnings ( { } }
|
"unchecked" , "rawtypes" } ) @ Override public List < ApiBuddy > execute ( ) { BuddyList buddyList = extension . getParentZone ( ) . getBuddyListManager ( ) . getBuddyList ( user ) ; return ( List ) buddyList . getBuddies ( ) ;
|
public class ParaClient { /** * Returns the value of a specific app setting ( property ) .
* @ param key a key
* @ return a map containing one element { " value " : " the _ value " } or an empty map . */
public Map < String , Object > appSettings ( String key ) { } }
|
if ( StringUtils . isBlank ( key ) ) { return appSettings ( ) ; } return getEntity ( invokeGet ( Utils . formatMessage ( "_settings/{0}" , key ) , null ) , Map . class ) ;
|
public class MockArtifactStore { /** * { @ inheritDoc } */
public synchronized Metadata getMetadata ( String path ) throws IOException , MetadataNotFoundException { } }
|
Metadata metadata = new Metadata ( ) ; boolean foundMetadata = false ; path = StringUtils . stripEnd ( StringUtils . stripStart ( path , "/" ) , "/" ) ; String groupId = path . replace ( '/' , '.' ) ; Set < String > pluginArtifactIds = getArtifactIds ( groupId ) ; if ( pluginArtifactIds != null ) { List < Plugin > plugins = new ArrayList < Plugin > ( ) ; for ( String artifactId : pluginArtifactIds ) { Set < String > pluginVersions = getVersions ( groupId , artifactId ) ; if ( pluginVersions == null || pluginVersions . isEmpty ( ) ) { continue ; } String [ ] versions = pluginVersions . toArray ( new String [ pluginVersions . size ( ) ] ) ; Arrays . sort ( versions , new VersionComparator ( ) ) ; MavenXpp3Reader reader = new MavenXpp3Reader ( ) ; for ( int j = versions . length - 1 ; j >= 0 ; j -- ) { InputStream inputStream = null ; try { inputStream = get ( new Artifact ( groupId , artifactId , versions [ j ] , "pom" ) ) ; Model model = reader . read ( new XmlStreamReader ( inputStream ) ) ; if ( model == null || ! "maven-plugin" . equals ( model . getPackaging ( ) ) ) { continue ; } Plugin plugin = new Plugin ( ) ; plugin . setArtifactId ( artifactId ) ; plugin . setName ( model . getName ( ) ) ; // TODO proper goal - prefix determination
// ugh ! this is incredibly hacky and does not handle some fool that sets the goal prefix in
// a parent pom . . . ok unlikely , but stupid is as stupid does
boolean havePrefix = false ; final Build build = model . getBuild ( ) ; if ( build != null && build . getPlugins ( ) != null ) { havePrefix = setPluginGoalPrefixFromConfiguration ( plugin , build . getPlugins ( ) ) ; } if ( ! havePrefix && build != null && build . getPluginManagement ( ) != null && build . getPluginManagement ( ) . getPlugins ( ) != null ) { havePrefix = setPluginGoalPrefixFromConfiguration ( plugin , build . getPluginManagement ( ) . getPlugins ( ) ) ; } if ( ! havePrefix && artifactId . startsWith ( "maven-" ) && artifactId . endsWith ( "-plugin" ) ) { plugin . setPrefix ( StringUtils . removeStart ( StringUtils . removeEnd ( artifactId , "-plugin" ) , "maven-" ) ) ; havePrefix = true ; } if ( ! havePrefix && artifactId . endsWith ( "-maven-plugin" ) ) { plugin . setPrefix ( StringUtils . removeEnd ( artifactId , "-maven-plugin" ) ) ; havePrefix = true ; } if ( ! havePrefix ) { plugin . setPrefix ( artifactId ) ; } plugins . add ( plugin ) ; foundMetadata = true ; break ; } catch ( ArtifactNotFoundException e ) { // ignore
} catch ( XmlPullParserException e ) { // ignore
} finally { IOUtils . closeQuietly ( inputStream ) ; } } } if ( ! plugins . isEmpty ( ) ) { metadata . setPlugins ( plugins ) ; } } int index = path . lastIndexOf ( '/' ) ; groupId = ( index == - 1 ? groupId : groupId . substring ( 0 , index ) ) . replace ( '/' , '.' ) ; String artifactId = ( index == - 1 ? null : path . substring ( index + 1 ) ) ; if ( artifactId != null ) { Set < String > artifactVersions = getVersions ( groupId , artifactId ) ; if ( artifactVersions != null && ! artifactVersions . isEmpty ( ) ) { metadata . setGroupId ( groupId ) ; metadata . setArtifactId ( artifactId ) ; Versioning versioning = new Versioning ( ) ; List < String > versions = new ArrayList < String > ( artifactVersions ) ; Collections . sort ( versions , new VersionComparator ( ) ) ; // sort the Maven way
long lastUpdated = 0 ; for ( String version : versions ) { try { long lastModified = getLastModified ( new Artifact ( groupId , artifactId , version , "pom" ) ) ; versioning . addVersion ( version ) ; if ( lastModified >= lastUpdated ) { lastUpdated = lastModified ; versioning . setLastUpdatedTimestamp ( new Date ( lastModified ) ) ; versioning . setLatest ( version ) ; if ( ! version . endsWith ( "-SNAPSHOT" ) ) { versioning . setRelease ( version ) ; } } } catch ( ArtifactNotFoundException e ) { // ignore
} } metadata . setVersioning ( versioning ) ; foundMetadata = true ; } } int index2 = index == - 1 ? - 1 : path . lastIndexOf ( '/' , index - 1 ) ; groupId = index2 == - 1 ? groupId : groupId . substring ( 0 , index2 ) . replace ( '/' , '.' ) ; artifactId = index2 == - 1 ? artifactId : path . substring ( index2 + 1 , index ) ; String version = index2 == - 1 ? null : path . substring ( index + 1 ) ; if ( version != null && version . endsWith ( "-SNAPSHOT" ) ) { Map < String , Map < String , Map < Artifact , Content > > > artifactMap = contents . get ( groupId ) ; Map < String , Map < Artifact , Content > > versionMap = ( artifactMap == null ? null : artifactMap . get ( artifactId ) ) ; Map < Artifact , Content > filesMap = ( versionMap == null ? null : versionMap . get ( version ) ) ; if ( filesMap != null ) { List < SnapshotVersion > snapshotVersions = new ArrayList < SnapshotVersion > ( ) ; int maxBuildNumber = 0 ; long lastUpdated = 0 ; String timestamp = null ; boolean found = false ; for ( final Map . Entry < Artifact , Content > entry : filesMap . entrySet ( ) ) { final Artifact artifact = entry . getKey ( ) ; final Content content = entry . getValue ( ) ; SimpleDateFormat fmt = new SimpleDateFormat ( "yyyyMMddHHmmss" ) ; fmt . setTimeZone ( TimeZone . getTimeZone ( "GMT" ) ) ; String lastUpdatedTime = fmt . format ( new Date ( content . getLastModified ( ) ) ) ; try { Maven3 . addSnapshotVersion ( snapshotVersions , artifact , lastUpdatedTime ) ; } catch ( LinkageError e ) { // Maven 2
} if ( "pom" . equals ( artifact . getType ( ) ) ) { if ( artifact . getBuildNumber ( ) != null && maxBuildNumber < artifact . getBuildNumber ( ) . intValue ( ) ) { maxBuildNumber = artifact . getBuildNumber ( ) . intValue ( ) ; timestamp = artifact . getTimestampString ( ) ; } else { maxBuildNumber = Math . max ( 1 , maxBuildNumber ) ; } lastUpdated = Math . max ( lastUpdated , content . getLastModified ( ) ) ; found = true ; } } if ( ! snapshotVersions . isEmpty ( ) || found ) { Versioning versioning = metadata . getVersioning ( ) ; if ( versioning == null ) { versioning = new Versioning ( ) ; } metadata . setGroupId ( groupId ) ; metadata . setArtifactId ( artifactId ) ; metadata . setVersion ( version ) ; try { Maven3 . addSnapshotVersions ( versioning , snapshotVersions ) ; } catch ( LinkageError e ) { // Maven 2
} if ( maxBuildNumber > 0 ) { Snapshot snapshot = new Snapshot ( ) ; snapshot . setBuildNumber ( maxBuildNumber ) ; snapshot . setTimestamp ( timestamp ) ; versioning . setSnapshot ( snapshot ) ; } versioning . setLastUpdatedTimestamp ( new Date ( lastUpdated ) ) ; metadata . setVersioning ( versioning ) ; foundMetadata = true ; } } } if ( ! foundMetadata ) { throw new MetadataNotFoundException ( path ) ; } return metadata ;
|
public class PageAwareAuthenticator { /** * Redirects to the configured authentication page , passing the target service as parameter " _ redirect _ " . */
protected void redirectToAuthenticationPage ( DataBinder binder ) { } }
|
if ( _page != null ) { binder . mul ( Service . SERVICE_HTTP_HEADER , String . class , String . class ) . add ( "Content-Type" , "text/html; charset=utf-8" ) ; binder . put ( Service . SERVICE_PAGE_TARGET , _page + "?_redirect_=" + binder . get ( Service . REQUEST_TARGET_PATH ) ) ; // _ logger . log ( Level . FINE , " redirecting to { 0 } " , binder . get ( Service . SERVICE _ PAGE _ TARGET ) ) ;
}
|
public class DelegatedTokenCredentials { /** * Generate the URL to authenticate through OAuth2.
* @ param responseMode the method that should be used to send the resulting token back to your app
* @ param state a value included in the request that is also returned in the token response
* @ return the URL to authenticate through OAuth2 */
public String generateAuthenticationUrl ( ResponseMode responseMode , String state ) { } }
|
return String . format ( "%s/%s/oauth2/authorize?client_id=%s&response_type=code&redirect_uri=%s&response_mode=%s&state=%s" , environment ( ) . activeDirectoryEndpoint ( ) , domain ( ) , clientId ( ) , this . redirectUrl , responseMode . value , state ) ;
|
public class FileChooserCore { /** * Loads all the files of a folder in the file chooser .
* If no path is specified ( ' folder ' is null ) the root folder of the SD card is going to be used .
* @ param folder The folder . */
public void loadFolder ( File folder ) { } }
|
// Remove previous files .
LinearLayout root = this . chooser . getRootLayout ( ) ; LinearLayout layout = ( LinearLayout ) root . findViewById ( R . id . linearLayoutFiles ) ; layout . removeAllViews ( ) ; // Get the file path .
if ( folder == null || ! folder . exists ( ) ) { if ( defaultFolder != null ) { this . currentFolder = defaultFolder ; } else { this . currentFolder = Environment . getExternalStorageDirectory ( ) ; } } else { this . currentFolder = folder ; } // Verify if the path exists .
if ( this . currentFolder . exists ( ) && layout != null ) { List < FileItem > fileItems = new LinkedList < FileItem > ( ) ; // Add the parent folder .
if ( this . currentFolder . getParent ( ) != null ) { File parent = new File ( this . currentFolder . getParent ( ) ) ; if ( parent . exists ( ) ) { fileItems . add ( new FileItem ( this . chooser . getContext ( ) , parent , ".." ) ) ; } } // Verify if the file is a directory .
if ( this . currentFolder . isDirectory ( ) ) { // Get the folder ' s files .
File [ ] fileList = this . currentFolder . listFiles ( ) ; if ( fileList != null ) { // Order the files alphabetically and separating folders from files .
Arrays . sort ( fileList , new Comparator < File > ( ) { public int compare ( File file1 , File file2 ) { if ( file1 != null && file2 != null ) { if ( file1 . isDirectory ( ) && ( ! file2 . isDirectory ( ) ) ) return - 1 ; if ( file2 . isDirectory ( ) && ( ! file1 . isDirectory ( ) ) ) return 1 ; return file1 . getName ( ) . compareTo ( file2 . getName ( ) ) ; } return 0 ; } } ) ; // Iterate all the files in the folder .
for ( int i = 0 ; i < fileList . length ; i ++ ) { // Verify if file can be selected ( is a directory or folder mode is not activated and the file pass the filter , if defined ) .
boolean selectable = true ; if ( ! fileList [ i ] . isDirectory ( ) ) { selectable = ! this . folderMode && ( this . filter == null || fileList [ i ] . getName ( ) . matches ( this . filter ) ) ; } // Verify if the file must be show .
if ( selectable || ! this . showOnlySelectable ) { // Create the file item and add it to the list .
FileItem fileItem = new FileItem ( this . chooser . getContext ( ) , fileList [ i ] ) ; fileItem . setSelectable ( selectable ) ; fileItems . add ( fileItem ) ; } } } // Set the name of the current folder .
String currentFolderName = this . showFullPathInTitle ? this . currentFolder . getPath ( ) : this . currentFolder . getName ( ) ; this . chooser . setCurrentFolderName ( currentFolderName ) ; } else { // The file is not a folder , add only this file .
fileItems . add ( new FileItem ( this . chooser . getContext ( ) , this . currentFolder ) ) ; } // Add click listener and add the FileItem objects to the layout .
for ( int i = 0 ; i < fileItems . size ( ) ; i ++ ) { fileItems . get ( i ) . addListener ( this . fileItemClickListener ) ; layout . addView ( fileItems . get ( i ) ) ; } // Refresh default folder .
defaultFolder = this . currentFolder ; }
|
public class ClassLoaderUtil { /** * Add file to CLASSPATH
* @ param s File name
* @ throws IOException IOException */
public static void addFile ( String s ) throws IOException { } }
|
File f = new File ( s ) ; addFile ( f ) ;
|
public class CollapsiblePanel { /** * Set the collapsion state . */
public void setCollapsed ( boolean collapse ) { } }
|
if ( collapse ) { _content . setVisible ( false ) ; _trigger . setIcon ( _downIcon ) ; } else { _content . setVisible ( true ) ; _trigger . setIcon ( _upIcon ) ; } SwingUtil . refresh ( this ) ;
|
public class OlympusCameraSettingsMakernoteDescriptor { /** * / < returns > < / returns > */
@ Nullable public String getAfPointSelectedDescription ( ) { } }
|
Rational [ ] values = _directory . getRationalArray ( TagAfPointSelected ) ; if ( values == null ) return "n/a" ; if ( values . length < 4 ) return null ; int index = 0 ; if ( values . length == 5 && values [ 0 ] . longValue ( ) == 0 ) index = 1 ; int p1 = ( int ) ( values [ index ] . doubleValue ( ) * 100 ) ; int p2 = ( int ) ( values [ index + 1 ] . doubleValue ( ) * 100 ) ; int p3 = ( int ) ( values [ index + 2 ] . doubleValue ( ) * 100 ) ; int p4 = ( int ) ( values [ index + 3 ] . doubleValue ( ) * 100 ) ; if ( p1 + p2 + p3 + p4 == 0 ) return "n/a" ; return String . format ( "(%d%%,%d%%) (%d%%,%d%%)" , p1 , p2 , p3 , p4 ) ;
|
public class NodeDetailsDeriver { /** * This method calculates the actual time associated with the
* supplied node .
* @ param n The node
* @ return The actual time spent in the node */
protected long calculateActualTime ( Node n ) { } }
|
// Actual time = the node's own duration minus the time accounted for by its
// children. For a container node we accumulate each child's duration and also
// track the overall child window [startTime, endTime]. Note startTime is seeded
// with the parent's END and endTime with the parent's START, so the loop's
// min/max comparisons converge on the true child extremes.
long childElapsed = 0 ; if ( n . containerNode ( ) ) { long startTime = n . getTimestamp ( ) + n . getDuration ( ) ; long endTime = n . getTimestamp ( ) ; for ( int i = 0 ; i < ( ( ContainerNode ) n ) . getNodes ( ) . size ( ) ; i ++ ) { Node child = ( ( ContainerNode ) n ) . getNodes ( ) . get ( i ) ; if ( child . getTimestamp ( ) < startTime ) { startTime = child . getTimestamp ( ) ; } if ( endTime < ( child . getTimestamp ( ) + child . getDuration ( ) ) ) { endTime = child . getTimestamp ( ) + child . getDuration ( ) ; } childElapsed += child . getDuration ( ) ; } // Check if child accumulated elapsed time is greater than parent duration
// indicating that some / all of the children were concurrently performed .
// In that case fall back to the size of the child window; if even that falls
// outside [0, parent duration], treat the children as contributing nothing.
if ( childElapsed > n . getDuration ( ) ) { // Set child elapsed time to zero , so parent time
childElapsed = endTime - startTime ; if ( childElapsed < 0 || childElapsed > n . getDuration ( ) ) { // If child durations are greater than the parent , then
// just set actual time to same as parent ( i . e . so child
// elapsed is considered as 0 ) .
childElapsed = 0 ; } } else if ( endTime > n . getTimestamp ( ) + n . getDuration ( ) ) { // Child end time after parent end time , so must be async
// (asynchronous children extend past the parent, so their time is not
// subtracted from the parent's duration)
childElapsed = 0 ; } } return n . getDuration ( ) - childElapsed ;
|
public class MeasureToViewMap { /** * Resume stats collection for all MutableViewData . */
synchronized void resumeStatsCollection ( Timestamp now ) { } }
|
for ( Entry < String , Collection < MutableViewData > > entry : mutableMap . asMap ( ) . entrySet ( ) ) { for ( MutableViewData mutableViewData : entry . getValue ( ) ) { mutableViewData . resumeStatsCollection ( now ) ; } }
|
public class DRUMS { /** * Reads < code > numberToRead < / code > elements ( or less if there are not enough elements ) from the bucket with the
* given < code > bucketId < / code > beginning at the element offset .
* @ param bucketId
* the id of the bucket where to read the elements from
* @ param elementOffset
* the byte offset , where to start reading
* @ param numberToRead
* the number of elements to read
* @ return ArrayList containing the data - objects
* @ throws FileLockException
* @ throws IOException */
public List < Data > read ( int bucketId , int elementOffset , int numberToRead ) throws FileLockException , IOException { } }
|
String filename = gp . DATABASE_DIRECTORY + "/" + hashFunction . getFilename ( bucketId ) ; HeaderIndexFile < Data > indexFile = new HeaderIndexFile < Data > ( filename , HeaderIndexFile . AccessMode . READ_ONLY , gp . HEADER_FILE_LOCK_RETRY , gp ) ; List < Data > result = new ArrayList < Data > ( ) ; // where to start
long actualOffset = elementOffset * gp . getElementSize ( ) ; // get the complete buffer
ByteBuffer dataBuffer = ByteBuffer . allocate ( numberToRead * gp . getElementSize ( ) ) ; indexFile . read ( actualOffset , dataBuffer ) ; dataBuffer . flip ( ) ; byte [ ] dataArray = new byte [ gp . getElementSize ( ) ] ; while ( dataBuffer . position ( ) < dataBuffer . limit ( ) ) { dataBuffer . get ( dataArray ) ; @ SuppressWarnings ( "unchecked" ) Data copy = ( Data ) prototype . fromByteBuffer ( ByteBuffer . wrap ( dataArray ) ) ; result . add ( copy ) ; } indexFile . close ( ) ; return result ;
|
public class StaticFieldELResolver { /** * < p > Returns the type of a static field . < / p >
* < p > If the base object is an instance of < code > ELClass < / code > and the
* property is a String ,
* the < code > propertyResolved < / code > property of the
* < code > ELContext < / code > object must be set to < code > true < / code >
* by the resolver , before returning . If this property is not
* < code > true < / code > after this method is called , the caller can
* safely assume no value has been set . < / p >
* If the property string is a public static field of class specified in
* ELClass , return the type of the static field . < / p >
* @ param context The context of this evaluation .
* @ param base An < code > ELClass < / code > .
* @ param property The name of the field .
* @ return If the < code > propertyResolved < / code > property of
* < code > ELContext < / code > was set to < code > true < / code > , then
* the type of the type of the field .
* @ throws NullPointerException if context is < code > null < / code > .
* @ throws PropertyNotFoundException if field is not a public static
* filed of the class , or if the field is inaccessible . */
@ Override public Class < ? > getType ( ELContext context , Object base , Object property ) { } }
|
if ( context == null ) { throw new NullPointerException ( ) ; } if ( base instanceof ELClass && property instanceof String ) { Class < ? > klass = ( ( ELClass ) base ) . getKlass ( ) ; String fieldName = ( String ) property ; try { context . setPropertyResolved ( true ) ; Field field = klass . getField ( fieldName ) ; int mod = field . getModifiers ( ) ; if ( Modifier . isPublic ( mod ) && Modifier . isStatic ( mod ) ) { return field . getType ( ) ; } } catch ( NoSuchFieldException ex ) { } throw new PropertyNotFoundException ( ELUtil . getExceptionMessageString ( context , "staticFieldReadError" , new Object [ ] { klass . getName ( ) , fieldName } ) ) ; } return null ;
|
public class ModulePersonalAccessTokens { /** * Create a new personal access token .
* This method is the _ only _ time you will see the personal access token value . Please keep
* it save after this call , because a { @ link # fetchAll ( ) } or a { @ link # fetchOne ( String ) } will
* _ not _ return it !
* @ param token the token to be created on Contentful .
* @ return the just created token , containing the access token string . */
public CMAPersonalAccessToken create ( CMAPersonalAccessToken token ) { } }
|
final CMASystem sys = token . getSystem ( ) ; token . setSystem ( null ) ; try { return service . create ( token ) . blockingFirst ( ) ; } finally { token . setSystem ( sys ) ; }
|
public class GetGlue { /** * Set the { @ link retrofit . RestAdapter } log levels .
* @ param isDebug If true , the log level is set to { @ link retrofit . RestAdapter . LogLevel # FULL } . Otherwise { @ link
* retrofit . RestAdapter . LogLevel # NONE } . */
public GetGlue setIsDebug ( boolean isDebug ) { } }
|
this . isDebug = isDebug ; RestAdapter . LogLevel logLevel = isDebug ? RestAdapter . LogLevel . FULL : RestAdapter . LogLevel . NONE ; if ( restAdapter != null ) { restAdapter . setLogLevel ( logLevel ) ; } if ( restAdapterApiFour != null ) { restAdapterApiFour . setLogLevel ( logLevel ) ; } return this ;
|
public class EmbedBuilder { /** * Checks if the given embed is empty . Empty embeds will throw an exception if built
 * @ return true if the embed is empty and cannot be built */
public boolean isEmpty ( ) { } }
|
// An embed is "empty" when it carries no renderable content at all. Color is
// deliberately excluded (see inline note): a color alone is not enough to send.
// NOTE(review): description is dereferenced without a null check — presumably it
// is an always-initialized builder; confirm against the field's declaration.
return title == null && description . length ( ) == 0 && timestamp == null // & & color = = null color alone is not enough to send
&& thumbnail == null && author == null && footer == null && image == null && fields . isEmpty ( ) ;
|
public class InMemoryHandler { /** * ( non - Javadoc )
* @ see net . roboconf . target . api . TargetHandler
* # isMachineRunning ( net . roboconf . target . api . TargetHandlerParameters , java . lang . String ) */
@ Override public boolean isMachineRunning ( TargetHandlerParameters parameters , String machineId ) throws TargetException { } }
|
// Checks whether the in-memory agent for machineId is running and, when the
// manager's model says it should be running but the iPojo instance has vanished
// (all in-memory agents disappear on restoration), recreates it on the fly.
// NOTE(review): findManagedApplicationByName may return null; 'ma' is then
// dereferenced without a check — confirm callers guarantee the application exists.
this . logger . fine ( "Verifying the in-memory agent for " + machineId + " is running." ) ; Map < String , String > targetProperties = preventNull ( parameters . getTargetProperties ( ) ) ; // No agent factory = > no iPojo instance = > not running
boolean result = false ; if ( this . standardAgentFactory != null ) result = this . standardAgentFactory . getInstancesNames ( ) . contains ( machineId ) ; // On restoration , in - memory agents will ALL have disappeared .
// So , it makes sense to recreate them if they do not exist anymore .
// To determine whether we should restore them or no , we look for the model in the manager .
if ( ! result && ! simulatePlugins ( targetProperties ) ) { Map . Entry < String , String > ctx = parseMachineId ( machineId ) ; ManagedApplication ma = this . manager . applicationMngr ( ) . findManagedApplicationByName ( ctx . getValue ( ) ) ; Instance scopedInstance = InstanceHelpers . findInstanceByPath ( ma . getApplication ( ) , ctx . getKey ( ) ) ; // Is it supposed to be running ?
if ( scopedInstance . getStatus ( ) != InstanceStatus . NOT_DEPLOYED ) { this . logger . fine ( "In-memory agent for " + machineId + " is supposed to be running but is not. It will be restored." ) ; Map < String , String > messagingConfiguration = this . manager . messagingMngr ( ) . getMessagingClient ( ) . getConfiguration ( ) ; Factory factory = findIPojoFactory ( parameters ) ; createIPojo ( targetProperties , messagingConfiguration , machineId , ctx . getKey ( ) , ctx . getValue ( ) , this . manager . getDomain ( ) , factory ) ; result = true ; // The agent will restore its model by asking it to the DM .
} } return result ;
|
public class CompassImageView { /** * onDraw override .
* If animation is " on " , view is invalidated after each redraw ,
* to perform recalculation on every loop of UI redraw */
@ Override public void onDraw ( Canvas canvas ) { } }
|
if ( animationOn ) { if ( angleRecalculate ( new Date ( ) . getTime ( ) ) ) { this . setRotation ( angle1 ) ; } } else { this . setRotation ( angle1 ) ; } super . onDraw ( canvas ) ; if ( animationOn ) { this . invalidate ( ) ; }
|
public class InputGateMetrics { /** * Iterates over all input channels and collects the total number of queued buffers in a
* best - effort way .
* @ return total number of queued buffers */
long refreshAndGetTotal ( ) { } }
|
long total = 0 ; for ( InputChannel channel : inputGate . getInputChannels ( ) . values ( ) ) { if ( channel instanceof RemoteInputChannel ) { RemoteInputChannel rc = ( RemoteInputChannel ) channel ; total += rc . unsynchronizedGetNumberOfQueuedBuffers ( ) ; } } return total ;
|
public class PreferenceActivity { /** * Shows a specific bread crumb . When using the split screen layout , the bread crumb is shown
* above the currently shown preference fragment , otherwise the bread crumb is shown as the
* toolbar title .
* @ param breadCrumbTitle
* The bread crumb title , which should be shown , as an instance of the type { @ link
* CharSequence } or null , if no bread crumb should be shown */
private void showBreadCrumb ( @ Nullable final CharSequence breadCrumbTitle ) { } }
|
CharSequence formattedBreadCrumbTitle = formatBreadCrumbTitle ( breadCrumbTitle ) ; if ( isSplitScreen ( ) ) { breadCrumbToolbar . setTitle ( formattedBreadCrumbTitle ) ; } else if ( ! TextUtils . isEmpty ( formattedBreadCrumbTitle ) ) { showTitle ( formattedBreadCrumbTitle ) ; }
|
public class MapView { private boolean doSetScale ( double scale ) { } }
|
boolean res = Math . abs ( viewState . getScale ( ) - scale ) > .0000001 ; viewState = viewState . copyAndSetScale ( scale ) ; return res ;
|
public class ClusterTierActiveEntity { /** * Send a { @ link PassiveReplicationMessage } to the passive , reuse the same transaction id and client id as the original message since this
* original message won ' t ever be sent to the passive and these ids will be used to prevent duplication if the active goes down and the
* client resends the original message to the passive ( now our new active ) .
* @ param context context of the message
* @ param message message to be forwarded
* @ param newChain resulting chain to send */
private void sendMessageToSelfAndDeferRetirement ( ActiveInvokeContext < EhcacheEntityResponse > context , KeyBasedServerStoreOpMessage message , Chain newChain ) { } }
|
try { long clientId = context . getClientSource ( ) . toLong ( ) ; entityMessenger . messageSelfAndDeferRetirement ( message , new PassiveReplicationMessage . ChainReplicationMessage ( message . getKey ( ) , newChain , context . getCurrentTransactionId ( ) , context . getOldestTransactionId ( ) , clientId ) ) ; } catch ( MessageCodecException e ) { throw new AssertionError ( "Codec error" , e ) ; }
|
public class IntervalLogStream { /** * Starts up a thread that automatically rolls the underlying OutputStream
* at the beginning of the interval , even if no output is written . */
public synchronized void startAutoRollover ( ) { } }
|
// TODO : If java . util . Timer class is available , use it instead of
// creating a thread each time .
if ( mRolloverThread == null ) { mRolloverThread = new Thread ( new AutoRollover ( this ) , nextName ( ) ) ; mRolloverThread . setDaemon ( true ) ; mRolloverThread . start ( ) ; }
|
public class ConverterUtils { /** * Returns the new { @ link CellValue } from provided { @ link org . apache . poi . ss . usermodel . CellValue } . */
public static ICellValue resolveCellValue ( org . apache . poi . ss . usermodel . CellValue cellval ) { } }
|
if ( cellval == null ) { return CellValue . BLANK ; } switch ( cellval . getCellType ( ) ) { case CELL_TYPE_NUMERIC : { return CellValue . from ( cellval . getNumberValue ( ) ) ; } case CELL_TYPE_STRING : { return CellValue . from ( cellval . getStringValue ( ) ) ; } case CELL_TYPE_BOOLEAN : { return CellValue . from ( cellval . getBooleanValue ( ) ) ; } case CELL_TYPE_ERROR : { return CellValue . from ( ErrorEval . valueOf ( cellval . getErrorValue ( ) ) . getErrorString ( ) ) ; } case CELL_TYPE_BLANK : { return CellValue . BLANK ; } case CELL_TYPE_FORMULA : { throw new CalculationEngineException ( "Result of evaluation cannot be a formula." ) ; } default : { throw new CalculationEngineException ( String . format ( "CellValue's tType %s is not supported." , cellval . getCellType ( ) ) ) ; } }
|
public class DatabaseClientSnippets { /** * [ VARIABLE my _ singer _ id ] */
public String singleUseStale ( long singerId ) { } }
|
// Documentation snippet: reads one column of one row using a single-use,
// bounded-staleness read (data may be up to 10 seconds stale). The
// [START]/[END] markers are consumed by the docs pipeline — do not alter them.
// [ START singleUseStale ]
String column = "FirstName" ; Struct row = dbClient . singleUse ( TimestampBound . ofMaxStaleness ( 10 , TimeUnit . SECONDS ) ) . readRow ( "Singers" , Key . of ( singerId ) , Collections . singleton ( column ) ) ; String firstName = row . getString ( column ) ; // [ END singleUseStale ]
return firstName ;
|
public class CommerceNotificationTemplatePersistenceImpl { /** * Returns an ordered range of all the commerce notification templates where groupId = & # 63 ; and type = & # 63 ; and enabled = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceNotificationTemplateModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param groupId the group ID
* @ param type the type
* @ param enabled the enabled
* @ param start the lower bound of the range of commerce notification templates
* @ param end the upper bound of the range of commerce notification templates ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching commerce notification templates */
// Delegates to the master finder overload with useFinderCache = true, so the
// result may be served from (and stored into) the finder cache.
@ Override public List < CommerceNotificationTemplate > findByG_T_E ( long groupId , String type , boolean enabled , int start , int end , OrderByComparator < CommerceNotificationTemplate > orderByComparator ) { } }
|
return findByG_T_E ( groupId , type , enabled , start , end , orderByComparator , true ) ;
|
public class TransformXMLInterceptor { /** * Creates a new Transformer instance using cached XSLT Templates . There will be one cached template . Transformer
* instances are not thread - safe and cannot be reused ( they can after the transformation is complete ) .
* @ return A new Transformer instance . */
private static Transformer newTransformer ( ) { } }
|
if ( TEMPLATES == null ) { throw new IllegalStateException ( "TransformXMLInterceptor not initialized." ) ; } try { return TEMPLATES . newTransformer ( ) ; } catch ( TransformerConfigurationException ex ) { throw new SystemException ( "Could not create transformer for " + RESOURCE_NAME , ex ) ; }
|
public class CClassLoader { /** * Return a list of jar and classes located in path
* @ param path
* the path in which to search
* @ return a list of jar and classes located in path */
public static URL [ ] getURLs ( final String path ) { } }
|
final File topDir = new File ( path ) ; final List list = new ArrayList ( ) ; CClassLoader . getClasses ( topDir , list ) ; final URL ret [ ] = new URL [ list . size ( ) + 1 ] ; for ( int i = 0 ; i < list . size ( ) ; i ++ ) { ret [ i ] = ( URL ) list . get ( i ) ; } try { ret [ list . size ( ) ] = topDir . toURI ( ) . toURL ( ) ; } catch ( final Exception ignore ) { } return ret ;
|
public class MessageFormat { /** * Parse the argument as a date - time with the format - wide time zone . */
private ZonedDateTime parseDateTime ( MessageArg arg ) { } }
|
long instant = arg . asLong ( ) ; ZoneId timeZone = arg . timeZone ( ) != null ? arg . timeZone ( ) : this . timeZone ; return ZonedDateTime . ofInstant ( Instant . ofEpochMilli ( instant ) , timeZone ) ;
|
public class ReflectionFormat { /** * { @ inheritDoc } */
@ Override public T newInstance ( final Class < T > clazz , final javolution . xml . XMLFormat . InputElement xml ) throws XMLStreamException { } }
|
try { return _constructor . newInstance ( INITARGS ) ; } catch ( final Exception e ) { throw new XMLStreamException ( e ) ; }
|
public class Bucket { /** * Creates a new blob in this bucket . Direct upload is used to upload { @ code content } . For large
* content , { @ link Blob # writer ( com . google . cloud . storage . Storage . BlobWriteOption . . . ) } is
* recommended as it uses resumable upload . MD5 and CRC32C hashes of { @ code content } are computed
* and used for validating transferred data .
* < p > Example of creating a blob in the bucket from a byte array with a content type .
* < pre > { @ code
* String blobName = " my _ blob _ name " ;
* Blob blob = bucket . create ( blobName , " Hello , World ! " . getBytes ( UTF _ 8 ) , " text / plain " ) ;
* } < / pre >
* @ param blob a blob name
* @ param content the blob content
* @ param contentType the blob content type
* @ param options options for blob creation
* @ return a complete blob information
* @ throws StorageException upon failure */
public Blob create ( String blob , byte [ ] content , String contentType , BlobTargetOption ... options ) { } }
|
BlobInfo blobInfo = BlobInfo . newBuilder ( BlobId . of ( getName ( ) , blob ) ) . setContentType ( contentType ) . build ( ) ; Tuple < BlobInfo , Storage . BlobTargetOption [ ] > target = BlobTargetOption . toTargetOptions ( blobInfo , options ) ; return storage . create ( target . x ( ) , content , target . y ( ) ) ;
|
public class AccountsInner { /** * Updates the specified Data Lake Analytics account to include the additional Data Lake Store account .
* @ param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account .
* @ param accountName The name of the Data Lake Analytics account to which to add the Data Lake Store account .
* @ param dataLakeStoreAccountName The name of the Data Lake Store account to add .
* @ param parameters The details of the Data Lake Store account .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > addDataLakeStoreAccountAsync ( String resourceGroupName , String accountName , String dataLakeStoreAccountName , AddDataLakeStoreParameters parameters , final ServiceCallback < Void > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( addDataLakeStoreAccountWithServiceResponseAsync ( resourceGroupName , accountName , dataLakeStoreAccountName , parameters ) , serviceCallback ) ;
|
public class MtasSolrCollectionCache { /** * Creates a new cached collection entry and returns its version string .
* @ param id the id
* @ param size the size
* @ param data the data
* @ return the string
* @ throws IOException Signals that an I / O exception has occurred . */
public String create ( String id , Integer size , HashSet < String > data , String originalVersion ) throws IOException { } }
|
// Creates (or re-creates under 'originalVersion') a cached collection entry:
// chooses a unique version id, registers it in the in-memory maps, and persists
// the encoded item to a file whose last-modified time records the creation date
// (used to reconstruct the administration on restart). On a write failure every
// registration made here is rolled back before rethrowing.
// NOTE(review): idToVersion.put(id, version) runs even when id is null,
// inserting a null key into the map — confirm whether that is intended.
if ( collectionCachePath != null ) { // initialization
Date date = clear ( ) ; // create always new version , unless explicit original version is provided
String version ; if ( originalVersion != null && versionToItem . containsKey ( originalVersion ) ) { version = originalVersion ; } else { do { version = UUID . randomUUID ( ) . toString ( ) ; } while ( versionToItem . containsKey ( version ) ) ; } // create new item
MtasSolrCollectionCacheItem item ; if ( id != null ) { item = new MtasSolrCollectionCacheItem ( id , size , data ) ; // remove if item with id already exists
deleteById ( id ) ; } else { item = new MtasSolrCollectionCacheItem ( version , size , data ) ; } // register
idToVersion . put ( id , version ) ; expirationVersion . put ( version , date . getTime ( ) + ( 1000 * lifeTime ) ) ; versionToItem . put ( version , item ) ; // store data in file
File file = collectionCachePath . resolve ( version ) . toFile ( ) ; try ( OutputStream outputStream = new FileOutputStream ( file ) ; Writer outputStreamWriter = new OutputStreamWriter ( outputStream , StandardCharsets . UTF_8 ) ; ) { outputStreamWriter . write ( encode ( item ) ) ; // set correct time to reconstruct administration on restart
if ( ! file . setLastModified ( date . getTime ( ) ) ) { log . debug ( "couldn't change filetime " + file . getAbsolutePath ( ) ) ; } // don ' t store data in memory
item . data = null ; // return version
return version ; } catch ( IOException e ) { idToVersion . remove ( id ) ; expirationVersion . remove ( version ) ; versionToItem . remove ( version ) ; throw new IOException ( "couldn't create " + version , e ) ; } } else { throw new IOException ( "no cachePath available, can't store data" ) ; }
|
public class ScatterChartModel { /** * Returns the value with the given index of the given dimension .
* @ param dim Reference dimension for value extraction
* @ param index Index of the desired value
* @ return Value with the given index of the given dimension */
@ Override public Number getValue ( ValueDimension dim , int index ) { } }
|
return values . get ( dim ) . get ( index ) ;
|
public class NettyServerHandler { /** * Handler for the Channel shutting down . */
@ Override public void channelInactive ( ChannelHandlerContext ctx ) throws Exception { } }
|
// Channel teardown: stop the keep-alive / idle / max-age bookkeeping, then
// report UNAVAILABLE to every still-active HTTP/2 stream so their transports
// observe the termination. super.channelInactive() runs in the finally block so
// the rest of the pipeline is always notified, even if stream cleanup throws.
try { if ( keepAliveManager != null ) { keepAliveManager . onTransportTermination ( ) ; } if ( maxConnectionIdleManager != null ) { maxConnectionIdleManager . onTransportTermination ( ) ; } if ( maxConnectionAgeMonitor != null ) { maxConnectionAgeMonitor . cancel ( false ) ; } final Status status = Status . UNAVAILABLE . withDescription ( "connection terminated for unknown reason" ) ; // Any streams that are still active must be closed
connection ( ) . forEachActiveStream ( new Http2StreamVisitor ( ) { @ Override public boolean visit ( Http2Stream stream ) throws Http2Exception { NettyServerStream . TransportState serverStream = serverStream ( stream ) ; if ( serverStream != null ) { serverStream . transportReportStatus ( status ) ; } return true ; } } ) ; } finally { super . channelInactive ( ctx ) ; }
|
public class JobStepsInner { /** * Creates or updates a job step . This will implicitly create a new job version .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param jobAgentName The name of the job agent .
* @ param jobName The name of the job .
* @ param stepName The name of the job step .
* @ param parameters The requested state of the job step .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the JobStepInner object */
public Observable < JobStepInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String jobAgentName , String jobName , String stepName , JobStepInner parameters ) { } }
|
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , jobName , stepName , parameters ) . map ( new Func1 < ServiceResponse < JobStepInner > , JobStepInner > ( ) { @ Override public JobStepInner call ( ServiceResponse < JobStepInner > response ) { return response . body ( ) ; } } ) ;
|
public class RestTemplateTransportClientFactory { /** * Provides the serialization configurations required by the Eureka Server . JSON
* content exchanged with eureka requires a root node matching the entity being
* serialized or deserialized . Achieved with
* { @ link SerializationFeature # WRAP _ ROOT _ VALUE } and
* { @ link DeserializationFeature # UNWRAP _ ROOT _ VALUE } .
* { @ link PropertyNamingStrategy . SnakeCaseStrategy } is applied to the underlying
* { @ link ObjectMapper } .
* @ return a { @ link MappingJackson2HttpMessageConverter } object */
public MappingJackson2HttpMessageConverter mappingJacksonHttpMessageConverter ( ) { } }
|
// Build a Jackson-based HTTP message converter wired for Eureka's JSON wire
// format: snake_case property names, a custom serializer modifier, and
// mix-ins shaping Applications/InstanceInfo (de)serialization. The XML
// mix-ins below are intentionally left commented out.
MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter ( ) ; converter . setObjectMapper ( new ObjectMapper ( ) . setPropertyNamingStrategy ( PropertyNamingStrategy . SNAKE_CASE ) ) ; SimpleModule jsonModule = new SimpleModule ( ) ; jsonModule . setSerializerModifier ( createJsonSerializerModifier ( ) ) ; // keyFormatter ,
// compact ) ) ;
// Eureka payloads carry a root node matching the entity, hence WRAP_ROOT_VALUE
// on writes and UNWRAP_ROOT_VALUE on reads.
converter . getObjectMapper ( ) . registerModule ( jsonModule ) ; converter . getObjectMapper ( ) . configure ( SerializationFeature . WRAP_ROOT_VALUE , true ) ; converter . getObjectMapper ( ) . configure ( DeserializationFeature . UNWRAP_ROOT_VALUE , true ) ; converter . getObjectMapper ( ) . addMixIn ( Applications . class , ApplicationsJsonMixIn . class ) ; converter . getObjectMapper ( ) . addMixIn ( InstanceInfo . class , InstanceInfoJsonMixIn . class ) ; // converter . getObjectMapper ( ) . addMixIn ( DataCenterInfo . class ,
// DataCenterInfoXmlMixIn . class ) ;
// converter . getObjectMapper ( ) . addMixIn ( InstanceInfo . PortWrapper . class ,
// PortWrapperXmlMixIn . class ) ;
// converter . getObjectMapper ( ) . addMixIn ( Application . class ,
// ApplicationXmlMixIn . class ) ;
// converter . getObjectMapper ( ) . addMixIn ( Applications . class ,
// ApplicationsXmlMixIn . class ) ;
return converter ;
|
public class HttpBody { /** * Appends the given { @ code contents } to the body , up to a certain length .
* If the { @ code contents } are { @ code null } or the { @ code length } negative or zero , the call to this method has no effect .
* @ param contents the contents to append , might be { @ code null }
* @ param length the length of contents to append */
public void append ( byte [ ] contents , int length ) { } }
|
if ( contents == null || length <= 0 ) { return ; } int len = Math . min ( contents . length , length ) ; if ( pos + len > body . length ) { byte [ ] newBody = new byte [ pos + len ] ; System . arraycopy ( body , 0 , newBody , 0 , pos ) ; body = newBody ; } System . arraycopy ( contents , 0 , body , pos , len ) ; pos += len ; cachedString = null ;
|
public class CmsJspStandardContextBean { /** * Returns if the current page is used to manage model groups . < p >
* @ return < code > true < / code > if the current page is used to manage model groups */
public boolean isModelGroupPage ( ) { } }
|
// Model-group management applies only when a page resource is available and
// the container-page service reports model-group editing for it.
CmsResource pageResource = getPageResource ( ) ;
if ( pageResource == null ) {
    return false ;
}
return CmsContainerpageService . isEditingModelGroups ( m_cms , pageResource ) ;
|
public class SavedHistoryCache { public synchronized void record ( String historyKey , LaJobHistory jobHistory , int limit ) { } }
|
// Evict oldest entries until there is room for the new record.
// BUGFIX: the previous single unconditional remove(0) threw
// IndexOutOfBoundsException when the key list was empty (e.g. limit <= 0),
// and could not catch up when the limit had been lowered between calls;
// a guarded loop handles both cases.
while ( historyMap . size ( ) >= limit && ! historyKeyList . isEmpty ( ) ) {
    final String oldestKey = historyKeyList . remove ( 0 ) ;
    historyMap . remove ( oldestKey ) ;
}
// Register the new record as the most recent entry.
// NOTE(review): re-recording an existing key leaves a duplicate in
// historyKeyList — confirm callers never reuse keys.
historyMap . put ( historyKey , jobHistory ) ;
historyKeyList . add ( historyKey ) ;
|
public class StreamsUtils { /** * < p > Generates a stream by regrouping the elements of the provided stream and putting them in a substream .
* This grouping operation scans the elements of the stream using the splitter predicate . When this predicate
* returns true , then the elements of the stream are accumulated in a subtream , until the splitter predicate
* is true again . In this case , a new substream is created , until the elements of the provided stream are
* exhausted . < / p >
* < p > In the case where several consecutive splitting elements are met , no empty stream is generated .
* The splitting element is added only once to the next substream if needed . < / p >
* < p > The boolean < code > included < / code > controls whether the splitting element is added to the substreams or not . < / p >
* < p > A { @ code { @ link NullPointerException } } is thrown if the stream to be grouped or the splitter is null . < / p >
* @ param stream The stream to be grouped . Will throw a < code > NullPointerException < / code > if < code > null < / code > .
* @ param splitter The predicate used to check for an splitting element .
* @ param included if true : includes the splitting element at the beginning of each substream
* @ param < E > The type of the elements of the provided stream .
* @ return A grouped stream of streams . */
public static < E > Stream < Stream < E > > group ( Stream < E > stream , Predicate < ? super E > splitter , boolean included ) { } }
|
// Fail fast on null inputs, as documented.
Objects . requireNonNull ( stream ) ;
Objects . requireNonNull ( splitter ) ;
// Wrap the source spliterator so elements are regrouped on splitting
// elements, preserving parallelism and propagating close() to the source.
final GroupingOnSplittingSpliterator < E > grouping =
        GroupingOnSplittingSpliterator . of ( stream . spliterator ( ) , splitter , included ) ;
return StreamSupport . stream ( grouping , stream . isParallel ( ) ) . onClose ( stream :: close ) ;
|
public class DoubleStream { /** * Performs a reduction on the elements of this stream , using an
* associative accumulation function , and returns an { @ code OptionalDouble }
* describing the reduced value , if any .
* < p > The { @ code accumulator } function must be an associative function .
* < p > This is a terminal operation .
* @ param accumulator the accumulation function
* @ return the result of the reduction
* @ see # reduce ( com . annimon . stream . function . DoubleBinaryOperator ) */
@ NotNull public OptionalDouble reduce ( @ NotNull DoubleBinaryOperator accumulator ) { } }
|
// Seed the accumulator with the first element, then fold the rest into it.
boolean seen = false ;
double accumulated = 0d ;
while ( iterator . hasNext ( ) ) {
    final double current = iterator . nextDouble ( ) ;
    if ( seen ) {
        accumulated = accumulator . applyAsDouble ( accumulated , current ) ;
    } else {
        accumulated = current ;
        seen = true ;
    }
}
// An empty stream has no reduction result.
return seen ? OptionalDouble . of ( accumulated ) : OptionalDouble . empty ( ) ;
|
public class HpelHelper { /** * D512713 - method to convert non - printable chars into a printable string for the log . */
private final static String escape ( String src ) { } }
|
// Lazily builds a copy of the input in which non-whitespace ISO control
// characters and unassigned code points are replaced by their \ uXXXX escape;
// the original string is returned untouched when nothing needed escaping.
// 'delta' tracks how far earlier replacements have shifted positions in the
// copy relative to the source index 'i'.
if ( src == null ) { return "" ; } StringBuffer result = null ; for ( int i = 0 , max = src . length ( ) , delta = 0 ; i < max ; i ++ ) { char c = src . charAt ( i ) ; if ( ! Character . isWhitespace ( c ) && Character . isISOControl ( c ) || Character . getType ( c ) == Character . UNASSIGNED ) { String hexVal = Integer . toHexString ( c ) ; String replacement = "\\u" + ( "0000" + hexVal ) . substring ( hexVal . length ( ) ) ; if ( result == null ) { result = new StringBuffer ( src ) ; } result . replace ( i + delta , i + delta + 1 , replacement ) ; delta += replacement . length ( ) - 1 ; } } if ( result == null ) { return src ; } else { return result . toString ( ) ; }
|
public class ChannelFlushPromiseNotifier { /** * Notify all { @ link ChannelFuture } s that were registered with { @ link # add ( ChannelPromise , int ) } and
* their pendingDatasize is smaller then the current writeCounter returned by { @ link # writeCounter ( ) } using
* the given cause1.
* After a { @ link ChannelFuture } was notified it will be removed from this { @ link ChannelFlushPromiseNotifier } and
* so not receive anymore notification .
* The rest of the remaining { @ link ChannelFuture } s will be failed with the given { @ link Throwable } .
* So after this operation this { @ link ChannelFutureListener } is empty .
* @ param cause1 the { @ link Throwable } which will be used to fail all of the { @ link ChannelFuture } s which
* pendingDataSize is smaller then the current writeCounter returned by { @ link # writeCounter ( ) }
* @ param cause2 the { @ link Throwable } which will be used to fail the remaining { @ link ChannelFuture } s */
public ChannelFlushPromiseNotifier notifyPromises ( Throwable cause1 , Throwable cause2 ) { } }
|
notifyPromises0 ( cause1 ) ; for ( ; ; ) { FlushCheckpoint cp = flushCheckpoints . poll ( ) ; if ( cp == null ) { break ; } if ( tryNotify ) { cp . promise ( ) . tryFailure ( cause2 ) ; } else { cp . promise ( ) . setFailure ( cause2 ) ; } } return this ;
|
public class HtmlTableTemplate { /** * Get the TD element
* @ param root
* @ param column
* @ param row
* @ return */
public static TableCellElement getCell ( Element root , int column , int row ) { } }
|
// Navigate root -> tbody -> row -> cell, casting DOM children as we descend.
TableSectionElement bodySection = getTBodyElement ( root ) ;
TableRowElement rowElement = bodySection . getChild ( row ) . cast ( ) ;
TableCellElement cellElement = rowElement . getChild ( column ) . cast ( ) ;
return cellElement ;
|
public class OpentracingService { /** * " An Tags . ERROR tag SHOULD be added to a Span on failed operations .
* It means for any server error ( 5xx ) codes . If there is an exception
* object available the implementation SHOULD also add logs event = error
* and error . object = < error object instance > to the active span . "
* https : / / github . com / eclipse / microprofile - opentracing / blob / master / spec / src / main / asciidoc / microprofile - opentracing . asciidoc # server - span - tags
* @ param span The span to add the information to .
* @ param exception Optional exception details . */
public static void addSpanErrorInfo ( Span span , Throwable exception ) { } }
|
// Tag the span as errored and, when an exception is supplied, attach a
// structured log entry (event=error, error.object=<throwable>) as required by
// the MicroProfile OpenTracing server-span rules cited on the signature.
String methodName = "addSpanErrorInfo" ; span . setTag ( Tags . ERROR . getKey ( ) , true ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " error" , Boolean . TRUE ) ; } if ( exception != null ) { Map < String , Object > log = new HashMap < > ( ) ; // https : / / github . com / opentracing / specification / blob / master / semantic _ conventions . md # log - fields - table
log . put ( "event" , "error" ) ; // Throwable implements Serializable so all exceptions are serializable
log . put ( "error.object" , exception ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " adding log entry" , log ) ; } span . log ( log ) ; }
|
public class InternalXbaseParser { /** * InternalXbase . g : 1433:1 : entryRuleXReturnExpression : ruleXReturnExpression EOF ; */
public final void entryRuleXReturnExpression ( ) throws RecognitionException { } }
|
// ANTLR-generated entry rule: parses a complete XReturnExpression followed by
// EOF, with backtracking-aware before/after grammar-access notifications.
// Recognition errors are reported and recovered from rather than propagated.
// Do not hand-edit: regenerated from InternalXbase.g.
try { // InternalXbase . g : 1434:1 : ( ruleXReturnExpression EOF )
// InternalXbase . g : 1435:1 : ruleXReturnExpression EOF
{ if ( state . backtracking == 0 ) { before ( grammarAccess . getXReturnExpressionRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; ruleXReturnExpression ( ) ; state . _fsp -- ; if ( state . failed ) return ; if ( state . backtracking == 0 ) { after ( grammarAccess . getXReturnExpressionRule ( ) ) ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { } return ;
|
public class Schemas { /** * Index info from IndexDefinition
* @ param indexReference
* @ param schemaRead
* @ param tokens
* @ return */
private IndexConstraintNodeInfo nodeInfoFromIndexDefinition ( IndexReference indexReference , SchemaRead schemaRead , TokenNameLookup tokens ) { } }
|
// Extract the single label backing this index; multi-token schemas are not
// supported by this report.
int [ ] labelIds = indexReference . schema ( ) . getEntityTokenIds ( ) ;
if ( labelIds . length != 1 ) {
    throw new IllegalStateException ( "Index with more than one label" ) ;
}
String labelName = tokens . labelGetName ( labelIds [ 0 ] ) ;
List < String > properties = new ArrayList < > ( ) ;
Arrays . stream ( indexReference . properties ( ) ) . forEach ( ( i ) -> properties . add ( tokens . propertyKeyGetName ( i ) ) ) ;
try {
    return new IndexConstraintNodeInfo ( // Pretty print for index name
            String . format ( ":%s(%s)" , labelName , StringUtils . join ( properties , "," ) ) ,
            labelName ,
            properties ,
            schemaRead . indexGetState ( indexReference ) . toString ( ) ,
            ! indexReference . isUnique ( ) ? "INDEX" : "UNIQUENESS" ,
            schemaRead . indexGetState ( indexReference ) . equals ( InternalIndexState . FAILED )
                    ? schemaRead . indexGetFailure ( indexReference )
                    : "NO FAILURE" ,
            // BUGFIX: multiply before dividing — the previous
            // completed / total * 100 truncated to 0 (or 100) under
            // integer division, so population progress was always wrong.
            schemaRead . indexGetPopulationProgress ( indexReference ) . getCompleted ( ) * 100
                    / schemaRead . indexGetPopulationProgress ( indexReference ) . getTotal ( ) ,
            schemaRead . indexSize ( indexReference ) ,
            schemaRead . indexUniqueValuesSelectivity ( indexReference ) ,
            indexReference . userDescription ( tokens ) ) ;
} catch ( IndexNotFoundKernelException e ) {
    // Index vanished between listing and inspection: report placeholders.
    return new IndexConstraintNodeInfo ( // Pretty print for index name
            String . format ( ":%s(%s)" , labelName , StringUtils . join ( properties , "," ) ) ,
            labelName ,
            properties ,
            "NOT_FOUND" ,
            ! indexReference . isUnique ( ) ? "INDEX" : "UNIQUENESS" ,
            "NOT_FOUND" ,
            0 , 0 , 0 ,
            indexReference . userDescription ( tokens ) ) ;
}
|
public class DetectPolygonFromContour { /** * Checks to see if some part of the contour touches the image border . Most likely cropped */
protected final boolean touchesBorder ( List < Point2D_I32 > contour ) { } }
|
// A contour touches the border if any point lies on row/column 0 or on the
// last row/column of the image.
final int lastX = imageWidth - 1 ;
final int lastY = imageHeight - 1 ;
for ( Point2D_I32 point : contour ) {
    if ( point . x == 0 || point . y == 0 || point . x == lastX || point . y == lastY ) {
        return true ;
    }
}
return false ;
|
public class RMIRegistryManager { /** * Checks if a service is exported on the rmi registry with specified port .
* @ param port
* @ return true if rmiregistry is running on the specified port and service
* is exported on the rmi registry , false otherwise */
public static boolean isServiceExported ( Configuration configuration , int port , String serviceName ) { } }
|
// Without a service name there is nothing to look up.
if ( serviceName == null ) {
    return false ;
}
try {
    final Registry registry = RegistryFinder . getInstance ( ) . getRegistry ( configuration , port ) ;
    final String [ ] boundNames = registry . list ( ) ;
    if ( boundNames != null ) {
        for ( String boundName : boundNames ) {
            if ( serviceName . equals ( boundName ) ) {
                return true ;
            }
        }
    }
} catch ( Exception e ) {
    // No registry on that port, or it could not be contacted.
    return false ;
}
return false ;
|
public class CustomizedProcessor { /** * ~ Methoden - - - - - */
@ Override public int print ( ChronoDisplay formattable , Appendable buffer , AttributeQuery attributes , Set < ElementPosition > positions , // optional
boolean quickPath ) throws IOException { } }
|
// Prints this.element's value through the configured child printer. In quick
// mode with an optimizable printer, the child formatter's own attributes
// replace the supplied ones. Element positions are only recorded when the
// target buffer exposes its current length (CharSequence), since offsets are
// computed relative to it; otherwise output is appended without positions.
if ( quickPath && this . optPrinter ) { attributes = ChronoFormatter . class . cast ( this . printer ) . getAttributes ( ) ; } // special optimization avoiding double conversion from Moment to ZonalDateTime
if ( this . passThroughZDT && ( formattable instanceof ZonalDateTime ) && ( positions == null ) ) { ChronoFormatter < ? > cf = ( ChronoFormatter < ? > ) this . printer ; cf . print ( formattable , buffer , attributes , false ) ; return Integer . MAX_VALUE ; // unknown number of printed characters
} V value = formattable . get ( this . element ) ; StringBuilder collector = new StringBuilder ( ) ; if ( ( positions != null ) && ( buffer instanceof CharSequence ) ) { int offset = ( ( CharSequence ) buffer ) . length ( ) ; if ( this . printer instanceof ChronoFormatter ) { ChronoFormatter < ? > cf = ChronoFormatter . class . cast ( this . printer ) ; Set < ElementPosition > result = print ( cf , value , collector , attributes ) ; Set < ElementPosition > set = new LinkedHashSet < > ( ) ; for ( ElementPosition ep : result ) { set . add ( new ElementPosition ( ep . getElement ( ) , offset + ep . getStartIndex ( ) , offset + ep . getEndIndex ( ) ) ) ; } positions . addAll ( set ) ; } else { this . printer . print ( value , collector , attributes ) ; } positions . add ( new ElementPosition ( this . element , offset , offset + collector . length ( ) ) ) ; } else { this . printer . print ( value , collector , attributes ) ; } buffer . append ( collector ) ; return collector . length ( ) ;
|
public class ThrottlingException { /** * The payload associated with the exception .
* The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service .
* Users of the SDK should not perform Base64 encoding on this field .
* Warning : ByteBuffers returned by the SDK are mutable . Changes to the content or position of the byte buffer will
* be seen by all objects that have a reference to this object . It is recommended to call ByteBuffer . duplicate ( ) or
* ByteBuffer . asReadOnlyBuffer ( ) before using or reading from the buffer . This behavior will be changed in a future
* major version of the SDK .
* @ param payload
* The payload associated with the exception . */
@ com . fasterxml . jackson . annotation . JsonProperty ( "payload" ) public void setPayload ( java . nio . ByteBuffer payload ) { } }
|
// Plain field assignment; the SDK performs the documented Base64 handling
// during marshalling. The buffer is stored as-is, so it remains mutable and
// shared with the caller (see the warning on the signature).
this . payload = payload ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcTimeMeasure ( ) { } }
|
// EMF-generated lazy lookup: resolve the classifier from the registered Ifc4
// package on first access (classifier index 886) and cache it thereafter.
if ( ifcTimeMeasureEClass == null ) { ifcTimeMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 886 ) ; } return ifcTimeMeasureEClass ;
|
public class DataSynchronizer { /** * Adds and returns a document with a new version to the given document .
* @ param document the document to attach a new version to .
* @ param newVersion the version to attach to the document
* @ return a document with a new version to the given document . */
private static BsonDocument withNewVersion ( final BsonDocument document , final BsonDocument newVersion ) { } }
|
// Work on a defensive copy so the caller's document is left untouched.
final BsonDocument versioned = BsonUtils . copyOfDocument ( document ) ;
versioned . put ( DOCUMENT_VERSION_FIELD , newVersion ) ;
return versioned ;
|
public class SQLSupport { /** * Create a SQL order fragment from the list of { @ link Sort } objects . This fragment does not begin with
* ORDER BY and is just the < i > fragment < / i > for such a clause . If the given list of
* sorts contains a sort with sort expression " foo " and sort direction { @ link SortDirection # DESCENDING } ,
* the generated SQL statement will appear as :
* < pre >
* foo DESC
* < / pre >
* @ param sorts the list of { @ link Sort } objects
* @ return the generated SQL statement order fragment or an emtpy string if there are no sorts */
public final String createOrderByFragment ( List /* < Sort > */
sorts ) { } }
|
// No sorts means no fragment: return the shared empty string.
if ( sorts == null || sorts . isEmpty ( ) ) {
    return EMPTY ;
}
// Delegate the actual SQL assembly to the shared helper.
final InternalStringBuilder fragment = new InternalStringBuilder ( ) ;
internalCreateOrderByFragment ( fragment , sorts ) ;
return fragment . toString ( ) ;
|
public class SingleSubscriberProcessor { /** * Get the current { @ link Subscriber } .
* @ return The { @ link Subscriber }
* @ throws IllegalStateException if the subscriber is not present */
protected Subscriber < ? super R > getSubscriber ( ) { } }
|
// Snapshot the current subscriber, then let verifyState raise the documented
// IllegalStateException if it is absent.
final Subscriber < ? super R > current = this . subscriber . get ( ) ;
verifyState ( current ) ;
return current ;
|
public class Caster { /** * cast a Object to a Query Object
* @ param o Object to cast
* @ param duplicate duplicate the object or not
* @ param defaultValue
* @ return casted Query Object */
public static Query toQuery ( Object o , boolean duplicate , Query defaultValue ) { } }
|
// Defaulting variant: swallow the PageException from the strict cast and hand
// back the caller-supplied fallback instead.
try {
    return toQuery ( o , duplicate ) ;
}
catch ( PageException e ) {
    return defaultValue ;
}
|
public class EthereumUtil { /** * Determines the size of a RLP list . Note : it does not change the position in the ByteBuffer
* @ param bb
* @ return - 1 if not an RLP encoded list , otherwise size of list INCLUDING the prefix of the list ( e . g . byte that indicates that it is a list and size of list in bytes ) in bytes */
public static long getRLPListSize ( ByteBuffer bb ) { } }
|
long result = - 1 ;
// Remember the position so the caller's cursor is restored before returning.
bb . mark ( ) ;
byte detector = bb . get ( ) ;
int unsignedDetector = detector & 0xFF ;
if ( ( unsignedDetector >= 0xc0 ) && ( unsignedDetector <= 0xf7 ) ) {
    // Small list: payload length is (prefix - 0xc0); add 1 for the prefix
    // byte itself so the result includes the prefix, as documented.
    // BUGFIX: previously returned the raw prefix value (e.g. 192 for an
    // empty list) instead of the total encoded size, which contradicted
    // both the Javadoc and the large-list branch below.
    result = 1L + ( unsignedDetector - 0xc0 ) ;
} else if ( ( unsignedDetector >= 0xf8 ) && ( unsignedDetector <= 0xff ) ) {
    // Large list: the prefix encodes how many bytes the length field uses.
    int noOfBytesSize = unsignedDetector - 0xf7 ;
    byte [ ] indicator = new byte [ noOfBytesSize + 1 ] ;
    indicator [ 0 ] = detector ;
    bb . get ( indicator , 1 , noOfBytesSize ) ;
    // Total size = prefix byte + length field + payload length.
    result = indicator . length + convertIndicatorToRLPSize ( indicator ) ;
}
bb . reset ( ) ;
return result ;
|
public class AbstractCompensatingTransactionManagerDelegate { /** * @ seeorg . springframework . jdbc . datasource . DataSourceTransactionManager #
* doGetTransaction ( ) */
public Object doGetTransaction ( ) throws TransactionException { } }
|
// Wrap whatever compensating-transaction holder is currently bound to this
// thread (possibly null, meaning a fresh transaction) in a transaction object.
final Object boundResource = TransactionSynchronizationManager . getResource ( getTransactionSynchronizationKey ( ) ) ;
final CompensatingTransactionHolderSupport holder = ( CompensatingTransactionHolderSupport ) boundResource ;
return new CompensatingTransactionObject ( holder ) ;
|
public class FnLong { /** * Determines whether the target object and the specified object are NOT equal
* by calling the < tt > equals < / tt > method on the target object .
* @ param object the { @ link Long } to compare to the target
* @ return false if both objects are equal , true if not . */
public static final Function < Long , Boolean > notEq ( final Long object ) { } }
|
// Delegate to the generic FnObject.notEq and narrow the result back to
// Function<Long, Boolean>; the intermediate raw-Function cast is what makes
// the unchecked generic conversion compile.
return ( Function < Long , Boolean > ) ( ( Function ) FnObject . notEq ( object ) ) ;
|
public class KieBuilderImpl { /** * This can be used for performance reason to avoid the recomputation of the pomModel when it is already available */
public void setPomModel ( PomModel pomModel ) { } }
|
// Cache the pre-computed model and, when the source file system still holds
// the pom.xml, keep its raw bytes alongside it.
this . pomModel = pomModel ;
if ( srcMfs . isAvailable ( "pom.xml" ) ) {
    this . pomXml = srcMfs . getBytes ( "pom.xml" ) ;
}
|
public class BaseCommandTask { /** * Returns the value at i + 1 , guarding against ArrayOutOfBound exceptions .
* If the next element is not a value but an argument flag ( starts with - )
* return null .
* @ return String value as defined above , or { @ code null } if at end of args . */
protected String getValue ( String arg ) { } }
|
// Split "name=value"; with no '=' present there is no value to return.
// A DN-style argument may itself contain '=' characters, so for longer
// splits everything after the first separator is rejoined verbatim.
String [ ] split = arg . split ( "=" ) ;
if ( split . length == 1 ) {
    return null ;
} else if ( split . length == 2 ) {
    return split [ 1 ] ;
} else {
    // Handle DN case with multiple '=' characters.
    // StringBuilder replaces the legacy synchronized StringBuffer — the
    // accumulator is purely local, so no synchronization is needed.
    StringBuilder value = new StringBuilder ( ) ;
    for ( int i = 1 ; i < split . length ; i ++ ) {
        value . append ( split [ i ] ) ;
        if ( i < split . length - 1 ) {
            value . append ( "=" ) ;
        }
    }
    return value . toString ( ) ;
}
|
public class FileSystem { /** * Filter files / directories in the given path using the user - supplied path
* filter . Results are added to the given array < code > results < / code > . */
private void listStatus ( ArrayList < FileStatus > results , Path f , PathFilter filter ) throws IOException { } }
|
FileStatus listing [ ] = listStatus ( f ) ; if ( listing == null ) { throw new FileNotFoundException ( "File " + f + " does not exist" ) ; } for ( int i = 0 ; i < listing . length ; i ++ ) { if ( filter . accept ( listing [ i ] . getPath ( ) ) ) { results . add ( listing [ i ] ) ; } }
|
public class PatternUtils { /** * Pre - processes the pattern by handling backslash escapes such as \ b and \ 007. */
public static String preProcessPattern ( String pattern ) { } }
|
// Scans the pattern for backslash escapes and rewrites the supported ones
// (\b \f \n \r \t, 1-3 octal digits, 1-2 hex digits after \x) into their
// literal characters. Unknown escapes keep the escaped character, a trailing
// lone backslash is preserved, and the pattern is returned unchanged when it
// contains no backslash at all.
int index = pattern . indexOf ( '\\' ) ; if ( index < 0 ) { return pattern ; } StringBuilder sb = new StringBuilder ( ) ; for ( int pos = 0 ; pos < pattern . length ( ) ; ) { char ch = pattern . charAt ( pos ) ; if ( ch != '\\' ) { sb . append ( ch ) ; pos ++ ; continue ; } if ( pos + 1 >= pattern . length ( ) ) { // we ' ll end the pattern with a ' \ \ ' char
sb . append ( ch ) ; break ; } ch = pattern . charAt ( ++ pos ) ; switch ( ch ) { case 'b' : sb . append ( '\b' ) ; pos ++ ; break ; case 'f' : sb . append ( '\f' ) ; pos ++ ; break ; case 'n' : sb . append ( '\n' ) ; pos ++ ; break ; case '0' : case '1' : case '2' : case '3' : case '4' : case '5' : case '6' : case '7' : { // 1-3 octal characters : \ 1 \ 01 or \ 017
pos += radixCharsToChar ( sb , pattern , pos , 3 , 8 ) ; break ; } case 'r' : sb . append ( '\r' ) ; pos ++ ; break ; case 't' : sb . append ( '\t' ) ; pos ++ ; break ; case 'x' : { // 1-2 hex characters : \ xD or \ xD9
int adjust = radixCharsToChar ( sb , pattern , pos + 1 , 2 , 16 ) ; if ( adjust > 0 ) { // adjust by 1 for the x
pos += 1 + adjust ; } else { sb . append ( ch ) ; pos ++ ; } break ; } case ' ' : case '\\' : default : sb . append ( ch ) ; pos ++ ; break ; } } return sb . toString ( ) ;
|
public class JmesPathCodeGenVisitor { /** * Generates the code for a new JmesPathSubExpression .
* @ param subExpression JmesPath subexpression type
* @ param aVoid void
* @ return String that represents a call to
* the new subexpression
* @ throws InvalidTypeException */
@ Override public String visit ( final JmesPathSubExpression subExpression , final Void aVoid ) throws InvalidTypeException { } }
|
// Generate code for every child expression and join the pieces into a single
// constructor call of the form "new JmesPathSubExpression( a,b,... )".
final String opening = "new JmesPathSubExpression( " ;
return subExpression . getExpressions ( )
        . stream ( )
        . map ( expression -> expression . accept ( this , aVoid ) )
        . collect ( Collectors . joining ( "," , opening , ")" ) ) ;
|
public class CommsByteBuffer { /** * This method will fill a buffer list with WsByteBuffer ' s that when put together form a
* packet describing an exception and it ' s linked exceptions . It will traverse down the cause
* exceptions until one of them is null .
* @ param throwable The exception to examine .
* @ param probeId The probe id ( if any ) associated with the top level exception .
* @ param conversation The conversation that the exception data will be sent over . This is
* used to determine the level of FAP being used - and hence how to
* encode certain exception information . */
public synchronized void putException ( Throwable throwable , String probeId , Conversation conversation ) { } }
|
// Serializes an exception chain into the buffer: first a count of exceptions,
// then each exception's id/details, walking getCause() until it is null. The
// probe id accompanies only the top-level exception. When the handshake has
// established a FAP level of 9 or above, the Reasonable/SIException reason
// code and inserts are appended as well; otherwise the pre-FAP-9 format is
// assumed (e.g. when an exception occurs during handshaking itself).
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "putException" , new Object [ ] { throwable , probeId , conversation } ) ; Throwable currentException = throwable ; // First we need to work out how many exceptions to send back
short numberOfExceptions = 0 ; while ( currentException != null ) { currentException = currentException . getCause ( ) ; numberOfExceptions ++ ; } // Now add them to the buffer
currentException = throwable ; // First put in the buffer how many exceptions are being sent back
putShort ( numberOfExceptions ) ; // Now iterate over the rest
while ( currentException != null ) { short exceptionId = getExceptionId ( currentException ) ; addException ( currentException , exceptionId , probeId ) ; // Now get the next one in the chain
currentException = currentException . getCause ( ) ; // Ensure we null out the probe - this doesn ' t apply for any more exceptions
probeId = null ; } final HandshakeProperties handshakeProperties = conversation . getHandshakeProperties ( ) ; if ( ( handshakeProperties != null ) && ( ( CATHandshakeProperties ) handshakeProperties ) . isFapLevelKnown ( ) ) { // Only do FAP level checking if we know the FAP level - otherwise , assume a pre - FAP
// 9 format for the exception flow . We can find ourselves in a situation where we
// don ' t know the FAP level if , for example , an exception is thrown during handshaking .
final int fapLevel = ( ( CATHandshakeProperties ) handshakeProperties ) . getFapLevel ( ) ; if ( fapLevel >= JFapChannelConstants . FAP_VERSION_9 ) { // At FAP version 9 or greater we transport the reason and inserts
// of any exception which implements the Reasonable interface , or
// inherits from SIException or SIErrorException .
int reason = Reasonable . DEFAULT_REASON ; String inserts [ ] = Reasonable . DEFAULT_INSERTS ; if ( throwable instanceof Reasonable ) { reason = ( ( Reasonable ) throwable ) . getExceptionReason ( ) ; inserts = ( ( Reasonable ) throwable ) . getExceptionInserts ( ) ; } else if ( throwable instanceof SIException ) { reason = ( ( SIException ) throwable ) . getExceptionReason ( ) ; inserts = ( ( SIException ) throwable ) . getExceptionInserts ( ) ; } else if ( throwable instanceof SIErrorException ) { reason = ( ( SIErrorException ) throwable ) . getExceptionReason ( ) ; inserts = ( ( SIErrorException ) throwable ) . getExceptionInserts ( ) ; } putInt ( reason ) ; putShort ( inserts . length ) ; for ( int i = 0 ; i < inserts . length ; ++ i ) { putString ( inserts [ i ] ) ; } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "putException" ) ;
|
public class MesosToSchedulerDriverAdapter { /** * Broken out into a separate function to allow testing with custom ` Mesos ` implementations . */
@ VisibleForTesting protected Mesos startInternal ( ) { } }
|
// The API version is chosen via the MESOS_API_VERSION environment variable,
// defaulting to the V0 (driver-based) adapter when unset.
String apiVersion = System . getenv ( "MESOS_API_VERSION" ) ;
if ( apiVersion == null ) {
    apiVersion = "V0" ;
}
LOGGER . info ( "Using Mesos API version: {}" , apiVersion ) ;
switch ( apiVersion ) {
    case "V0" :
        return credential == null
                ? new V0Mesos ( this , frameworkInfo , master )
                : new V0Mesos ( this , frameworkInfo , master , credential ) ;
    case "V1" :
        return credential == null
                ? new V1Mesos ( this , master )
                : new V1Mesos ( this , master , credential ) ;
    default :
        throw new IllegalArgumentException ( "Unsupported API version: " + apiVersion ) ;
}
|
public class GoroService { /** * Initialize GoroService which will allow you to use { @ code Goro . bindXXX } methods .
* @ param context context instance used to enable GoroService component
* @ param goro instance of Goro that should be used by the service */
public static void setup ( final Context context , final Goro goro ) { } }
|
// Both preconditions are hard failures: a null Goro instance and a call made
// off the main thread indicate programming errors.
if ( goro == null ) {
    throw new IllegalArgumentException ( "Goro instance cannot be null" ) ;
}
if ( ! Util . checkMainThread ( ) ) {
    throw new IllegalStateException ( "GoroService.setup must be called on the main thread" ) ;
}
GoroService . goro = goro ;
// Enable the service component so Goro.bindXXX calls can reach it.
context . getPackageManager ( ) . setComponentEnabledSetting (
        new ComponentName ( context , GoroService . class ) ,
        COMPONENT_ENABLED_STATE_ENABLED ,
        DONT_KILL_APP ) ;
|
public class AnycastOutputHandler { /** * Method to handle a ControlBrowseGet message from an RME
* @ param remoteME The UUID of the RME
* @ param browseId The unique browseId , relative to this RME
* @ param selector The selector , valid only when seqNum = 0
* @ param seqNum The cursor position in the browse */
private final void handleControlBrowseGet ( SIBUuid8 remoteME , SIBUuid12 gatheringTargetDestUuid , long browseId , SelectionCriteria criteria , long seqNum ) { } }
|
// Handles a ControlBrowseGet from a remote ME. seqNum == 0 with no existing
// session creates a new AOBrowserSession (resolving the gathering target and
// browse cursor first); a non-zero seqNum without a session is out-of-order
// and is answered with a BrowseEnd error code. An existing or newly created
// session advances its cursor via next(seqNum) and is removed from the table
// once closed. Requests arriving after close-and-flush has started are
// silently dropped under the same lock that guards the session table.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "handleControlBrowseGet" , new Object [ ] { remoteME , gatheringTargetDestUuid , Long . valueOf ( browseId ) , criteria , Long . valueOf ( seqNum ) } ) ; // first we see if there is an existing AOBrowseSession
AOBrowserSessionKey key = new AOBrowserSessionKey ( remoteME , gatheringTargetDestUuid , browseId ) ; AOBrowserSession session ; synchronized ( this ) { if ( startedCloseAndFlush ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "handleControlBrowseGet" ) ; return ; } session = ( AOBrowserSession ) browserSessionTable . get ( key ) ; } if ( session == null ) { // there is not an existing session
if ( seqNum == 0 ) { // this message indicates a start of session , so create a session
try { JSConsumerManager streamConsumerDispatcher = null ; if ( ! isPubSub ) { try { // If we are performing a remote gather we need to work out where to gather from
DestinationHandler dest = destinationHandler ; if ( gatheringTargetDestUuid != null ) { // We are a performing a remote gather . We need to lookup the destination that the gatheringUuid
// refers to . It may be an alias ( used for scoping the MEs ) , or the real destination we
// are on ( no scoping ) , or may not exist at all ( exception thrown and caught below )
dest = mp . getDestinationManager ( ) . getDestination ( gatheringTargetDestUuid , true ) ; } // Lookup the relevant ConsumerManager for this AOStream ( This will either be a local consumer dispatcher
// usually - or a gathering consumer dispatcher for a remote gatherer )
streamConsumerDispatcher = ( JSConsumerManager ) dest . chooseConsumerManager ( gatheringTargetDestUuid , null , null ) ; } catch ( SIException e ) { // FFDC
// Couldnt locate the gathering target MEs so flush the stream
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.AnycastOutputHandler.handleControlBrowseGet" , "1:1388:1.89.4.1" , this ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.impl.AnycastOutputHandler" , "1:1394:1.89.4.1" } ) ; throw e ; } } else streamConsumerDispatcher = pubSubConsumerDispatcher ; BrowseCursor browseCursor = ( ( Browsable ) streamConsumerDispatcher ) . getBrowseCursor ( criteria ) ; // next , create the session , and add it to the table
session = new AOBrowserSession ( this , browseCursor , remoteME , gatheringTargetDestUuid , browseId , mp . getAlarmManager ( ) ) ; key = session . getKey ( ) ; AOBrowserSession existing = ( AOBrowserSession ) browserSessionTable . put ( key , session ) ; if ( existing != null ) { // log error ( probable bug since two BrowseGets with same browseId , remoteME and seqNum = = 0
// must have been received )
SIErrorException e = new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.impl.AnycastOutputHandler" , "1:1422:1.89.4.1" } , null ) ) ; // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.AnycastOutputHandler.handleControlBrowseGet" , "1:1429:1.89.4.1" , this ) ; SibTr . exception ( tc , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0001" , new Object [ ] { "com.ibm.ws.sib.processor.impl.AnycastOutputHandler" , "1:1436:1.89.4.1" } ) ; existing . close ( ) ; } } catch ( SISelectorSyntaxException e1 ) { FFDCFilter . processException ( e1 , "com.ibm.ws.sib.processor.impl.AnycastOutputHandler.handleControlBrowseGet" , "1:1188:1.77" , this ) ; SibTr . exception ( tc , e1 ) ; sendBrowseEnd ( remoteME , gatheringTargetDestUuid , browseId , SIMPConstants . BROWSE_BAD_FILTER ) ; session = null ; } catch ( SIException e1 ) { FFDCFilter . processException ( e1 , "com.ibm.ws.sib.processor.impl.AnycastOutputHandler.handleControlBrowseGet" , "1:1454:1.89.4.1" , this ) ; SibTr . exception ( tc , e1 ) ; sendBrowseEnd ( remoteME , gatheringTargetDestUuid , browseId , SIMPConstants . BROWSE_STORE_EXCEPTION ) ; session = null ; } // end catch
} // end if ( seqNum = = 0)
else { // this ControlBrowseGet is out - of - order .
// Return error to the remote ME
sendBrowseEnd ( remoteME , gatheringTargetDestUuid , browseId , SIMPConstants . BROWSE_OUT_OF_ORDER ) ; } } // process the message if session exists / created
if ( session != null ) { boolean closed = session . next ( seqNum ) ; if ( closed ) { browserSessionTable . remove ( key ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "handleControlBrowseGet" ) ;
|
public class Utility_Validation { public static void displayTargetsData ( AnnotationTargetsImpl_Targets annotationTargets , PrintWriter writer ) { } }
|
Util_BidirectionalMap packageData = annotationTargets . getPackageAnnotationData ( ) ; int uniquePackages = packageData . getHolderSet ( ) . size ( ) ; int uniquePackageAnnotations = packageData . getHeldSet ( ) . size ( ) ; Util_BidirectionalMap classData = annotationTargets . getClassAnnotationData ( ) ; int uniqueClasses = classData . getHolderSet ( ) . size ( ) ; int uniqueClassAnnotations = classData . getHeldSet ( ) . size ( ) ; Util_BidirectionalMap fieldData = annotationTargets . getFieldAnnotationData ( ) ; int uniqueClassesWithFieldAnnotations = fieldData . getHolderSet ( ) . size ( ) ; int uniqueFieldAnnotations = fieldData . getHeldSet ( ) . size ( ) ; Util_BidirectionalMap methodData = annotationTargets . getMethodAnnotationData ( ) ; int uniqueClassesWithMethodAnnotations = methodData . getHolderSet ( ) . size ( ) ; int uniqueMethodAnnotations = methodData . getHeldSet ( ) . size ( ) ; writer . println ( "Targets data:" ) ; writer . println ( " Packages with annotations [ " + Integer . toString ( uniquePackages ) + " ]" ) ; writer . println ( " Unique packages annotations [ " + Integer . toString ( uniquePackageAnnotations ) + " ]" ) ; writer . println ( " Classes with class annotations [ " + Integer . toString ( uniqueClasses ) + " ]" ) ; writer . println ( " Unique class annotations [ " + Integer . toString ( uniqueClassAnnotations ) + " ]" ) ; writer . println ( " Classes with field annotations [ " + Integer . toString ( uniqueClassesWithFieldAnnotations ) + " ]" ) ; writer . println ( " Unique field annotations [ " + Integer . toString ( uniqueFieldAnnotations ) + " ]" ) ; writer . println ( " Classes with method annotations [ " + Integer . toString ( uniqueClassesWithMethodAnnotations ) + " ]" ) ; writer . println ( " Unique method annotations [ " + Integer . toString ( uniqueMethodAnnotations ) + " ]" ) ;
|
public class SpecTopic { /** * Gets the list of Level Relationships for this topic whose type is " PREREQUISITE " .
* @ return A list of prerequisite level relationships */
public List < TargetRelationship > getPrerequisiteLevelRelationships ( ) { } }
|
final ArrayList < TargetRelationship > relationships = new ArrayList < TargetRelationship > ( ) ; for ( final TargetRelationship relationship : levelRelationships ) { if ( relationship . getType ( ) == RelationshipType . PREREQUISITE ) { relationships . add ( relationship ) ; } } return relationships ;
|
public class TrafficForecastAdjustment { /** * Gets the forecastAdjustmentSegments value for this TrafficForecastAdjustment .
* @ return forecastAdjustmentSegments * Each adjustment segment is a forecast adjustment targeting
* a continuous date range . */
public com . google . api . ads . admanager . axis . v201902 . TrafficForecastAdjustmentSegment [ ] getForecastAdjustmentSegments ( ) { } }
|
return forecastAdjustmentSegments ;
|
public class DataTableCore { /** * Optional parameter defining which columns are selected when the datatable is initially rendered . If this attribute is an integer , it ' s the column index . If it ' s a string , it ' s a jQuery expression . Automatically sets selection = ' true ' and selected - items = ' column ' . < P >
* @ return Returns the value of the attribute , or null , if it hasn ' t been set by the JSF file . */
public java . lang . Object getSelectedColumn ( ) { } }
|
return ( java . lang . Object ) getStateHelper ( ) . eval ( PropertyKeys . selectedColumn ) ;
|
public class ExampleProvider { /** * 根据Example更新
* @ param ms
* @ return */
public String updateByExample ( MappedStatement ms ) { } }
|
Class < ? > entityClass = getEntityClass ( ms ) ; StringBuilder sql = new StringBuilder ( ) ; if ( isCheckExampleEntityClass ( ) ) { sql . append ( SqlHelper . exampleCheck ( entityClass ) ) ; } // 安全更新 , Example 必须包含条件
if ( getConfig ( ) . isSafeUpdate ( ) ) { sql . append ( SqlHelper . exampleHasAtLeastOneCriteriaCheck ( "example" ) ) ; } sql . append ( SqlHelper . updateTable ( entityClass , tableName ( entityClass ) , "example" ) ) ; sql . append ( SqlHelper . updateSetColumnsIgnoreVersion ( entityClass , "record" , false , false ) ) ; sql . append ( SqlHelper . updateByExampleWhereClause ( ) ) ; return sql . toString ( ) ;
|
public class PullerInternal { /** * Implementation of BlockingQueueListener . changed ( EventType , Object , BlockingQueue ) for Pull Replication
* Note : Pull replication needs to send IDLE after PUT / { db } / _ local .
* However sending IDLE from Push replicator breaks few unit test cases .
* This is reason changed ( ) method was override for pull replication */
@ Override public void changed ( EventType type , Object o , BlockingQueue queue ) { } }
|
if ( ( type == EventType . PUT || type == EventType . ADD ) && isContinuous ( ) && ! queue . isEmpty ( ) ) { synchronized ( lockWaitForPendingFutures ) { if ( waitingForPendingFutures ) { return ; } } fireTrigger ( ReplicationTrigger . RESUME ) ; waitForPendingFuturesWithNewThread ( ) ; }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.