signature — string lengths 43 to 39.1k; implementation — string lengths 0 to 450k
public class AddressUtils {
    /**
     * Determines whether the given ip belongs to the local machine.
     * A single machine may have several IP addresses at the same time, so
     * after checking the primary local address this scans every network card.
     *
     * @param ip the IP address (or host name) to test
     * @return true if the address matches the local host or any interface address
     */
    public static boolean isHostIp(String ip) {
        InetAddress localAddress = null;
        try {
            localAddress = InetAddress.getLocalHost();
            // NOTE(review): '&&' binds tighter than '||', so a loopback local
            // address makes this return true for ANY ip — confirm that is intended.
            if (localAddress.isLoopbackAddress()
                || isValidHostAddress(localAddress)
                   && (localAddress.getHostAddress().equals(ip) || localAddress.getHostName().equals(ip))) {
                return true;
            }
        } catch (Throwable e) {
            // Best-effort: fall through to scanning the network interfaces below.
            logger.warn("Failed to retriving local host ip address, try scan network card ip address. cause: " + e.getMessage());
        }
        try {
            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
            if (interfaces != null) {
                while (interfaces.hasMoreElements()) {
                    try {
                        NetworkInterface network = interfaces.nextElement();
                        Enumeration<InetAddress> addresses = network.getInetAddresses();
                        if (addresses != null) {
                            while (addresses.hasMoreElements()) {
                                try {
                                    InetAddress address = addresses.nextElement();
                                    // Same precedence caveat as above: a loopback
                                    // interface address matches unconditionally.
                                    if (address.isLoopbackAddress()
                                        || isValidHostAddress(address) && address.getHostAddress().equals(ip)) {
                                        return true;
                                    }
                                } catch (Throwable e) {
                                    logger.warn("Failed to retriving network card ip address. cause:" + e.getMessage());
                                }
                            }
                        }
                    } catch (Throwable e) {
                        logger.warn("Failed to retriving network card ip address. cause:" + e.getMessage());
                    }
                }
            }
        } catch (Throwable e) {
            logger.warn("Failed to retriving network card ip address. cause:" + e.getMessage());
        }
        return false;
    }
}
public class SnapshotDeleteAgent {
    /**
     * Validates the parameters for a @SnapshotDelete invocation and fills the
     * request JSON object. Returns an error string describing the first
     * violation found, or null when the parameters are acceptable.
     * Yes, ugly. Bang it out, then refactor later.
     *
     * @param params caller parameters: String[] paths, String[] nonces, and an
     *               optional snapshot path type
     * @param obj    output JSON object populated with the validated request
     * @return an error message, or null on success
     * @throws Exception if parameter handling fails unexpectedly
     */
    private String parseParams(ParameterSet params, JSONObject obj) throws Exception {
        if (params.size() < 2) {
            return "@SnapshotDelete expects 2 or 3 arguments, received " + params.size();
        }
        String[] paths = null;
        Object paramList[] = params.toArray();
        try {
            paths = (String[]) (ParameterConverter.tryToMakeCompatible(String[].class, paramList[0]));
        } catch (Exception e) {
            // Conversion failure is reported back as the error message.
            return e.getMessage();
        }
        if (paths == null || paths.length == 0) {
            return "No paths supplied";
        }
        for (String path : paths) {
            if (path == null || path.trim().isEmpty()) {
                return "A path was null or the empty string";
            }
        }
        String[] nonces = null;
        try {
            nonces = (String[]) (ParameterConverter.tryToMakeCompatible(String[].class, paramList[1]));
        } catch (Exception e) {
            return e.getMessage();
        }
        if (nonces == null || nonces.length == 0) {
            return "No nonces supplied";
        }
        for (String nonce : nonces) {
            if (nonce == null || nonce.trim().isEmpty()) {
                return "A nonce was null or the empty string";
            }
        }
        // Paths and nonces are parallel arrays, so their lengths must agree.
        if (paths.length != nonces.length) {
            return "A path must be provided for every nonce";
        }
        // Optional third argument selects the snapshot path type.
        String stype = SnapshotPathType.SNAP_PATH.toString();
        if (params.size() > 2) {
            stype = (String) (ParameterConverter.tryToMakeCompatible(String.class, paramList[2]));
        }
        // Dupe SNAPSHOTSCAN as the subselector in case we consolidate later
        obj.put("subselector", "SNAPSHOTDELETE");
        obj.put("interval", false);
        obj.put("paths", paths);
        obj.put(SnapshotUtil.JSON_PATH_TYPE, stype);
        obj.put("nonces", nonces);
        return null;
    }
}
public class Property { /** * Returns the boolean value of this property . */ public boolean booleanValue ( ) { } }
final String value = getInternal ( null , false ) ; if ( value == null ) { return false ; } return toBoolean ( value ) ;
public class CommonIronJacamarParser {
    /**
     * Store admin object: serializes the admin object as an XML element, with
     * its attributes and nested config-property elements.
     *
     * @param ao The admin object
     * @param writer The writer
     * @exception Exception Thrown if an error occurs
     */
    protected void storeAdminObject(AdminObject ao, XMLStreamWriter writer) throws Exception {
        writer.writeStartElement(CommonXML.ELEMENT_ADMIN_OBJECT);
        // Attributes are only emitted when a value is present.
        if (ao.getClassName() != null)
            writer.writeAttribute(CommonXML.ATTRIBUTE_CLASS_NAME,
                ao.getValue(CommonXML.ATTRIBUTE_CLASS_NAME, ao.getClassName()));
        if (ao.getJndiName() != null)
            writer.writeAttribute(CommonXML.ATTRIBUTE_JNDI_NAME,
                ao.getValue(CommonXML.ATTRIBUTE_JNDI_NAME, ao.getJndiName()));
        // 'enabled' is written only when it is an expression or differs from the default.
        if (ao.isEnabled() != null && (ao.hasExpression(CommonXML.ATTRIBUTE_ENABLED) || !Defaults.ENABLED.equals(ao.isEnabled())))
            writer.writeAttribute(CommonXML.ATTRIBUTE_ENABLED,
                ao.getValue(CommonXML.ATTRIBUTE_ENABLED, ao.isEnabled().toString()));
        if (ao.getId() != null)
            writer.writeAttribute(CommonXML.ATTRIBUTE_ID, ao.getValue(CommonXML.ATTRIBUTE_ID, ao.getId()));
        // Each config property becomes a nested element with a name attribute
        // and the (possibly expression-resolved) value as character content.
        if (ao.getConfigProperties() != null && !ao.getConfigProperties().isEmpty()) {
            Iterator<Map.Entry<String, String>> it = ao.getConfigProperties().entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, String> entry = it.next();
                writer.writeStartElement(CommonXML.ELEMENT_CONFIG_PROPERTY);
                writer.writeAttribute(CommonXML.ATTRIBUTE_NAME, entry.getKey());
                writer.writeCharacters(ao.getValue(CommonXML.ELEMENT_CONFIG_PROPERTY, entry.getKey(), entry.getValue()));
                writer.writeEndElement();
            }
        }
        writer.writeEndElement();
    }
}
public class IOCase { /** * Checks if one string contains another at a specific index using the case - sensitivity rule . * This method mimics parts of { @ link String # regionMatches ( boolean , int , String , int , int ) } * but takes case - sensitivity into account . * @ param str the string to check , not null * @ param strStartIndex the index to start at in str * @ param search the start to search for , not null * @ return true if equal using the case rules * @ throws NullPointerException if either string is null */ public boolean checkRegionMatches ( String str , int strStartIndex , String search ) { } }
return str . regionMatches ( ! sensitive , strStartIndex , search , 0 , search . length ( ) ) ;
public class TreeTranslator {
    /**
     * Visitor methods
     *
     * Translates a top-level compilation unit in place: the package identifier
     * and all top-level definitions are rewritten, then the (mutated) unit is
     * published as the visitor result.
     */
    public void visitTopLevel(JCCompilationUnit tree) {
        tree.pid = translate(tree.pid);
        tree.defs = translate(tree.defs);
        result = tree;
    }
}
public class Section {
    /**
     * Defines the color that will be used to colorize a highlighted section.
     *
     * @param COLOR the new highlight color
     */
    public void setHighlightColor(final Color COLOR) {
        // NOTE(review): looks like a lazily-created property wrapper — while the
        // property object has not been instantiated yet, only the backing field
        // is updated and listeners are notified manually; once it exists, the
        // property itself is set. Confirm against the matching property accessor.
        if (null == highlightColor) {
            _highlightColor = COLOR;
            fireSectionEvent(UPDATE_EVENT);
        } else {
            highlightColor.set(COLOR);
        }
    }
}
public class MediaQueryTools {
    /**
     * Get the CSS wrapped in the specified media query. Note: all existing rule
     * objects are reused, so modifying them also modifies the original CSS!
     *
     * @param aCSS The CSS to be wrapped. May not be <code>null</code>.
     * @param aMediaQueries The media queries to use. May neither be <code>null</code>
     *        nor empty nor may it contain <code>null</code> elements.
     * @param bAllowNestedMediaQueries if <code>true</code> nested media queries are
     *        allowed, <code>false</code> if they are prohibited.
     * @return <code>null</code> if our CSS cannot be wrapped, the newly created
     *         {@link CascadingStyleSheet} object otherwise.
     */
    @Nullable
    public static CascadingStyleSheet getWrappedInMediaQuery(@Nonnull final CascadingStyleSheet aCSS,
        @Nonnull @Nonempty final Iterable<? extends CSSMediaQuery> aMediaQueries,
        final boolean bAllowNestedMediaQueries) {
        ValueEnforcer.notNull(aCSS, "CSS");
        ValueEnforcer.notEmpty(aMediaQueries, "MediaQueries");
        if (!canWrapInMediaQuery(aCSS, bAllowNestedMediaQueries))
            return null;
        final CascadingStyleSheet ret = new CascadingStyleSheet();
        // Copy all import rules
        for (final CSSImportRule aImportRule : aCSS.getAllImportRules()) {
            if (aImportRule.hasMediaQueries()) {
                // import rule already has a media query - do not alter
                ret.addImportRule(aImportRule);
            } else {
                // Create a new rule and add the passed media queries
                final CSSImportRule aNewImportRule = new CSSImportRule(aImportRule.getLocation());
                for (final CSSMediaQuery aMediaQuery : aMediaQueries)
                    aNewImportRule.addMediaQuery(aMediaQuery);
                ret.addImportRule(aNewImportRule);
            }
        }
        // Copy all namespace rules (they cannot live inside a media rule)
        for (final CSSNamespaceRule aNamespaceRule : aCSS.getAllNamespaceRules())
            ret.addNamespaceRule(aNamespaceRule);
        // Create a single top-level media rule carrying the passed queries ...
        final CSSMediaRule aNewMediaRule = new CSSMediaRule();
        for (final CSSMediaQuery aMediaQuery : aMediaQueries)
            aNewMediaRule.addMediaQuery(aMediaQuery);
        // ... and add the existing top-level rules into this media rule
        for (final ICSSTopLevelRule aRule : aCSS.getAllRules())
            aNewMediaRule.addRule(aRule);
        // Finally add the resulting media rule into the new CSS
        ret.addRule(aNewMediaRule);
        return ret;
    }
}
public class GSAPImpl {
    /**
     * EMF-generated reflective accessor: maps each known feature id to its
     * getter, delegating unknown ids to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.GSAP__P:
                return getP();
            case AfplibPackage.GSAP__Q:
                return getQ();
            case AfplibPackage.GSAP__R:
                return getR();
            case AfplibPackage.GSAP__S:
                return getS();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class WebService { /** * method to calculate froma non - ambiguous HELM input the molecular * properties : molecular formula , molecular weight , exact mass , extinction * coefficient * @ param notation * given HELM * @ return { @ code List < String > } containing the molecule properties * @ throws BuilderMoleculeException * if the molecule can ' t be built * @ throws CTKException * general ChemToolKit exception passed to HELMToolKit * @ throws ExtinctionCoefficientException * if extinction coefficient can not be calculated * @ throws ValidationException * if notation is not valid * @ throws MonomerLoadingException * if monomers can not be loaded * @ throws ChemistryException * if the Chemistry Engine can not be initialized */ public List < String > getMolecularProperties ( String notation ) throws BuilderMoleculeException , CTKException , ExtinctionCoefficientException , ValidationException , MonomerLoadingException , ChemistryException { } }
MoleculeProperty result = MoleculePropertyCalculator . getMoleculeProperties ( validate ( notation ) ) ; setMonomerFactoryToDefault ( notation ) ; return new LinkedList < String > ( Arrays . asList ( result . getMolecularFormula ( ) , Double . toString ( result . getMolecularWeight ( ) ) , Double . toString ( result . getExactMass ( ) ) , Double . toString ( result . getExtinctionCoefficient ( ) ) ) ) ;
public class EndpointUtil {
    /**
     * Returns the operation part of the supplied endpoint, i.e. the bracketed
     * suffix such as {@code "[op]"} in {@code "service[op]"}.
     *
     * @param endpoint The endpoint; may be null
     * @param stripped Whether the surrounding brackets should be stripped
     * @return The operation, or null when the endpoint is null or contains no
     *         bracketed part
     */
    public static String decodeEndpointOperation(String endpoint, boolean stripped) {
        // Guard against null input instead of throwing an NPE; "no operation"
        // is the natural answer for a missing endpoint.
        if (endpoint == null) {
            return null;
        }
        int ind = endpoint.indexOf('[');
        if (ind == -1) {
            return null;
        }
        // When stripping, the closing ']' is assumed to be the final character.
        return stripped
            ? endpoint.substring(ind + 1, endpoint.length() - 1)
            : endpoint.substring(ind);
    }
}
public class XMLSerializer { /** * Write attribute . * @ param namespaceURI the namespace URI * @ param localName the local name * @ param value the value * @ throws Exception the exception */ public void writeAttribute ( String namespaceURI , String localName , String value ) throws Exception { } }
this . attribute ( namespaceURI , localName , value . toString ( ) ) ;
public class DataSourceService { /** * Declarative services method to unset the JAASLoginContextEntry . */ protected void unsetJaasLoginContextEntry ( ServiceReference < com . ibm . ws . security . jaas . common . JAASLoginContextEntry > svc ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "unsetJaasLoginContextEntry" , svc ) ; } jaasLoginContextEntryName = null ;
public class GeometryService { /** * This geometry is empty if there are no geometries / coordinates stored inside . * @ param geometry The geometry to check . * @ return true or false . */ public static boolean isEmpty ( Geometry geometry ) { } }
return ( geometry . getCoordinates ( ) == null || geometry . getCoordinates ( ) . length == 0 ) && ( geometry . getGeometries ( ) == null || geometry . getGeometries ( ) . length == 0 ) ;
public class CleverTapAPI {
    /**
     * InApp
     *
     * Shows the given in-app notification, or queues it when the app is in the
     * background or another in-app is already on screen. Full-screen styles are
     * launched in a dedicated activity; header/footer styles are attached as
     * fragments to the current activity.
     *
     * @param context used to build the in-app activity intent
     * @param inAppNotification the notification to display
     * @param config instance configuration (used for logging and account id)
     */
    private static void showInApp(Context context, final CTInAppNotification inAppNotification, CleverTapInstanceConfig config) {
        Logger.v(config.getAccountId(), "Attempting to show next In-App");
        if (!appForeground) {
            // Cannot show UI while backgrounded; queue for later.
            pendingNotifications.add(inAppNotification);
            Logger.v(config.getAccountId(), "Not in foreground, queueing this In App");
            return;
        }
        if (currentlyDisplayingInApp != null) {
            // Only one in-app at a time; queue this one.
            pendingNotifications.add(inAppNotification);
            Logger.v(config.getAccountId(), "In App already displaying, queueing this In App");
            return;
        }
        currentlyDisplayingInApp = inAppNotification;
        CTInAppBaseFragment inAppFragment = null;
        CTInAppType type = inAppNotification.getInAppType();
        switch (type) {
            // Full-screen / dialog styles: rendered by a dedicated activity.
            case CTInAppTypeCoverHTML:
            case CTInAppTypeInterstitialHTML:
            case CTInAppTypeHalfInterstitialHTML:
            case CTInAppTypeCover:
            case CTInAppTypeHalfInterstitial:
            case CTInAppTypeInterstitial:
            case CTInAppTypeAlert:
            case CTInAppTypeInterstitialImageOnly:
            case CTInAppTypeHalfInterstitialImageOnly:
            case CTInAppTypeCoverImageOnly:
                Intent intent = new Intent(context, InAppNotificationActivity.class);
                intent.putExtra("inApp", inAppNotification);
                intent.putExtra("config", config);
                try {
                    Activity currentActivity = getCurrentActivity();
                    if (currentActivity == null) {
                        throw new IllegalStateException("Current activity reference not found");
                    }
                    config.getLogger().verbose(config.getAccountId(),
                        "calling InAppActivity for notification: " + inAppNotification.getJsonDescription());
                    currentActivity.startActivity(intent);
                    Logger.d("Displaying In-App: " + inAppNotification.getJsonDescription());
                } catch (Throwable t) {
                    Logger.v("Please verify the integration of your app." + " It is not setup to support in-app notifications yet.", t);
                }
                break;
            // Partial-screen styles: rendered as fragments below.
            case CTInAppTypeFooterHTML:
                inAppFragment = new CTInAppHtmlFooterFragment();
                break;
            case CTInAppTypeHeaderHTML:
                inAppFragment = new CTInAppHtmlHeaderFragment();
                break;
            case CTInAppTypeFooter:
                inAppFragment = new CTInAppNativeFooterFragment();
                break;
            case CTInAppTypeHeader:
                inAppFragment = new CTInAppNativeHeaderFragment();
                break;
            default:
                // Unknown type: release the display slot and bail out.
                Logger.d(config.getAccountId(), "Unknown InApp Type found: " + type);
                currentlyDisplayingInApp = null;
                return;
        }
        if (inAppFragment != null) {
            Logger.d("Displaying In-App: " + inAppNotification.getJsonDescription());
            try {
                // noinspection ConstantConditions
                FragmentTransaction fragmentTransaction = getCurrentActivity().getFragmentManager().beginTransaction();
                Bundle bundle = new Bundle();
                bundle.putParcelable("inApp", inAppNotification);
                bundle.putParcelable("config", config);
                inAppFragment.setArguments(bundle);
                fragmentTransaction.setCustomAnimations(android.R.animator.fade_in, android.R.animator.fade_out);
                fragmentTransaction.add(android.R.id.content, inAppFragment);
                Logger.v(config.getAccountId(), "calling InAppFragment " + inAppNotification.getCampaignId());
                fragmentTransaction.commit();
            } catch (Throwable t) {
                Logger.v(config.getAccountId(), "Fragment not able to render", t);
            }
        }
    }
}
public class DecisionDefinitionEntity { /** * Updates all modifiable fields from another decision definition entity . * @ param updatingDecisionDefinition */ @ Override public void updateModifiableFieldsFromEntity ( DecisionDefinitionEntity updatingDecisionDefinition ) { } }
if ( this . key . equals ( updatingDecisionDefinition . key ) && this . deploymentId . equals ( updatingDecisionDefinition . deploymentId ) ) { this . revision = updatingDecisionDefinition . revision ; this . historyTimeToLive = updatingDecisionDefinition . historyTimeToLive ; } else { LOG . logUpdateUnrelatedDecisionDefinitionEntity ( this . key , updatingDecisionDefinition . key , this . deploymentId , updatingDecisionDefinition . deploymentId ) ; }
public class AmazonNeptuneClient { /** * Creates an event notification subscription . This action requires a topic ARN ( Amazon Resource Name ) created by * either the Neptune console , the SNS console , or the SNS API . To obtain an ARN with SNS , you must create a topic * in Amazon SNS and subscribe to the topic . The ARN is displayed in the SNS console . * You can specify the type of source ( SourceType ) you want to be notified of , provide a list of Neptune sources * ( SourceIds ) that triggers the events , and provide a list of event categories ( EventCategories ) for events you * want to be notified of . For example , you can specify SourceType = db - instance , SourceIds = mydbinstance1, * mydbinstance2 and EventCategories = Availability , Backup . * If you specify both the SourceType and SourceIds , such as SourceType = db - instance and SourceIdentifier = * myDBInstance1 , you are notified of all the db - instance events for the specified source . If you specify a * SourceType but do not specify a SourceIdentifier , you receive notice of the events for that source type for all * your Neptune sources . If you do not specify either the SourceType nor the SourceIdentifier , you are notified of * events generated from all Neptune sources belonging to your customer account . * @ param createEventSubscriptionRequest * @ return Result of the CreateEventSubscription operation returned by the service . * @ throws EventSubscriptionQuotaExceededException * @ throws SubscriptionAlreadyExistException * @ throws SNSInvalidTopicException * @ throws SNSNoAuthorizationException * @ throws SNSTopicArnNotFoundException * @ throws SubscriptionCategoryNotFoundException * @ throws SourceNotFoundException * @ sample AmazonNeptune . CreateEventSubscription * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / neptune - 2014-10-31 / CreateEventSubscription " * target = " _ top " > AWS API Documentation < / a > */ @ Override public EventSubscription createEventSubscription ( CreateEventSubscriptionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateEventSubscription ( request ) ;
public class TransactionContext {
    /**
     * Commits the current transaction. This will: check for any conflicts, based on the change set aggregated from
     * all registered {@link TransactionAware} instances; flush any pending writes from the {@code TransactionAware}s;
     * commit the current transaction with the {@link TransactionSystemClient}; and clear the current transaction state.
     *
     * @throws TransactionConflictException if a conflict is detected with a recently committed transaction
     * @throws TransactionFailureException if an error occurs while committing
     */
    public void finish() throws TransactionFailureException {
        Preconditions.checkState(currentTx != null, "Cannot finish tx that has not been started");
        // each of these steps will abort and rollback the tx in case of errors, and throw an exception
        checkForConflicts();
        persist();
        commit();
        postCommit();
        // Only reached when every step succeeded: clear the transaction state.
        currentTx = null;
    }
}
public class DeltaCalculator { /** * Returns the best estimate client / server time - delta . */ public long getTimeDelta ( ) { } }
if ( _iter == 0 ) { // no responses yet return 0L ; } // Return a median value as our estimate , rather than an average . // Mdb writes : // I used the median because that was more likely to result in a // sensible value . // Assuming there are two kinds of packets , one that goes and comes // back without delay and provides an accurate time value , and one // that gets delayed somewhere on the way there or the way back and // provides an inaccurate time value . // If no packets are delayed , both algorithms should be fine . If one // packet is delayed the median will select the middle , non - delayed // packet , whereas the average will skew everything a bit because // of the delayed packet . If two packets are delayed , the median // will be more skewed than the average because it will benefit // from the one accurate packet and if all three packets are delayed // both algorithms will be ( approximately ) equally inaccurate . // I believe the chances are most likely that zero or one packets // will be delayed , so I chose the median rather than the average . // copy the deltas array so that we don ' t alter things before // all pongs have arrived long [ ] deltasCopy = new long [ _iter ] ; System . arraycopy ( _deltas , 0 , deltasCopy , 0 , _iter ) ; // sort the estimates and return one from the middle Arrays . sort ( deltasCopy ) ; return deltasCopy [ deltasCopy . length / 2 ] ;
public class ValueList { /** * Creates a list of the provided elements in the same order . * @ param value zero or more values . * @ return a list of the provided values . */ public static < V extends Value > ValueList < V > getInstance ( V ... value ) { } }
ValueList < V > result = new ValueList < > ( value . length ) ; for ( V val : value ) { result . add ( val ) ; } return result ;
public class CliFrontend { /** * Displays an optional exception message for incorrect program parametrization . * @ param e The exception to display . * @ return The return code for the process . */ private static int handleParametrizationException ( ProgramParametrizationException e ) { } }
LOG . error ( "Program has not been parametrized properly." , e ) ; System . err . println ( e . getMessage ( ) ) ; return 1 ;
public class ArrayUtils { /** * < p > Converts an array of object Character to primitives handling < code > null < / code > . < / p > * < p > This method returns < code > null < / code > for a < code > null < / code > input array . < / p > * @ param array a < code > Character < / code > array , may be < code > null < / code > * @ param valueForNull the value to insert if < code > null < / code > found * @ return a < code > char < / code > array , < code > null < / code > if null array input */ public static char [ ] toPrimitive ( Character [ ] array , char valueForNull ) { } }
if ( array == null ) { return null ; } else if ( array . length == 0 ) { return EMPTY_CHAR_ARRAY ; } final char [ ] result = new char [ array . length ] ; for ( int i = 0 ; i < array . length ; i ++ ) { Character b = array [ i ] ; result [ i ] = ( b == null ? valueForNull : b ) ; } return result ;
public class VideoSampleActivity {
    /**
     * Initialize the VideoView with a default video and start playback.
     */
    private void initializeVideoView() {
        // NOTE(review): R.raw.video is an int resource id concatenated onto the
        // prefix, so APPLICATION_RAW_PATH is presumably an
        // "android.resource://<package>/" style base URI — confirm.
        Uri path = Uri.parse(APPLICATION_RAW_PATH + R.raw.video);
        videoView.setVideoURI(path);
        videoView.start();
    }
}
public class JMJson { /** * Transform t 2. * @ param < T1 > the type parameter * @ param < T2 > the type parameter * @ param object the object * @ param typeClass the type class * @ return the t 2 */ public static < T1 , T2 > T2 transform ( T1 object , Class < T2 > typeClass ) { } }
try { return jsonMapper . convertValue ( object , typeClass ) ; } catch ( Exception e ) { return JMExceptionManager . handleExceptionAndReturnNull ( log , e , "transform" , object ) ; }
public class FindPositionArray { /** * This method updates the internal array only if the bit vector has been * changed since the last update or creation of this class . */ void updateCount ( ) { } }
if ( this . hasChanged ) { this . positionArray = ArrayUtils . toPrimitive ( getPositionList ( ) . toArray ( new Long [ 0 ] ) ) ; this . hasChanged = false ; }
public class FlatBuffersMapper { /** * This method just converts enums * @ param val * @ return */ public static ByteOrder getOrderFromByte ( byte val ) { } }
if ( val == org . nd4j . graph . ByteOrder . LE ) return ByteOrder . LITTLE_ENDIAN ; else return ByteOrder . BIG_ENDIAN ;
public class GobblinClusterManager { /** * Get additional { @ link Tag } s required for any type of reporting . */ private List < ? extends Tag < ? > > getMetadataTags ( String applicationName , String applicationId ) { } }
return Tag . fromMap ( new ImmutableMap . Builder < String , Object > ( ) . put ( GobblinClusterMetricTagNames . APPLICATION_NAME , applicationName ) . put ( GobblinClusterMetricTagNames . APPLICATION_ID , applicationId ) . build ( ) ) ;
public class ZoteroItemDataProvider {
    /**
     * Makes the given ID unique by appending a base-36 suffix until it no
     * longer collides with a known ID.
     *
     * @param id the ID
     * @param knownIds a set of known IDs to compare to
     * @return the unique ID
     */
    private static String uniquify(String id, Set<String> knownIds) {
        // Start suffixes at 10 so the base-36 rendering begins with 'a'.
        int suffix = 10;
        final String base = id;
        while (knownIds.contains(id)) {
            id = base + Integer.toString(suffix, Character.MAX_RADIX);
            ++suffix;
        }
        return id;
    }
}
public class CardInputWidget {
    /**
     * Checks on the horizontal position of a touch event to see if
     * that event needs to be associated with one of the controls even
     * without having actually touched it. This essentially gives a larger
     * touch surface to the controls. We return {@code null} if the user touches
     * actually inside the widget because no interception is necessary - the touch will
     * naturally give focus to that control, and we don't want to interfere with what
     * Android will naturally do in response to that touch.
     *
     * @param touchX distance in pixels from the left side of this control
     * @return a {@link StripeEditText} that needs to request focus, or {@code null}
     *         if no such request is necessary.
     */
    @VisibleForTesting
    @Nullable
    StripeEditText getFocusRequestOnTouch(int touchX) {
        int frameStart = mFrameLayout.getLeft();
        // The thresholds below are checked in increasing order, so each branch
        // covers the band between the previous threshold and its own.
        if (mCardNumberIsViewed) {
            // Then our view is
            // |CARDVIEW||space||DATEVIEW|
            if (touchX < frameStart + mPlacementParameters.cardWidth) {
                // Then the card edit view will already handle this touch.
                return null;
            } else if (touchX < mPlacementParameters.cardTouchBufferLimit) {
                // Then we want to act like this was a touch on the card view
                return mCardNumberEditText;
            } else if (touchX < mPlacementParameters.dateStartPosition) {
                // Then we act like this was a touch on the date editor.
                return mExpiryDateEditText;
            } else {
                // Then the date editor will already handle this touch.
                return null;
            }
        } else {
            // Our view is
            // |PEEK||space||DATE||space||CVC|
            if (touchX < frameStart + mPlacementParameters.peekCardWidth) {
                // This was a touch on the card number editor, so we don't need to handle it.
                return null;
            } else if (touchX < mPlacementParameters.cardTouchBufferLimit) {
                // Then we need to act like the user touched the card editor
                return mCardNumberEditText;
            } else if (touchX < mPlacementParameters.dateStartPosition) {
                // Then we need to act like this was a touch on the date editor
                return mExpiryDateEditText;
            } else if (touchX < mPlacementParameters.dateStartPosition + mPlacementParameters.dateWidth) {
                // Just a regular touch on the date editor.
                return null;
            } else if (touchX < mPlacementParameters.dateRightTouchBufferLimit) {
                // We need to act like this was a touch on the date editor
                return mExpiryDateEditText;
            } else if (touchX < mPlacementParameters.cvcStartPosition) {
                // We need to act like this was a touch on the cvc editor.
                return mCvcNumberEditText;
            } else {
                return null;
            }
        }
    }
}
public class ComplexStubPersonAttributeDao { /** * The backing Map to use for queries , the outer map is keyed on the query attribute . The inner * Map is the set of user attributes to be returned for the query attribute . * @ param backingMap backing map */ public void setBackingMap ( final Map < String , Map < String , List < Object > > > backingMap ) { } }
if ( backingMap == null ) { this . backingMap = new HashMap < > ( ) ; this . possibleUserAttributeNames = new HashSet < > ( ) ; } else { this . backingMap = new LinkedHashMap < > ( backingMap ) ; this . initializePossibleAttributeNames ( ) ; }
public class ELParser {
    /**
     * Note that both an empty Set and an empty Map are represented by {}. The
     * parser will always parse {} as an empty Set and special handling is required
     * to convert it to an empty Map when appropriate.
     *
     * JavaCC/JJTree-generated production: parses '{' [ MapEntry (',' MapEntry)* ] '}'
     * into an AstMapData tree node. Do not hand-edit the token plumbing.
     */
    final public void MapData() throws ParseException {
        /*@bgen(jjtree) MapData */
        AstMapData jjtn000 = new AstMapData(JJTMAPDATA);
        boolean jjtc000 = true;
        jjtree.openNodeScope(jjtn000);
        try {
            jj_consume_token(START_SET_OR_MAP);
            // Optional first entry followed by zero or more comma-separated entries.
            switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case START_SET_OR_MAP:
                case INTEGER_LITERAL:
                case FLOATING_POINT_LITERAL:
                case STRING_LITERAL:
                case TRUE:
                case FALSE:
                case NULL:
                case LPAREN:
                case LBRACK:
                case NOT0:
                case NOT1:
                case EMPTY:
                case MINUS:
                case IDENTIFIER:
                    MapEntry();
                    label_18: while (true) {
                        switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                            case COMMA:
                                ;
                                break;
                            default:
                                jj_la1[46] = jj_gen;
                                break label_18;
                        }
                        jj_consume_token(COMMA);
                        MapEntry();
                    }
                    break;
                default:
                    jj_la1[47] = jj_gen;
                    ;
            }
            jj_consume_token(RBRACE);
        } catch (Throwable jjte000) {
            // Generated error handling: clean up the node scope, then rethrow
            // the throwable with its original static type.
            if (jjtc000) {
                jjtree.clearNodeScope(jjtn000);
                jjtc000 = false;
            } else {
                jjtree.popNode();
            }
            if (jjte000 instanceof RuntimeException) {
                { if (true) throw (RuntimeException) jjte000; }
            }
            if (jjte000 instanceof ParseException) {
                { if (true) throw (ParseException) jjte000; }
            }
            { if (true) throw (Error) jjte000; }
        } finally {
            if (jjtc000) {
                jjtree.closeNodeScope(jjtn000, true);
            }
        }
    }
}
public class ApiOvhEmailpro { /** * Alter this object properties * REST : PUT / email / pro / { service } / externalContact / { externalEmailAddress } * @ param body [ required ] New object properties * @ param service [ required ] The internal name of your pro organization * @ param externalEmailAddress [ required ] Contact email * API beta */ public void service_externalContact_externalEmailAddress_PUT ( String service , String externalEmailAddress , OvhExternalContact body ) throws IOException { } }
String qPath = "/email/pro/{service}/externalContact/{externalEmailAddress}" ; StringBuilder sb = path ( qPath , service , externalEmailAddress ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class Messenger {
    /**
     * Request user name auth.
     *
     * @param userName userName to authenticate
     * @return Command for execution
     */
    @NotNull
    @ObjectiveCName("requestStartAuthCommandWithUserName:")
    public Command<AuthState> requestStartUserNameAuth(String userName) {
        // Thin facade: delegates directly to the auth module implementation.
        return modules.getAuthModule().requestStartUserNameAuth(userName);
    }
}
public class GL20 { /** * Checks for any GL error codes and logs them ( if { @ link # checkErrors } is true ) . * @ return true if any errors were reported . */ public boolean checkError ( String op ) { } }
int reported = 0 ; if ( checkErrors ) { int error ; while ( ( error = glGetError ( ) ) != GL_NO_ERROR ) { reported += 1 ; System . err . println ( op + ": glError " + error ) ; } } return reported > 0 ;
public class Unsigned { /** * Create an < code > unsigned short < / code > * @ throws NumberFormatException If < code > value < / code > does not contain a * parsable < code > unsigned short < / code > . * @ see UShort # valueOf ( String ) */ public static UShort ushort ( String value ) throws NumberFormatException { } }
return value == null ? null : UShort . valueOf ( value ) ;
public class Vacuum { /** * Get an array with the details of all cleanups . * @ return An array with the details of all cleanups . Empty if no cleanups were performed . * @ throws CommandExecutionException When there has been a error during the communication or the response was invalid . */ public VacuumCleanup [ ] getAllCleanups ( ) throws CommandExecutionException { } }
// The cleaning summary stores the list of past cleanup ids at index 3.
JSONArray cleanupIDs = getCleaningSummary().optJSONArray(3);
if (cleanupIDs == null) {
    // The Javadoc promises an empty array when no cleanups were performed;
    // the old code returned null here, breaking that contract.
    return new VacuumCleanup[0];
}
VacuumCleanup[] res = new VacuumCleanup[cleanupIDs.length()];
for (int i = 0; i < cleanupIDs.length(); i++) {
    // Fetch each cleanup record individually by its id.
    JSONArray send = new JSONArray();
    send.put(cleanupIDs.optLong(i));
    JSONArray ar = sendToArray("get_clean_record", send).optJSONArray(0);
    res[i] = new VacuumCleanup(ar);
}
return res;
public class ParquetGroupConverter { /** * Convert a parquet group field as though it were a map . Logical types of ' list ' and ' map ' will be transformed * into java lists and maps respectively ( { @ link ParquetGroupConverter # convertLogicalList } and * { @ link ParquetGroupConverter # convertLogicalMap } ) , repeated fields will also be translated to lists , and * primitive types will be extracted into an ingestion friendly state ( e . g . ' int ' and ' long ' ) . Finally , * if a field is not present , this method will return null . */ @ Nullable Object convertField ( Group g , String fieldName ) { } }
return convertField ( g , fieldName , binaryAsString ) ;
public class RegexpExpression { /** * Convert a regexp match to a map of matcher arguments . */ private Map < Any2 < Integer , String > , String > resolve_template_args_ ( Matcher matcher ) { } }
// Map each template argument (a positional Integer or a named String group
// reference) to the text captured by the corresponding Matcher group;
// mapCombine selects the matching Matcher#group overload for either variant.
// NOTE(review): Collectors.toMap throws on duplicate keys or null values
// (non-participating groups) -- presumably the template guarantees neither
// occurs; confirm against the template's argument construction.
return template_.getArguments().stream()
        .collect(Collectors.toMap(
                arg -> arg,
                arg -> arg.mapCombine(matcher::group, matcher::group)));
public class DbSecurity { /** * For a set of tables , retrieves access to columns specified in the security tables . * This method uses the table ' dbsec _ columns ' to perform this work . The resulting information * in the column data is incomplete : it lacks types and other elements . * @ param tableDataSet Tables to query * @ throws Exception */ private void retrieveColumnData ( Collection < TableSchemaImpl > tableDataSet ) throws Exception { } }
// Fan out to the single-table overload for each schema in the collection.
for (TableSchemaImpl tableData : tableDataSet) {
    retrieveColumnData(tableData);
}
public class ModelsEngine { /** * TODO Daniele doc * @ param U * @ param T * @ param theSplit * @ param binNum * @ param num _ max * @ return */ public static double split2realvectors ( double [ ] U , double [ ] T , SplitVectors theSplit , int binNum , int num_max , IHMProgressMonitor pm ) { } }
// Partitions the (sorted, novalue-prefixed) vector U into bins and copies the
// matching U/T values into theSplit. With binNum <= 1 each run of equal U
// values becomes its own bin; with binNum > 1 the [min,max] range is divided
// into equal-width bins. Returns the bin width (0 when no real binning done).
double binStep = 0, minValue = 0, maxValue;
int i, count = 0, previousCount, minPosition = 0, maxPosition = 0, emptyBins;
int[] bins;
int head = 0; // number of bins actually produced
bins = new int[U.length];
if (binNum <= 1) {
    // One bin per run of identical values in U.
    previousCount = 1;
    count = 1;
    int index = 0;
    while (count < U.length) {
        // was while ( count < = U . length & & U [ count ] = = U [ count - 1 ] ) {
        while (count < U.length && U[count] == U[count - 1]) {
            count++;
        }
        // NOTE(review): index is incremented before the first write, so
        // bins[0] is never assigned in this branch -- possible off-by-one;
        // preserved as-is, verify against callers.
        index++;
        bins[index] = count - previousCount;
        head++;
        previousCount = count;
        count++;
        if (head > num_max)
            throw new ModelsIllegalargumentException("The number of bin exceeds the maximum number allowed.", "MODEL", pm);
    }
} else if (binNum > 1) {
    // Skip leading novalues, then compute the uniform bin width.
    minPosition = 0;
    maxValue = U[U.length - 1];
    while (minPosition < U.length && isNovalue(U[minPosition])) {
        minPosition++;
    }
    if (minPosition == U.length) {
        // force the situation of non calculation
        binStep = 0;
    } else {
        minValue = U[minPosition];
        maxPosition = U.length - 1;
        binStep = (maxValue - minValue) / (binNum - 1);
    }
    if (binStep != 0) {
        int binIndex = 0;
        previousCount = minPosition; // the novalues are already left aside
        count = minPosition;
        emptyBins = 0;
        double runningCenter = minValue + binStep / 2.0;
        for (int n = 0; n < binNum - 1; n++) {
            // Upper bound of this bin; the last bin is clamped to maxValue.
            double upperLimitOfBin;
            if (n == binNum - 2) {
                upperLimitOfBin = maxValue;
            } else {
                upperLimitOfBin = runningCenter + binStep / 2.0;
            }
            if (U[count] <= upperLimitOfBin) {
                // Advance count past every value belonging to this bin.
                double value = U[count];
                while (value <= upperLimitOfBin) {
                    count++;
                    if (count > maxPosition) {
                        break;
                    }
                    value = U[count];
                }
                bins[binIndex] = count - previousCount; // contained in the bin
                binIndex++;
                head++;
                previousCount = count;
                // count + + ;
            } else {
                emptyBins++;
            }
            runningCenter += binStep;
        }
        if (emptyBins != 0) {
            pm.message(emptyBins + " empty bins where found");
        }
    } else {
        // Degenerate case: all valid values identical -- one bin holds them all.
        for (double tmpValue : U) {
            if (!isNovalue(tmpValue)) {
                count++;
            }
        }
        bins[0] = count;
        head = count;
    }
}
if (head < 1) {
    throw new ModelsIllegalargumentException("Something wrong happened in binning", "MODEL", pm);
} else {
    theSplit.initIndex(head);
    int maxnumberinbin = 0;
    for (i = 0; i < head; i++) {
        theSplit.splitIndex[i] = bins[i];
        if (bins[i] > maxnumberinbin)
            maxnumberinbin = bins[i];
    }
    /*
     * now a list of the values inside the bins are put into the
     * matrixes, therefore we need as many rows as bins and a column
     * number high enough to hold the major number of values hold inside
     * a bin.
     */
    theSplit.initValues(head, maxnumberinbin);
    int index = minPosition;
    for (int j = 0; j < head; j++) {
        for (int k = 0; k < theSplit.splitIndex[j]; k++) {
            theSplit.splitValues1[j][k] = U[index];
            theSplit.splitValues2[j][k] = T[index];
            index++;
        }
    }
}
if (binNum < 2)
    binStep = 0;
return binStep;
public class Utils { /** * Gets the client world . * @ return the client world */ @ SideOnly ( Side . CLIENT ) public static World getClientWorld ( ) { } }
return Minecraft . getMinecraft ( ) != null ? Minecraft . getMinecraft ( ) . world : null ;
public class JobConf { /** * Get the memory required to run a task of this job , in bytes . See * { @ link # MAPRED _ TASK _ MAXVMEM _ PROPERTY } * This method is deprecated . Now , different memory limits can be * set for map and reduce tasks of a job , in MB . * For backward compatibility , if the job configuration sets the * key { @ link # MAPRED _ TASK _ MAXVMEM _ PROPERTY } to a value different * from { @ link # DISABLED _ MEMORY _ LIMIT } , that value is returned . * Otherwise , this method will return the larger of the values returned by * { @ link # getMemoryForMapTask ( ) } and { @ link # getMemoryForReduceTask ( ) } * after converting them into bytes . * @ return Memory required to run a task of this job , in bytes , * or { @ link # DISABLED _ MEMORY _ LIMIT } , if unset . * @ see # setMaxVirtualMemoryForTask ( long ) * @ deprecated Use { @ link # getMemoryForMapTask ( ) } and * { @ link # getMemoryForReduceTask ( ) } */ @ Deprecated public long getMaxVirtualMemoryForTask ( ) { } }
LOG.warn("getMaxVirtualMemoryForTask() is deprecated. "
    + "Instead use getMemoryForMapTask() and getMemoryForReduceTask()");
// Honor an explicitly configured legacy value first.
long value = getLong(MAPRED_TASK_MAXVMEM_PROPERTY, DISABLED_MEMORY_LIMIT);
value = normalizeMemoryConfigValue(value);
if (value == DISABLED_MEMORY_LIMIT) {
    // Legacy key unset: fall back to the larger of the map/reduce limits,
    // which are configured in MB.
    value = Math.max(getMemoryForMapTask(), getMemoryForReduceTask());
    value = normalizeMemoryConfigValue(value);
    if (value != DISABLED_MEMORY_LIMIT) {
        // Convert MB to bytes, matching the legacy key's unit.
        value *= 1024 * 1024;
    }
}
return value;
public class Mac { /** * Returns a < code > Mac < / code > object that implements the * specified MAC algorithm . * < p > A new Mac object encapsulating the * MacSpi implementation from the specified provider * is returned . The specified provider must be registered * in the security provider list . * < p > Note that the list of registered providers may be retrieved via * the { @ link Security # getProviders ( ) Security . getProviders ( ) } method . * @ param algorithm the standard name of the requested MAC algorithm . * See the Mac section in the < a href = * " { @ docRoot } openjdk - redirect . html ? v = 8 & path = / technotes / guides / security / StandardNames . html # Mac " > * Java Cryptography Architecture Standard Algorithm Name Documentation < / a > * for information about standard algorithm names . * @ param provider the name of the provider . * @ return the new < code > Mac < / code > object . * @ exception NoSuchAlgorithmException if a MacSpi * implementation for the specified algorithm is not * available from the specified provider . * @ exception NoSuchProviderException if the specified provider is not * registered in the security provider list . * @ exception IllegalArgumentException if the < code > provider < / code > * is null or empty . * @ see java . security . Provider */ public static final Mac getInstance ( String algorithm , String provider ) throws NoSuchAlgorithmException , NoSuchProviderException { } }
// Resolve the MacSpi implementation from the named provider, then wrap it
// in a Mac facade bound to that provider and algorithm.
final Instance spiInstance = JceSecurity.getInstance("Mac", MacSpi.class, algorithm, provider);
final MacSpi spi = (MacSpi) spiInstance.impl;
return new Mac(spi, spiInstance.provider, algorithm);
public class AbstractVariable { /** * { @ inheritDoc } */ @ Override public T getConvertedValue ( Map < String , String > variables ) { } }
return convert ( variables . get ( getName ( ) ) ) ;
public class Bucket { /** * Returns an iterable over all the items in this bucket * @ param type The storage location to fetch from * @ return An iterable which will return all items in the bucket . * Note this currently makes a copy of the items list , making it thread safe . It also means * that this will potentially return stale data */ public Iterable < Item > getMasterItems ( final Storage . StorageType type ) { } }
// Lazily delegates to getMasterItemsIterator on each iteration, so every
// for-each over the returned Iterable gets a fresh iterator (and, per the
// Javadoc, a fresh snapshot of the items).
return new Iterable<Item>() {
    @Override
    public Iterator<Item> iterator() {
        return getMasterItemsIterator(type);
    }
};
public class LogGammaDistribution { /** * The log CDF , static version . * @ param x Value * @ param k Shape k * @ param theta Theta = 1.0 / Beta aka . " scaling " parameter * @ return cdf value */ public static double logcdf ( double x , double k , double theta , double shift ) { } }
// Shift into the distribution's support.
x = (x - shift);
// Below the support the CDF is 0, so the log CDF is -infinity. The previous
// code negated NEGATIVE_INFINITY, which evaluates to +infinity -- an invalid
// log-probability.
return x <= 0. ? Double.NEGATIVE_INFINITY : GammaDistribution.logregularizedGammaP(k, FastMath.log1p(x) * theta);
public class PythonDataStream { /** * A thin wrapper layer over { @ link DataStream # flatMap ( FlatMapFunction ) } . * @ param flat _ mapper The FlatMapFunction that is called for each element of the * DataStream * @ return The transformed { @ link PythonDataStream } . */ public PythonDataStream < SingleOutputStreamOperator < PyObject > > flat_map ( FlatMapFunction < PyObject , Object > flat_mapper ) throws IOException { } }
return new PythonSingleOutputStreamOperator ( stream . flatMap ( new PythonFlatMapFunction ( flat_mapper ) ) ) ;
public class LabelCache { /** * Adds the label list to the labels for the account . * @ param labels The labels to add */ public void add ( Collection < Label > labels ) { } }
// Index each label by its key; an existing entry with the same key is replaced.
for (final Label incoming : labels) {
    this.labels.put(incoming.getKey(), incoming);
}
public class DeploymentLaunchConfigMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeploymentLaunchConfig deploymentLaunchConfig , ProtocolMarshaller protocolMarshaller ) { } }
if (deploymentLaunchConfig == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each member through its protocol binding; order mirrors the model.
    protocolMarshaller.marshall(deploymentLaunchConfig.getPackageName(), PACKAGENAME_BINDING);
    protocolMarshaller.marshall(deploymentLaunchConfig.getPreLaunchFile(), PRELAUNCHFILE_BINDING);
    protocolMarshaller.marshall(deploymentLaunchConfig.getLaunchFile(), LAUNCHFILE_BINDING);
    protocolMarshaller.marshall(deploymentLaunchConfig.getPostLaunchFile(), POSTLAUNCHFILE_BINDING);
    protocolMarshaller.marshall(deploymentLaunchConfig.getEnvironmentVariables(), ENVIRONMENTVARIABLES_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure in the SDK's client-side exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class AmazonAlexaForBusinessClient { /** * Configures the email template for the user enrollment invitation with the specified attributes . * @ param putInvitationConfigurationRequest * @ return Result of the PutInvitationConfiguration operation returned by the service . * @ throws NotFoundException * The resource is not found . * @ throws ConcurrentModificationException * There is a concurrent modification of resources . * @ sample AmazonAlexaForBusiness . PutInvitationConfiguration * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / alexaforbusiness - 2017-11-09 / PutInvitationConfiguration " * target = " _ top " > AWS API Documentation < / a > */ @ Override public PutInvitationConfigurationResult putInvitationConfiguration ( PutInvitationConfigurationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutInvitationConfiguration ( request ) ;
public class Music { /** * Returns an element for the given reference , < TT > null < / TT > if not found */ public MusicElement getElementByReference ( MusicElementReference ref ) { } }
if ( voiceExists ( ref . getVoice ( ) ) ) { for ( Object o : getVoice ( ref . getVoice ( ) ) ) { MusicElement element = ( MusicElement ) o ; if ( element . getReference ( ) . equals ( ref ) ) return element ; } } return null ;
public class ListChildrenRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListChildrenRequest listChildrenRequest , ProtocolMarshaller protocolMarshaller ) { } }
if (listChildrenRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each member through its protocol binding; order mirrors the model.
    protocolMarshaller.marshall(listChildrenRequest.getParentId(), PARENTID_BINDING);
    protocolMarshaller.marshall(listChildrenRequest.getChildType(), CHILDTYPE_BINDING);
    protocolMarshaller.marshall(listChildrenRequest.getNextToken(), NEXTTOKEN_BINDING);
    protocolMarshaller.marshall(listChildrenRequest.getMaxResults(), MAXRESULTS_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure in the SDK's client-side exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class LazyFeatureDependencies { /** * Creates a LazyProxyFactory * @ return the LazyProxyFactory */ public static LazyProxyFactory createDefaultProxyFactory ( ) { } }
// CGLib is an optional dependency: bail out immediately when it is absent.
if (!testDependencyFullFilled()) {
    return null;
}
final String factoryClassName = "dev.morphia.mapping.lazy.CGLibLazyProxyFactory";
try {
    // Reflective load keeps the hard dependency out of this class.
    return (LazyProxyFactory) Class.forName(factoryClassName).newInstance();
} catch (Exception e) {
    LOG.error("While instantiating " + factoryClassName, e);
    return null;
}
public class SearchHelper { /** * Set up an OR filter for each map key and value . Consider the following example . * < table border = " 1 " > < caption > Example Values < / caption > * < tr > < td > < b > Attribute < / b > < / td > < td > < b > Value < / b > < / td > < / tr > * < tr > < td > givenName < / td > < td > John < / td > < / tr > * < tr > < td > sn < / td > < td > Smith < / td > < / tr > * < / table > * < p > < i > Result < / i > < / p > * < code > ( | ( givenName = John ) ( sn = Smith ) ) < / code > * @ param nameValuePairs A valid list of attribute to name pairs */ public void setFilterOr ( final Map < String , String > nameValuePairs ) { } }
// Validate input: at least one attribute/value pair is required.
if (nameValuePairs == null) {
    // Was a bare NullPointerException with no message; name the offender.
    throw new NullPointerException("nameValuePairs must not be null");
}
if (nameValuePairs.isEmpty()) {
    throw new IllegalArgumentException("requires at least one key");
}
// Presize to the number of pairs; each pair becomes one equality filter.
final List<FilterSequence> filters = new ArrayList<>(nameValuePairs.size());
for (final Map.Entry<String, String> entry : nameValuePairs.entrySet()) {
    filters.add(new FilterSequence(entry.getKey(), entry.getValue(), FilterSequence.MatchingRuleEnum.EQUALS));
}
// "|" joins the individual equality filters into a single OR expression.
setFilterBind(filters, "|");
public class ChiIndexUtils { /** * Evaluates the valence corrected chi index for a set of fragments . * This method takes into account the S and P atom types described in * Kier & Hall ( 1986 ) , page 20 for which empirical delta V values are used . * @ param atomContainer The target < code > AtomContainer < / code > * @ param fragList A list of fragments * @ return The valence corrected chi index * @ throws CDKException if the < code > IsotopeFactory < / code > cannot be created */ public static double evalValenceIndex ( IAtomContainer atomContainer , List < List < Integer > > fragList ) throws CDKException { } }
// Configure isotope data so atomic numbers are available on the atoms below.
try {
    IsotopeFactory ifac = Isotopes.getInstance();
    ifac.configureAtoms(atomContainer);
} catch (IOException e) {
    throw new CDKException("IO problem occurred when using the CDK atom config\n" + e.getMessage(), e);
}
double sum = 0;
for (List<Integer> aFragList : fragList) {
    List<Integer> frag = aFragList;
    // Product of valence delta values over the atoms of this fragment.
    double prod = 1.0;
    for (Object aFrag : frag) {
        int atomSerial = (Integer) aFrag;
        IAtom atom = atomContainer.getAtom(atomSerial);
        String sym = atom.getSymbol();
        if (sym.equals("S")) { // check for some special S environments
            // Empirical delta-V; -1 signals "not a special environment".
            double tmp = deltavSulphur(atom, atomContainer);
            if (tmp != -1) {
                prod = prod * tmp;
                continue;
            }
        }
        if (sym.equals("P")) { // check for some special P environments
            double tmp = deltavPhosphorous(atom, atomContainer);
            if (tmp != -1) {
                prod = prod * tmp;
                continue;
            }
        }
        int z = atom.getAtomicNumber();
        // TODO there should be a neater way to get the valence electron count
        int zv = getValenceElectronCount(atom);
        int hsupp = atom.getImplicitHydrogenCount();
        // Valence delta: (Zv - h) / (Z - Zv - 1), per the class Javadoc's
        // Kier & Hall reference.
        double deltav = (double) (zv - hsupp) / (double) (z - zv - 1);
        prod = prod * deltav;
    }
    // Each fragment contributes 1/sqrt(product of deltas); zero products are skipped.
    if (prod != 0) sum += 1.0 / Math.sqrt(prod);
}
return sum;
public class ObjectParameter { /** * Parse an Enum definition by calling Enum . valueOf . * @ param serializedObject the full enumerated value * @ return the class object */ @ SuppressWarnings ( "unchecked" ) private Object parseEnumParameter ( final Enum < ? > e , final String serializedObject ) { } }
// Use getDeclaringClass() rather than getClass(): for enum constants that
// carry a body, getClass() returns the constant's anonymous subclass, and
// Enum.valueOf throws IllegalArgumentException for such a class.
final Object res = Enum.valueOf(e.getDeclaringClass(), serializedObject);
return res;
public class SynchronizedPDUSender { /** * ( non - Javadoc ) * @ see org . jsmpp . PDUSender # sendQuerySmResp ( java . io . OutputStream , int , * java . lang . String , java . lang . String , org . jsmpp . bean . MessageState , * byte ) */ public byte [ ] sendQuerySmResp ( OutputStream os , int sequenceNumber , String messageId , String finalDate , MessageState messageState , byte errorCode ) throws PDUStringException , IOException { } }
// Serialize access to the shared OutputStream so concurrent senders cannot
// interleave PDU bytes; the actual encoding is delegated to the wrapped sender.
synchronized (os) {
    return pduSender.sendQuerySmResp(os, sequenceNumber, messageId, finalDate, messageState, errorCode);
}
public class Checksum { /** * Calculates the MD5 checksum of a specified bytes . * @ param algorithm the algorithm to use ( md5 , sha1 , etc . ) to calculate the * message digest * @ param bytes the bytes to generate the MD5 checksum * @ return the hex representation of the MD5 hash */ public static String getChecksum ( String algorithm , byte [ ] bytes ) { } }
// Hash the bytes with the requested algorithm and hex-encode the digest.
final byte[] digestBytes = getMessageDigest(algorithm).digest(bytes);
return getHex(digestBytes);
public class XMLEncodingDetector { /** * org . apache . xerces . impl . XMLEntityManager . startEntity ( ) */ private void createInitialReader ( ) throws IOException , JspCoreException { } }
// wrap this stream in RewindableInputStream stream = new RewindableInputStream ( stream ) ; // perform auto - detect of encoding if necessary if ( encoding == null ) { // read first four bytes and determine encoding final byte [ ] b4 = new byte [ 4 ] ; int count = 0 ; for ( ; count < 4 ; count ++ ) { b4 [ count ] = ( byte ) stream . read ( ) ; } if ( count == 4 ) { Object [ ] encodingDesc = getEncodingName ( b4 , count ) ; encoding = ( String ) ( encodingDesc [ 0 ] ) ; isBigEndian = ( Boolean ) ( encodingDesc [ 1 ] ) ; if ( encodingDesc . length > 2 ) { isBomPresent = ( Boolean ) ( encodingDesc [ 2 ] ) ; } else { isBomPresent = true ; } stream . reset ( ) ; // Special case UTF - 8 files with BOM created by Microsoft // tools . It ' s more efficient to consume the BOM than make // the reader perform extra checks . - Ac if ( count > 2 && encoding . equals ( "UTF-8" ) ) { int b0 = b4 [ 0 ] & 0xFF ; int b1 = b4 [ 1 ] & 0xFF ; int b2 = b4 [ 2 ] & 0xFF ; if ( b0 == 0xEF && b1 == 0xBB && b2 == 0xBF ) { // ignore first three bytes . . . stream . skip ( 3 ) ; } } reader = createReader ( stream , encoding , isBigEndian ) ; } else { reader = createReader ( stream , encoding , isBigEndian ) ; } }
public class JstormOnYarn { /** * Monitor the submitted application for completion . * Kill application if time expires . * @ param appId Application Id of application to be monitored * @ return true if application completed successfully * @ throws YarnException * @ throws IOException */ private boolean monitorApplication ( ApplicationId appId ) throws YarnException , IOException { } }
// Remaining polls before we give up and kill the application.
Integer monitorTimes = JOYConstants.MONITOR_TIMES;
while (true) {
    // Check app status every interval.
    try {
        Thread.sleep(JOYConstants.MONITOR_TIME_INTERVAL);
    } catch (InterruptedException e) {
        LOG.debug("Thread sleep in monitoring loop interrupted");
    }
    // Get application report for the appId we are interested in
    ApplicationReport report = jstormClientContext.yarnClient.getApplicationReport(appId);
    // Persist the AM host/rpc port so other tools can locate it. Use
    // try-with-resources so the writer is closed even when a write fails
    // (the old code leaked the BufferedWriter on exception).
    try {
        File writename = new File(JOYConstants.RPC_ADDRESS_FILE);
        writename.createNewFile();
        try (BufferedWriter out = new BufferedWriter(new FileWriter(writename))) {
            out.write(report.getHost() + JOYConstants.NEW_LINE);
            out.write(report.getRpcPort() + JOYConstants.NEW_LINE);
            out.flush();
        }
    } catch (Exception e) {
        // Route through the logger instead of printStackTrace().
        LOG.error("Failed to write rpc address file " + JOYConstants.RPC_ADDRESS_FILE, e);
    }
    LOG.info("Got application report from ASM for" + ", appId=" + appId.getId() + ", clientToAMToken=" + report.getClientToAMToken() + ", appDiagnostics=" + report.getDiagnostics() + ", appMasterHost=" + report.getHost() + ", appQueue=" + report.getQueue() + ", appMasterRpcPort=" + report.getRpcPort() + ", appStartTime=" + report.getStartTime() + ", yarnAppState=" + report.getYarnApplicationState().toString() + ", distributedFinalState=" + report.getFinalApplicationStatus().toString() + ", appTrackingUrl=" + report.getTrackingUrl() + ", appUser=" + report.getUser());
    YarnApplicationState state = report.getYarnApplicationState();
    FinalApplicationStatus dsStatus = report.getFinalApplicationStatus();
    if (YarnApplicationState.FINISHED == state) {
        if (FinalApplicationStatus.SUCCEEDED == dsStatus) {
            LOG.info("Application has completed successfully. Breaking monitoring loop");
            return true;
        } else {
            LOG.info("Application did finished unsuccessfully." + " YarnState=" + state.toString() + ", DSFinalStatus=" + dsStatus.toString() + ". Breaking monitoring loop");
            return false;
        }
    } else if (YarnApplicationState.KILLED == state || YarnApplicationState.FAILED == state) {
        LOG.info("Application did not finish." + " YarnState=" + state.toString() + ", DSFinalStatus=" + dsStatus.toString() + ". Breaking monitoring loop");
        return false;
    } else if (YarnApplicationState.RUNNING == state) {
        LOG.info("Application is running successfully. Breaking monitoring loop");
        return true;
    } else {
        // Still submitted/accepted: give up after the configured number of polls.
        if (monitorTimes-- <= 0) {
            forceKillApplication(appId);
            return false;
        }
    }
}
public class TimedMetadataInsertion { /** * Id3Insertions contains the array of Id3Insertion instances . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setId3Insertions ( java . util . Collection ) } or { @ link # withId3Insertions ( java . util . Collection ) } if you want * to override the existing values . * @ param id3Insertions * Id3Insertions contains the array of Id3Insertion instances . * @ return Returns a reference to this object so that method calls can be chained together . */ public TimedMetadataInsertion withId3Insertions ( Id3Insertion ... id3Insertions ) { } }
// Lazily create the backing list sized for the incoming values, then append
// them all; the builder returns itself for chaining.
if (this.id3Insertions == null) {
    setId3Insertions(new java.util.ArrayList<Id3Insertion>(id3Insertions.length));
}
java.util.Collections.addAll(this.id3Insertions, id3Insertions);
return this;
public class Transporter { /** * - - - ERROR PACKET ( STREAMING ) - - - */ public void sendErrorPacket ( String cmd , String nodeID , Context ctx , Throwable cause , long sequence ) { } }
FastBuildTree msg = throwableToTree ( ctx . id , this . nodeID , cause ) ; // Stream packet counter ( 1 . . . N ) msg . putUnsafe ( "seq" , sequence ) ; // End of streaming msg . putUnsafe ( "stream" , false ) ; // Send message publish ( cmd , nodeID , msg ) ;
public class ZipNumIndex { /** * TODO : replace with matchType version */ public CloseableIterator < String > getCDXIterator ( String key , String start , boolean exact , ZipNumParams params ) throws IOException { } }
CloseableIterator<String> summaryIter = summary.getRecordIteratorLT(key);
// The old code dereferenced params here and only null-checked it further
// down; guard every use consistently.
if (params != null && params.getTimestampDedupLength() > 0) {
    summaryIter = new TimestampDedupIterator(summaryIter, params.getTimestampDedupLength());
}
summaryIter = wrapPrefix(summaryIter, start, exact);
// Optionally cap the number of buffered blocks (and support reverse order).
if (blockLoader.isBufferFully() && (params != null) && (params.getMaxBlocks() > 0)) {
    LineBufferingIterator lineBufferIter = new LineBufferingIterator(summaryIter, params.getMaxBlocks(), params.isReverse());
    lineBufferIter.bufferInput();
    summaryIter = lineBufferIter;
}
return wrapStartIterator(getCDXIterator(summaryIter, params), start);
public class MutatorImpl { /** * { @ inheritDoc } */ @ Override public < N > Mutator < K > addDeletion ( K key , String cf , long clock ) { } }
// Delegates to the full overload with no specific column name/serializer
// (both null) -- presumably a whole-row deletion for this column family;
// TODO(review): confirm the null/null overload semantics. Returns this for chaining.
addDeletion(key, cf, null, null, clock);
return this;
public class IOUtils { /** * Get a input file stream ( automatically gunzip / bunzip2 depending on file extension ) * @ param filename Name of file to open * @ return Input stream that can be used to read from the file * @ throws IOException if there are exceptions opening the file */ public static InputStream getFileInputStream ( String filename ) throws IOException { } }
// Open the raw stream, then wrap according to the file extension.
InputStream in = new FileInputStream(filename);
if (filename.endsWith(".gz")) {
    in = new GZIPInputStream(in);
} else if (filename.endsWith(".bz2")) {
    // in = new CBZip2InputStream ( in ) ;
    // getBZip2PipedInputStream opens the file itself; close the probe stream
    // first so the original descriptor is not leaked (the old code dropped it).
    in.close();
    in = getBZip2PipedInputStream(filename);
}
return in;
public class AgreementSite { /** * Construct a ZK transaction that will add the initiator to the cluster */ public CountDownLatch requestJoin ( final long joiningSite ) throws Exception { } }
// Counted down when the rejoin transaction completes; returned to the caller.
final CountDownLatch cdl = new CountDownLatch(1);
// The join request must be processed on the agreement site's own thread, so
// it is wrapped in a Runnable and delivered to our mailbox as a local message.
final Runnable r = new Runnable() {
    @Override
    public void run() {
        try {
            final long txnId = m_idManager.getNextUniqueTransactionId();
            // Broadcast the join request to every other initiator, including
            // the last safe txn id this site knows for that initiator.
            for (long initiatorHSId : m_hsIds) {
                if (initiatorHSId == m_hsId) continue;
                JSONObject jsObj = new JSONObject();
                jsObj.put("txnId", txnId);
                jsObj.put("initiatorHSId", m_hsId);
                jsObj.put("joiningHSId", joiningSite);
                jsObj.put("lastSafeTxnId", m_safetyState.getNewestSafeTxnIdForExecutorBySiteId(initiatorHSId));
                byte payload[] = jsObj.toString(4).getBytes("UTF-8");
                // Single metadata byte tags the payload as a join request.
                ByteBuffer metadata = ByteBuffer.allocate(1);
                metadata.put(BINARY_PAYLOAD_JOIN_REQUEST);
                BinaryPayloadMessage bpm = new BinaryPayloadMessage(metadata.array(), payload);
                m_mailbox.send(initiatorHSId, bpm);
            }
            // Record the txn locally and enqueue the rejoin transaction state.
            m_txnQueue.noteTransactionRecievedAndReturnLastSeen(m_hsId, txnId, m_safetyState.getNewestGloballySafeTxnId());
            AgreementRejoinTransactionState arts = new AgreementRejoinTransactionState(txnId, m_hsId, joiningSite, cdl);
            if (!m_txnQueue.add(arts)) {
                org.voltdb.VoltDB.crashLocalVoltDB("Shouldn't have failed to add txn", true, null);
            }
            m_transactionsById.put(arts.txnId, arts);
        } catch (Throwable e) {
            org.voltdb.VoltDB.crashLocalVoltDB("Error constructing JSON", false, e);
        }
    }
};
LocalObjectMessage lom = new LocalObjectMessage(r);
lom.m_sourceHSId = m_hsId;
m_mailbox.deliver(lom);
return cdl;
public class ColumnMapperInteger { /** * { @ inheritDoc } */ @ Override public Field field ( String name , Object value ) { } }
// Validate/convert the raw value, then build a stored int field with the
// mapper's configured boost.
final Integer converted = indexValue(name, value);
final Field indexedField = new IntField(name, converted, STORE);
indexedField.setBoost(boost);
return indexedField;
public class PassConfig { /** * Find the first pass provider that does not have a delegate . */ final PassConfig getBasePassConfig ( ) { } }
// Walk down the delegate chain until we reach a concrete config.
PassConfig base;
for (base = this; base instanceof PassConfigDelegate; ) {
    base = ((PassConfigDelegate) base).delegate;
}
return base;
public class CmsPropertyDialogExtension { /** * Opens the property dialog for a resource to be created with the ' New ' dialog . < p > * @ param builder the resource builder used by the ' New ' dialog to create the resource */ public void editPropertiesForNewResource ( CmsNewResourceBuilder builder ) { } }
try {
    CmsPropertiesBean propData = builder.getPropertyData();
    // Serialize the property data with the same GWT-RPC encoding the client
    // expects from I_CmsVfsService.loadPropertyData, so the client-side RPC
    // can consume it directly.
    String serializedPropData = RPC.encodeResponseForSuccess(
        I_CmsVfsService.class.getMethod("loadPropertyData", CmsUUID.class),
        propData,
        CmsPrefetchSerializationPolicy.instance());
    getRpcProxy(I_CmsPropertyClientRpc.class).editPropertiesForNewResource(serializedPropData);
    // Keep the builder so the resource can be created once the dialog is confirmed.
    m_newResourceBuilder = builder;
} catch (Exception e) {
    throw new RuntimeException(e);
}
public class CheerleaderPlayer { /** * Add a list of track to thr current SoundCloud player playlist . * @ param tracks list of { @ link fr . tvbarthel . cheerleader . library . client . SoundCloudTrack } * to be added to the player . */ public void addTracks ( List < SoundCloudTrack > tracks ) { } }
// Fail fast if the player is in an unusable state, then queue every track
// through the single-track path.
checkState();
for (final SoundCloudTrack trackToQueue : tracks) {
    addTrack(trackToQueue);
}
public class NodeImpl { /** * TODO It would be nicer if we could return PropertyNode */ public static NodeImpl createPropertyNode ( final String name , final NodeImpl parent ) { } }
// Builds a node of kind PROPERTY under the given parent; the remaining
// constructor arguments (flags, index/key, parameter metadata) are left at
// their neutral values -- see NodeImpl's constructor for their meaning.
// The method-level TODO stands: a dedicated PropertyNode type would avoid
// this long null argument list.
return new NodeImpl(name, parent, false, null, null, ElementKind.PROPERTY, EMPTY_CLASS_ARRAY, null, null, null, null);
public class SebContextWait { /** * Sleeps for defined timeout without checking for any * condition . */ public SebContextWait sleep ( ) { } }
// Unconditionally block for the configured timeout; no condition is polled.
try {
    sleeper.sleep(timeout);
} catch (InterruptedException cause) {
    // Restore the interrupt flag before surfacing the failure to WebDriver.
    Thread.currentThread().interrupt();
    throw new WebDriverException(cause);
}
return this;
public class BuildInfo { /** * Generates the { @ code build - info . properties } file in the configured * { @ link # setDestinationDir ( File ) destination } . */ @ TaskAction public void generateBuildProperties ( ) { } }
try { new BuildPropertiesWriter ( new File ( getDestinationDir ( ) , "build-info.properties" ) ) . writeBuildProperties ( new ProjectDetails ( this . properties . getGroup ( ) , ( this . properties . getArtifact ( ) != null ) ? this . properties . getArtifact ( ) : "unspecified" , this . properties . getVersion ( ) , this . properties . getName ( ) , this . properties . getTime ( ) , coerceToStringValues ( this . properties . getAdditional ( ) ) ) ) ; } catch ( IOException ex ) { throw new TaskExecutionException ( this , ex ) ; }
public class SimpleDataWriter { /** * Write a source record to the staging file * @ param record data record to write * @ throws java . io . IOException if there is anything wrong writing the record */ @ Override public void write ( byte [ ] record ) throws IOException { } }
// Build the on-disk frame: record [+ delimiter] [with optional length prefix],
// then write it and update the byte/record counters.
Preconditions.checkNotNull(record);
byte[] payload = record;
if (this.recordDelimiter.isPresent()) {
    payload = Arrays.copyOf(record, record.length + 1);
    payload[record.length] = this.recordDelimiter.get();
}
if (this.prependSize) {
    // Length prefix covers the delimited payload, encoded as a big-endian long.
    final ByteBuffer sizeHeader = ByteBuffer.allocate(Longs.BYTES);
    sizeHeader.putLong(payload.length);
    payload = ArrayUtils.addAll(sizeHeader.array(), payload);
}
this.stagingFileOutputStream.write(payload);
this.bytesWritten += payload.length;
this.recordsWritten++;
public class DisplayerEditor {

    /**
     * Widget listeners callback notifications.
     *
     * CDI observer invoked whenever the data set lookup definition changes in
     * the lookup editor: it installs the new lookup on the current settings,
     * drops settings that no longer apply, and rebuilds the displayer and its
     * settings editor UI.
     */
    void onDataSetLookupChanged(@Observes DataSetLookupChangedEvent event) {
        DataSetLookup dataSetLookup = event.getDataSetLookup();
        // Clear any static data set — presumably mutually exclusive with a
        // lookup definition (TODO confirm against DisplayerSettings contract).
        displayerSettings.setDataSet(null);
        displayerSettings.setDataSetLookup(dataSetLookup);
        removeStaleSettings();
        // Rebuild the displayer first, then its editor, then show the result.
        initDisplayer();
        initSettingsEditor();
        showDisplayer();
    }
}
public class PoolFairnessCalculator {

    /**
     * This method takes a list of {@link PoolMetadata} objects and calculates
     * fairness metrics of how well scheduling is doing.
     *
     * The goals of the fair scheduling are to ensure that every pool is getting
     * an equal share. The expected share of resources for each pool is
     * complicated by the pools not requiring an equal share or pools that have
     * a minimum or maximum allocation of resources.
     *
     * @param poolMetadataList List of all pool metadata
     * @param metricsRecord Where to write the metrics
     */
    public static void calculateFairness(
            final List<PoolMetadata> poolMetadataList,
            final MetricsRecord metricsRecord) {
        // Nothing to measure for a null/empty pool list.
        if (poolMetadataList == null || poolMetadataList.isEmpty()) {
            return;
        }
        // Find the total available usage and guaranteed resources by resource
        // type. Add the resource metadata to the sorted set to schedule if
        // there is something to schedule (desiredAfterConstraints > 0)
        long startTime = System.currentTimeMillis();
        Map<String, TotalResourceMetadata> resourceTotalMap =
            new HashMap<String, TotalResourceMetadata>();
        Map<String, Set<ResourceMetadata>> resourceSchedulablePoolMap =
            new HashMap<String, Set<ResourceMetadata>>();
        for (PoolMetadata poolMetadata : poolMetadataList) {
            for (String resourceName : poolMetadata.getResourceMetadataKeys()) {
                ResourceMetadata resourceMetadata =
                    poolMetadata.getResourceMetadata(resourceName);
                TotalResourceMetadata totalResourceMetadata =
                    resourceTotalMap.get(resourceName);
                if (totalResourceMetadata == null) {
                    totalResourceMetadata = new TotalResourceMetadata();
                    resourceTotalMap.put(resourceName, totalResourceMetadata);
                }
                totalResourceMetadata.totalAvailable +=
                    resourceMetadata.getCurrentlyUsed();
                Set<ResourceMetadata> schedulablePoolSet =
                    resourceSchedulablePoolMap.get(resourceName);
                if (schedulablePoolSet == null) {
                    schedulablePoolSet = new HashSet<ResourceMetadata>();
                    resourceSchedulablePoolMap.put(resourceName, schedulablePoolSet);
                }
                if (resourceMetadata.getDesiredAfterConstraints() > 0) {
                    // A pool must appear at most once per resource type.
                    if (!schedulablePoolSet.add(resourceMetadata)) {
                        throw new RuntimeException(
                            "Duplicate resource metadata " + resourceMetadata +
                            " in " + schedulablePoolSet);
                    }
                }
            }
        }
        // First, allocate resources for all the min guaranteed resources
        // for the pools. Ordering is done by the largest
        // min(min guaranteed, desired).
        GuaranteedDesiredComparator guarantedDesiredComparator =
            new GuaranteedDesiredComparator();
        List<ResourceMetadata> removePoolList = new ArrayList<ResourceMetadata>();
        for (Map.Entry<String, TotalResourceMetadata> entry :
                 resourceTotalMap.entrySet()) {
            List<ResourceMetadata> resourceMetadataList =
                new ArrayList<ResourceMetadata>(
                    resourceSchedulablePoolMap.get(entry.getKey()));
            TotalResourceMetadata totalResourceMetadata = entry.getValue();
            Collections.sort(resourceMetadataList, guarantedDesiredComparator);
            // Round-robin one unit at a time until either the resource is
            // exhausted or every pool has reached its guaranteed+desired level.
            while ((totalResourceMetadata.totalAllocated <
                        totalResourceMetadata.totalAvailable) &&
                   !resourceMetadataList.isEmpty()) {
                removePoolList.clear();
                for (ResourceMetadata resourceMetadata : resourceMetadataList) {
                    if (resourceMetadata.getExpectedUsed() ==
                            resourceMetadata.getGuaranteedUsedAndDesired()) {
                        // Pool is satisfied for this phase; drop it from rotation.
                        removePoolList.add(resourceMetadata);
                        continue;
                    }
                    resourceMetadata.incrExpectedUsed();
                    ++totalResourceMetadata.totalAllocated;
                }
                resourceMetadataList.removeAll(removePoolList);
            }
            LOG.info("After allocating min guaranteed and desired - " +
                "Resource type " + entry.getKey() +
                " totalAvailable=" + totalResourceMetadata.totalAvailable +
                ", totalAllocated=" + totalResourceMetadata.totalAllocated);
        }
        // At this point, all pools have been allocated their guaranteed used and
        // desired resources. If there are any more resources to allocate, give
        // resources to lowest allocated pool that hasn't reached desired
        // until all the resources are gone
        ExpectedUsedComparator expectedUsedComparator =
            new ExpectedUsedComparator();
        PriorityQueue<ResourceMetadata> minHeap =
            new PriorityQueue<ResourceMetadata>(100, expectedUsedComparator);
        for (Map.Entry<String, TotalResourceMetadata> entry :
                 resourceTotalMap.entrySet()) {
            minHeap.addAll(resourceSchedulablePoolMap.get(entry.getKey()));
            TotalResourceMetadata totalResourceMetadata = entry.getValue();
            while ((totalResourceMetadata.totalAllocated <
                        totalResourceMetadata.totalAvailable) &&
                   !minHeap.isEmpty()) {
                ResourceMetadata resourceMetadata = minHeap.remove();
                if (resourceMetadata.getExpectedUsed() ==
                        resourceMetadata.getDesiredAfterConstraints()) {
                    // Pool reached its desired cap; leave it out of the heap.
                    continue;
                }
                resourceMetadata.incrExpectedUsed();
                ++totalResourceMetadata.totalAllocated;
                // Re-insert so the heap re-orders by the new expected usage.
                minHeap.add(resourceMetadata);
            }
            minHeap.clear();
        }
        // Now calculate the difference of the expected allocation and the
        // actual allocation to get the following metrics. When calculating
        // the percent bad allocated divide by 2 because the difference double
        // counts a bad allocation
        // 1) total tasks difference between expected and actual allocation
        //    0 is totally fair, higher is less fair
        // 2) % of tasks incorrectly allocated
        //    0 is totally fair, higher is less fair
        // 3) average difference per pool
        //    0 is totally fair, higher is less fair
        // 4) standard deviation per pool
        //    0 is totally fair, higher is less fair
        for (PoolMetadata poolMetadata : poolMetadataList) {
            for (String resourceName : poolMetadata.getResourceMetadataKeys()) {
                ResourceMetadata resourceMetadata =
                    poolMetadata.getResourceMetadata(resourceName);
                int diff = Math.abs(resourceMetadata.getExpectedUsed() -
                    resourceMetadata.getCurrentlyUsed());
                LOG.info("Pool " + poolMetadata.getPoolName() +
                    ", resourceName=" + resourceName +
                    ", expectedUsed=" + resourceMetadata.getExpectedUsed() +
                    ", currentUsed=" + resourceMetadata.getCurrentlyUsed() +
                    ", maxAllowed=" + resourceMetadata.getMaxAllowed() +
                    ", desiredAfterConstraints=" +
                    resourceMetadata.getDesiredAfterConstraints() +
                    ", guaranteedUsedAndDesired=" +
                    resourceMetadata.getGuaranteedUsedAndDesired() +
                    ", diff=" + diff);
                resourceTotalMap.get(resourceName).totalFairnessDifference += diff;
                resourceTotalMap.get(resourceName).totalFairnessDifferenceSquared +=
                    diff * diff;
            }
        }
        // Aggregate an "all" pseudo resource type across every real type.
        TotalResourceMetadata allResourceMetadata = new TotalResourceMetadata();
        allResourceMetadata.resourceTypeCount = resourceTotalMap.size();
        for (TotalResourceMetadata totalResourceMetadata :
                 resourceTotalMap.values()) {
            allResourceMetadata.totalAvailable +=
                totalResourceMetadata.totalAvailable;
            allResourceMetadata.totalFairnessDifference +=
                totalResourceMetadata.totalFairnessDifference;
            allResourceMetadata.totalFairnessDifferenceSquared +=
                totalResourceMetadata.totalFairnessDifferenceSquared;
        }
        resourceTotalMap.put("all", allResourceMetadata);
        StringBuilder metricsBuilder = new StringBuilder();
        for (Map.Entry<String, TotalResourceMetadata> entry :
                 resourceTotalMap.entrySet()) {
            TotalResourceMetadata totalResourceMetadata = entry.getValue();
            // Divide by 2: each misallocated unit shows up in two pools' diffs.
            totalResourceMetadata.percentUnfair =
                (totalResourceMetadata.totalAvailable == 0) ? 0 :
                totalResourceMetadata.totalFairnessDifference * 100f / 2 /
                totalResourceMetadata.totalAvailable;
            totalResourceMetadata.stdDevUnfair = (float) Math.sqrt(
                (double) totalResourceMetadata.totalFairnessDifferenceSquared /
                poolMetadataList.size() /
                totalResourceMetadata.resourceTypeCount);
            totalResourceMetadata.averageUnfairPerPool =
                (float) totalResourceMetadata.totalFairnessDifference /
                poolMetadataList.size() /
                totalResourceMetadata.resourceTypeCount;
            metricsRecord.setMetric(
                FAIRNESS_DIFFERENCE_COUNT_PREFIX + entry.getKey(),
                totalResourceMetadata.totalFairnessDifference);
            metricsBuilder.append(FAIRNESS_DIFFERENCE_COUNT_PREFIX +
                entry.getKey() + "=" +
                totalResourceMetadata.totalFairnessDifference + "\n");
            metricsRecord.setMetric(
                FAIRNESS_PERCENT_UNFAIR_PREFIX + entry.getKey(),
                totalResourceMetadata.percentUnfair);
            metricsBuilder.append(FAIRNESS_PERCENT_UNFAIR_PREFIX +
                entry.getKey() + "=" +
                totalResourceMetadata.percentUnfair + "\n");
            metricsRecord.setMetric(
                FAIRNESS_DIFFERENCE_PER_POOL_PREFIX + entry.getKey(),
                totalResourceMetadata.averageUnfairPerPool);
            metricsBuilder.append(FAIRNESS_DIFFERENCE_PER_POOL_PREFIX +
                entry.getKey() + "=" +
                totalResourceMetadata.averageUnfairPerPool + "\n");
            metricsRecord.setMetric(
                FAIRNESS_UNFAIR_STD_DEV_PERFIX + entry.getKey(),
                totalResourceMetadata.stdDevUnfair);
            metricsBuilder.append(FAIRNESS_UNFAIR_STD_DEV_PERFIX +
                entry.getKey() + "=" +
                totalResourceMetadata.stdDevUnfair + "\n");
            metricsBuilder.append(TOTAL_RESOURCES_PREFIX + entry.getKey() +
                "=" + totalResourceMetadata.totalAvailable + "\n");
        }
        if (LOG.isInfoEnabled()) {
            LOG.info("calculateFairness took " +
                (System.currentTimeMillis() - startTime) + " millisecond(s).");
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("\n" + metricsBuilder.toString());
        }
    }
}
public class PlannerDataModelerHelperUtils { /** * TODO introduce PlannerDatamodelerRenameHelper once DataModelerService . rename uses RenameService to rename DataObject */ private void onDataObjectRename ( @ Observes DataObjectRenamedEvent event ) { } }
Path updatedPath = event . getPath ( ) ; if ( updatedPath != null ) { try { updateDataObject ( updatedPath ) ; } catch ( Exception e ) { logger . error ( "Data object couldn't be updated, path: " + updatedPath + "." ) ; } }
public class Classes {

    /**
     * Retrieve resource, identified by qualified name, as input stream. This method does its best to load requested
     * resource but throws exception if fail. Resource is loaded using {@link ClassLoader#getResourceAsStream(String)} and
     * <code>name</code> argument should follow Java class loader convention: it is always considered as absolute path,
     * that is, should contain package but does not start with leading path separator, e.g. <code>js/fop/config.xml</code>
     *
     * Resource is searched into next class loaders, in given order:
     * <ul>
     * <li>current thread context class loader,
     * <li>this utility class loader,
     * <li>system class loader, as returned by {@link ClassLoader#getSystemClassLoader()}
     * </ul>
     *
     * @param name resource qualified name, using path separators instead of dots.
     * @return resource input stream.
     * @throws IllegalArgumentException if <code>name</code> argument is null or empty.
     * @throws NoSuchBeingException if resource not found.
     */
    public static InputStream getResourceAsStream(String name) {
        Params.notNullOrEmpty(name, "Resource name");
        // not documented behavior: accept but ignore a LEADING path separator
        // (the original comment said "trailing", which contradicted the code)
        if (name.charAt(0) == '/') {
            name = name.substring(1);
        }
        InputStream stream = getResourceAsStream(name, new ClassLoader[] {
            Thread.currentThread().getContextClassLoader(),
            Classes.class.getClassLoader(),
            ClassLoader.getSystemClassLoader()
        });
        if (stream == null) {
            throw new NoSuchBeingException("Resource |%s| not found.", name);
        }
        return stream;
    }
}
public class CommerceCountryPersistenceImpl {

    /**
     * Returns the first commerce country in the ordered set where groupId = &#63; and billingAllowed = &#63; and active = &#63;.
     *
     * @param groupId the group ID
     * @param billingAllowed the billing allowed
     * @param active the active
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce country
     * @throws NoSuchCountryException if a matching commerce country could not be found
     */
    @Override
    public CommerceCountry findByG_B_A_First(long groupId, boolean billingAllowed,
            boolean active, OrderByComparator<CommerceCountry> orderByComparator)
        throws NoSuchCountryException {
        CommerceCountry commerceCountry = fetchByG_B_A_First(groupId,
                billingAllowed, active, orderByComparator);
        if (commerceCountry != null) {
            return commerceCountry;
        }
        // No match: build a descriptive "no such entity" message listing the
        // finder arguments. Capacity 8 matches the number of appended parts.
        StringBundler msg = new StringBundler(8);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("groupId=");
        msg.append(groupId);
        msg.append(", billingAllowed=");
        msg.append(billingAllowed);
        msg.append(", active=");
        msg.append(active);
        msg.append("}");
        throw new NoSuchCountryException(msg.toString());
    }
}
public class Serializer {

    /**
     * Registers a default type serializer for the given base type.
     *
     * Default serializers are used to serialize types for which no specific {@link TypeSerializer} is provided.
     * When a serializable type is {@link #register(Class) registered} without a {@link TypeSerializer}, the
     * first default serializer found for the given type is assigned as the serializer for that type. Default
     * serializers are evaluated against registered types in reverse insertion order, so default serializers
     * registered more recently take precedence over default serializers registered earlier.
     * <pre>
     * {@code
     * serializer.registerDefault(Serializable.class, SerializableSerializer.class);
     * serializer.register(SomeSerializable.class, 1);
     * }
     * </pre>
     * If an object of a type that has not been {@link #register(Class) registered} is
     * {@link #writeObject(Object) serialized} and {@link #isWhitelistRequired() whitelisting} is disabled,
     * the object will be serialized with the class name and a default serializer if one is found.
     *
     * @param baseType The base type for which to register the default serializer. Types that extend the base
     *                 type and are registered without a specific {@link TypeSerializer} will be serialized
     *                 using the registered default {@link TypeSerializer}.
     * @param factory  The default type serializer factory with which to serialize instances of the base type.
     * @return The serializer, for method chaining.
     * @throws NullPointerException if either argument is {@code null}
     */
    public Serializer registerDefault(Class<?> baseType, TypeSerializerFactory factory) {
        // Delegate to the registry, which owns default-serializer bookkeeping.
        registry.registerDefault(baseType, factory);
        return this;
    }
}
public class iptunnel { /** * Use this API to fetch filtered set of iptunnel resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static iptunnel [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
iptunnel obj = new iptunnel ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; iptunnel [ ] response = ( iptunnel [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class ZipUtils {

    /**
     * Zips the given files into an archive at the given path.
     *
     * @param fileNames a {@link java.util.List} of file names to include.
     * @param zipPath a {@link java.lang.String} path of the resulting archive.
     * @return a {@link java.io.File} object for the created archive.
     */
    public static File zip(List<String> fileNames, String zipPath) {
        // Delegate to the three-argument overload; the third argument is left
        // null — presumably an optional setting with a sensible default
        // (TODO confirm against the overload's signature).
        return zip(fileNames, zipPath, null);
    }
}
public class PostgisGeoPlugin { /** * 检查并替换输入的地理信息相关参数 , 使用PostGis提供的ST _ GeomFromText函数 * @ author Pin Liu */ protected void checkAndReplaceInput ( List < IntrospectedColumn > columns , XmlElement xe ) { } }
for ( Element element : xe . getElements ( ) ) { if ( element instanceof XmlElement ) { checkAndReplaceInput ( columns , ( XmlElement ) element ) ; } if ( element instanceof TextElement ) { TextElement te = ( TextElement ) element ; checkAndReplaceInput ( columns , te ) ; } }
public class WsMessageReaderImpl { /** * - - - - - WebSocketMessageReader Implementation - - - - - */ @ Override public ByteBuffer getBinary ( ) throws IOException { } }
if ( _messageType == null ) { return null ; } if ( _messageType == WebSocketMessageType . EOS ) { String s = "End of stream has reached as the connection has been closed" ; throw new WebSocketException ( s ) ; } if ( _messageType != WebSocketMessageType . BINARY ) { String s = "Invalid WebSocketMessageType: Cannot decode the payload " + "as a binary message" ; throw new WebSocketException ( s ) ; } return ByteBuffer . wrap ( ( ( ByteBuffer ) _payload ) . array ( ) ) ;
public class RendererModel { /** * Sets the atom currently highlighted . * @ param highlightedAtom * The atom to be highlighted */ public void setHighlightedAtom ( IAtom highlightedAtom ) { } }
if ( ( this . highlightedAtom != null ) || ( highlightedAtom != null ) ) { this . highlightedAtom = highlightedAtom ; fireChange ( ) ; }
public class CmsHtmlConverter { /** * Converts the given HTML code according to the settings of the converter . < p > * @ param htmlInput HTML input stored in a string * @ return string containing the converted HTML * @ throws UnsupportedEncodingException if the encoding set for the conversion is not supported */ public String convertToString ( String htmlInput ) throws UnsupportedEncodingException { } }
// first : collect all converter classes to use on the input Map < String , List < String > > converters = new HashMap < String , List < String > > ( ) ; for ( Iterator < String > i = getModes ( ) . iterator ( ) ; i . hasNext ( ) ; ) { String mode = i . next ( ) ; String converterClass = OpenCms . getResourceManager ( ) . getHtmlConverter ( mode ) ; List < String > modes = new ArrayList < String > ( ) ; if ( converters . containsKey ( converterClass ) ) { // converter class already defined for a previous mode , get mode list modes = converters . get ( converterClass ) ; } // add mode name to list for the converter modes . add ( mode ) ; // store converter with modes in map converters . put ( converterClass , modes ) ; } // second : convert the content with all found converter classes for ( Iterator < Entry < String , List < String > > > i = converters . entrySet ( ) . iterator ( ) ; i . hasNext ( ) ; ) { Entry < String , List < String > > entry = i . next ( ) ; String className = entry . getKey ( ) ; List < String > modes = entry . getValue ( ) ; try { I_CmsHtmlConverter converter = ( I_CmsHtmlConverter ) Class . forName ( className ) . newInstance ( ) ; // initialize converter converter . init ( getEncoding ( ) , modes ) ; // convert input String htmlInput = converter . convertToString ( htmlInput ) ; } catch ( ClassNotFoundException e ) { LOG . error ( org . opencms . loader . Messages . get ( ) . getBundle ( ) . key ( org . opencms . loader . Messages . LOG_HTML_CONVERTER_CLASS_NOT_FOUND_1 , className ) , e ) ; } catch ( IllegalAccessException e ) { LOG . error ( org . opencms . loader . Messages . get ( ) . getBundle ( ) . key ( org . opencms . loader . Messages . LOG_HTML_CONVERTER_CLASS_NOT_FOUND_1 , className ) , e ) ; } catch ( InstantiationException e ) { LOG . error ( org . opencms . loader . Messages . get ( ) . getBundle ( ) . key ( org . opencms . loader . Messages . LOG_HTML_CONVERTER_CLASS_NOT_FOUND_1 , className ) , e ) ; } } return htmlInput ;
public class MPSubscriptionImpl {

    /**
     * Remove a selection criteria from the subscription.
     *
     * Removes the criteria from the consumer dispatcher state, persists the
     * updated reference stream under an auto-commit transaction, and finally
     * removes the criteria from the matchspace. On a persistence failure the
     * criteria is restored to the dispatcher state (rollback of the in-memory
     * removal) before the failure is rethrown.
     *
     * @throws SIResourceException if the reference stream update fails
     */
    public void removeSelectionCriteria(SelectionCriteria selCriteria) throws SIResourceException {
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "removeSelectionCriteria", new Object[] { selCriteria });
        // If the selection criteria was removed then this is an indication that it was
        // in the matchspace and it can be removed.
        boolean wasRemoved = _consumerDispatcher.getConsumerDispatcherState().removeSelectionCriteria(selCriteria);
        if (wasRemoved) {
            Transaction tran = _messageProcessor.getTXManager().createAutoCommitTransaction();
            // Only request an update if one is not already in flight.
            if (!_consumerDispatcher.getReferenceStream().isUpdating()) {
                try {
                    _consumerDispatcher.getReferenceStream().requestUpdate(tran);
                } catch (MessageStoreException e) {
                    // MessageStoreException shouldn't occur so FFDC.
                    FFDCFilter.processException(
                        e,
                        "com.ibm.ws.sib.processor.impl.MPSubscriptionImpl.removeSelectionCriteria",
                        "1:203:1.6",
                        this);
                    // Add the criteria back into the list as the remove failed.
                    _consumerDispatcher.getConsumerDispatcherState().addSelectionCriteria(selCriteria);
                    SibTr.exception(tc, e);
                    if (tc.isEntryEnabled())
                        SibTr.exit(tc, "removeSelectionCriteria", "SIResourceException");
                    throw new SIResourceException(e);
                }
            }
            // Remove the criteria from the matchspace
            _messageProcessor.getMessageProcessorMatching().removeConsumerDispatcherMatchTarget(_consumerDispatcher, selCriteria);
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "removeSelectionCriteria");
    }
}
public class ClusteredStore { /** * The assumption is that this method will be invoked only by cache . getAll method . */ @ Override public Map < K , ValueHolder < V > > bulkComputeIfAbsent ( final Set < ? extends K > keys , final Function < Iterable < ? extends K > , Iterable < ? extends Map . Entry < ? extends K , ? extends V > > > mappingFunction ) throws StoreAccessException { } }
if ( mappingFunction instanceof Ehcache . GetAllFunction ) { Map < K , ValueHolder < V > > map = new HashMap < > ( ) ; for ( K key : keys ) { ValueHolder < V > value ; try { value = getInternal ( key ) ; } catch ( TimeoutException e ) { // This timeout handling is safe * * only * * in the context of a get / read operation ! value = null ; } map . put ( key , value ) ; } return map ; } else { throw new UnsupportedOperationException ( "This bulkComputeIfAbsent method is not yet capable of handling generic computation functions" ) ; }
public class AppConfig { /** * Returns property value for a key . * @ param key key of property . * @ return value for this key , < code > null < / code > if not found . */ public static String getProperty ( String key ) { } }
if ( ! isInited ( ) ) { init ( ) ; } Property p = props . get ( key ) ; return p == null ? null : p . getValue ( ) ;
public class Name {

    /**
     * Sets the node's identifier.
     *
     * @throws IllegalArgumentException if identifier is null
     */
    public void setIdentifier(String identifier) {
        assertNotNull(identifier);
        this.identifier = identifier;
        // Keep the node's recorded source length in sync with the new text.
        setLength(identifier.length());
    }
}
public class AbstractLoader { /** * { @ inheritDoc } */ public void load ( ) { } }
Source source = getSource ( ) ; Target target = getTarget ( ) ; source . beginExport ( ) ; target . beginImport ( ) ; Set < String > propertyNames = mode . equals ( MappingMode . AUTO ) ? target . getPropertyNames ( ) : source . getPropertyNames ( ) ; for ( Object sourceItem : source ) { Object targetItem = target . createTargetInstance ( source , sourceItem ) ; for ( String property : propertyNames ) { try { Extractor extractor = source . getExtractor ( property ) ; Updater updater = target . getUpdater ( property ) ; updater . update ( targetItem , extractor . extract ( sourceItem ) ) ; } catch ( RuntimeException e ) { LOG . error ( "Mapping error for property '" + property + "': " + e . getMessage ( ) , e ) ; throw e ; } } target . importItem ( targetItem ) ; } source . endExport ( ) ; target . endImport ( ) ;
public class AbstractMessage {

    /*
     * (non-Javadoc)
     * @see javax.jms.Message#setJMSMessageID(java.lang.String)
     */
    @Override
    public final void setJMSMessageID(String id) throws JMSException {
        // The message must be fully deserialized before header fields may be
        // mutated.
        assertDeserializationLevel(MessageSerializationLevel.FULL);
        this.id = id;
    }
}
public class WebDavServletController { /** * Tries to mount the resource served by this servlet as a WebDAV drive on the local machine . * @ param mountParams Optional mount parameters , that may be required for certain operating systems . * @ return A { @ link Mount } instance allowing unmounting and revealing the drive . * @ throws CommandFailedException If mounting failed . */ public Mount mount ( MountParams mountParams ) throws CommandFailedException { } }
if ( ! contextHandler . isStarted ( ) ) { throw new IllegalStateException ( "Mounting only possible for running servlets." ) ; } URI uri = getServletRootUri ( mountParams . getOrDefault ( MountParam . WEBDAV_HOSTNAME , connector . getHost ( ) ) ) ; LOG . info ( "Mounting {} using {}" , uri , mounter . getClass ( ) . getName ( ) ) ; return mounter . mount ( uri , mountParams ) ;
public class ClassGraph {

    /**
     * Print the specified relation.
     *
     * @param from the source class
     * @param to the destination class
     */
    private void relation(Options opt, RelationType rt, ClassDoc from, ClassDoc to, String tailLabel, String label, String headLabel) {
        // Delegate to the overload taking explicit node names, using each
        // class doc's string form as its name.
        relation(opt, rt, from, from.toString(), to, to.toString(), tailLabel, label, headLabel);
    }
}
public class ImLoggedInBase {

    /**
     * Browse to the login page and do the login.
     *
     * @param noPage this param won't be considered: may be null
     */
    @Override
    public WebPage run(WebPage noPage) {
        // The parent run() navigates to the login page; then submit the
        // configured user's credentials.
        LoginPage<?> loginPage = (LoginPage<?>) super.run(noPage);
        return loginPage.loginAs(user.getUsername(), user.getPassword());
    }
}
public class ReportService { /** * Returns the output directory for reporting . */ public Path getReportDirectory ( ) { } }
WindupConfigurationModel cfg = WindupConfigurationService . getConfigurationModel ( getGraphContext ( ) ) ; Path path = cfg . getOutputPath ( ) . asFile ( ) . toPath ( ) . resolve ( REPORTS_DIR ) ; createDirectoryIfNeeded ( path ) ; return path . toAbsolutePath ( ) ;
public class BeanUtil { /** * 把Bean里面的String属性做trim操作 。 * 通常bean直接用来绑定页面的input , 用户的输入可能首尾存在空格 , 通常保存数据库前需要把首尾空格去掉 * @ param < T > Bean类型 * @ param bean Bean对象 * @ param ignoreFields 不需要trim的Field名称列表 ( 不区分大小写 ) */ public static < T > T trimStrFields ( T bean , String ... ignoreFields ) { } }
if ( bean == null ) { return bean ; } final Field [ ] fields = ReflectUtil . getFields ( bean . getClass ( ) ) ; for ( Field field : fields ) { if ( ignoreFields != null && ArrayUtil . containsIgnoreCase ( ignoreFields , field . getName ( ) ) ) { // 不处理忽略的Fields continue ; } if ( String . class . equals ( field . getType ( ) ) ) { // 只有String的Field才处理 final String val = ( String ) ReflectUtil . getFieldValue ( bean , field ) ; if ( null != val ) { final String trimVal = StrUtil . trim ( val ) ; if ( false == val . equals ( trimVal ) ) { // Field Value不为null , 且首尾有空格才处理 ReflectUtil . setFieldValue ( bean , field , trimVal ) ; } } } } return bean ;
public class JoinDomainRequest {

    /**
     * List of IPv4 addresses, NetBIOS names, or host names of your domain server. If you need to specify the port
     * number include it after the colon (":"). For example, <code>mydc.mydomain.com:389</code>.
     *
     * @return List of IPv4 addresses, NetBIOS names, or host names of your domain server. If you need to specify the
     *         port number include it after the colon (":"). For example, <code>mydc.mydomain.com:389</code>.
     */
    public java.util.List<String> getDomainControllers() {
        // Lazily initialize so callers always receive a mutable, non-null list
        // (standard AWS SDK accessor pattern).
        if (domainControllers == null) {
            domainControllers = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return domainControllers;
    }
}
public class AdaptiveCompiler {

    /**
     * httl.properties: loggers=httl.spi.loggers.Log4jLogger
     *
     * Stores the logger and propagates it to the delegate compiler when the
     * delegate supports logging.
     */
    public void setLogger(Logger logger) {
        this.logger = logger;
        if (compiler instanceof AbstractCompiler) {
            // Forward the logger so the delegate logs through the same sink.
            ((AbstractCompiler) compiler).setLogger(logger);
        }
    }
}
public class TemplateMsgAPI { /** * 删除模板 * @ param templateId 模板ID * @ return 删除结果 */ public BaseResponse delTemplate ( String templateId ) { } }
LOG . debug ( "删除模板......" ) ; BeanUtil . requireNonNull ( templateId , "templateId is null" ) ; String url = BASE_API_URL + "cgi-bin/template/del_private_template?access_token=#" ; Map < String , String > map = new HashMap < String , String > ( ) ; map . put ( "template_id" , templateId ) ; BaseResponse r = executePost ( url , JSONUtil . toJson ( map ) ) ; String resultJson = isSuccess ( r . getErrcode ( ) ) ? r . getErrmsg ( ) : r . toJsonString ( ) ; return JSONUtil . toBean ( resultJson , BaseResponse . class ) ;