signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AtomContainerManipulator { /** * Is the { @ code atom } a suppressible hydrogen and can be represented as * implicit . A hydrogen is suppressible if it is not an ion , not the major * isotope ( i . e . it is a deuterium or tritium atom ) and is not molecular * hydrogen . * @ param container the structure * @ param atom an atom in the structure * @ return the atom is a hydrogen and it can be suppressed ( implicit ) */ private static boolean suppressibleHydrogen ( final IAtomContainer container , final IAtom atom ) { } }
// is the atom a hydrogen if ( ! "H" . equals ( atom . getSymbol ( ) ) ) return false ; // is the hydrogen an ion ? if ( atom . getFormalCharge ( ) != null && atom . getFormalCharge ( ) != 0 ) return false ; // is the hydrogen deuterium / tritium ? if ( atom . getMassNumber ( ) != null ) return false ; // molecule hydrogen with implicit H ? if ( atom . getImplicitHydrogenCount ( ) != null && atom . getImplicitHydrogenCount ( ) != 0 ) return false ; // molecule hydrogen List < IAtom > neighbors = container . getConnectedAtomsList ( atom ) ; if ( neighbors . size ( ) == 1 && ( neighbors . get ( 0 ) . getSymbol ( ) . equals ( "H" ) || neighbors . get ( 0 ) instanceof IPseudoAtom ) ) return false ; // what about bridging hydrogens ? // hydrogens with atom - atom mapping ? return true ;
public class SimpleHTTPRequestParser { /** * This function update the fax job from the request data . < br > * This fax job will not have any file data . * @ param request * The HTTP request * @ param queryStringMap * The query string as key / value map * @ param faxJob * The fax job to update */ protected void updateFaxJobFromRequestImpl ( HTTPRequest request , FaxJob faxJob , Map < String , String > queryStringMap ) { } }
// set priority String value = queryStringMap . get ( "priority" ) ; if ( value != null ) { FaxJobPriority priority = FaxJobPriority . valueOf ( value ) ; faxJob . setPriority ( priority ) ; } // set target address value = queryStringMap . get ( "target_address" ) ; if ( value == null ) { throw new FaxException ( "Target address not provided in query string." ) ; } faxJob . setTargetAddress ( value ) ; // set target name value = queryStringMap . get ( "target_name" ) ; if ( value != null ) { faxJob . setTargetName ( value ) ; } // set sender name value = queryStringMap . get ( "sender_name" ) ; if ( value != null ) { faxJob . setSenderName ( value ) ; } // set sender fax number value = queryStringMap . get ( "sender_fax_number" ) ; if ( value != null ) { faxJob . setSenderFaxNumber ( value ) ; } // set sender email value = queryStringMap . get ( "sender_email" ) ; if ( value != null ) { faxJob . setSenderEmail ( value ) ; } Iterator < Entry < String , String > > iterator = queryStringMap . entrySet ( ) . iterator ( ) ; Entry < String , String > entry = null ; String key = null ; String propertyPrefix = "property:" ; int propertyPrefixLength = propertyPrefix . length ( ) ; while ( iterator . hasNext ( ) ) { // get next entry entry = iterator . next ( ) ; // get next key key = entry . getKey ( ) ; if ( key . startsWith ( propertyPrefix ) ) { if ( key . length ( ) > propertyPrefixLength ) { // get key key = key . substring ( propertyPrefixLength ) ; // get value value = entry . getValue ( ) ; // set property faxJob . setProperty ( key , value ) ; } } }
public class FeatureCollectorTask { /** * The collection action . This is the task which will run on a schedule to * gather data from the feature content source system and update the * repository with retrieved data . */ @ Override public void collect ( FeatureCollector collector ) { } }
logBanner ( featureSettings . getJiraBaseUrl ( ) ) ; String proxyUrl = featureSettings . getJiraProxyUrl ( ) ; String proxyPort = featureSettings . getJiraProxyPort ( ) ; if ( ! StringUtils . isEmpty ( proxyUrl ) && ! StringUtils . isEmpty ( proxyPort ) ) { System . setProperty ( "http.proxyHost" , proxyUrl ) ; System . setProperty ( "https.proxyHost" , proxyUrl ) ; System . setProperty ( "http.proxyPort" , proxyPort ) ; System . setProperty ( "https.proxyPort" , proxyPort ) ; } try { long startTime = System . currentTimeMillis ( ) ; long diff = TimeUnit . MILLISECONDS . toHours ( startTime - collector . getLastRefreshTime ( ) ) ; LOGGER . info ( "JIRA Collector is set to work in " + collector . getMode ( ) + " mode" ) ; if ( diff > featureSettings . getRefreshTeamAndProjectHours ( ) ) { LOGGER . info ( "Hours since last run = " + diff + ". Collector is about to refresh Team/Board information" ) ; List < Team > teams = updateTeamInformation ( collector ) ; Set < Scope > scopes = updateProjectInformation ( collector ) ; if ( collector . getLastExecuted ( ) > 0 ) { if ( featureSettings . isCollectorItemOnlyUpdate ( ) ) { refreshValidIssues ( collector , getBoardList ( collector . getId ( ) ) , getScopeList ( collector . getId ( ) ) ) ; } else { refreshValidIssues ( collector , teams , scopes ) ; } } collector . setLastRefreshTime ( System . currentTimeMillis ( ) ) ; featureCollectorRepository . save ( collector ) ; LOGGER . info ( "Collected " + teams . size ( ) + " teams and " + scopes . size ( ) + " projects" ) ; } else { LOGGER . info ( "Hours since last run = " + diff + ". Collector is only collecting updated/new issues." ) ; } updateStoryInformation ( collector ) ; log ( "Finished" , startTime ) ; } catch ( Exception e ) { // catch exception here so we don ' t blow up the collector completely LOGGER . error ( "Failed to collect jira information" , e ) ; }
public class MapExtensions { /** * Remove the given pair into the map . * If the given key is inside the map , but is not mapped to the given value , the * map will not be changed . * @ param < K > type of the map keys . * @ param < V > type of the map values . * @ param map the map to update . * @ param entry the entry ( key , value ) to remove from the map . * @ return { @ code true } if the pair was removed . * @ since 2.15 */ @ Inline ( value = "$1.remove($2.getKey(), $2.getValue())" , statementExpression = true ) public static < K , V > boolean operator_remove ( Map < K , V > map , Pair < ? extends K , ? extends V > entry ) { } }
// TODO use the JRE 1.8 API : map . remove ( entry . getKey ( ) , entry . getValue ( ) ) ; final K key = entry . getKey ( ) ; final V storedValue = map . get ( entry . getKey ( ) ) ; if ( ! Objects . equal ( storedValue , entry . getValue ( ) ) || ( storedValue == null && ! map . containsKey ( key ) ) ) { return false ; } map . remove ( key ) ; return true ;
public class ScriptRuntime { /** * A cheaper and less general version of the above for well - known argument * types . * @ deprecated Use { @ link # setObjectIndex ( Object , double , Object , Context , Scriptable ) } instead */ @ Deprecated public static Object setObjectIndex ( Object obj , double dblIndex , Object value , Context cx ) { } }
return setObjectIndex ( obj , dblIndex , value , cx , getTopCallScope ( cx ) ) ;
public class SnapshotManifestReader { /** * / * ( non - Javadoc ) * @ see org . springframework . batch . item . ItemReader # read ( ) */ @ Override public synchronized ManifestEntry read ( ) throws Exception , UnexpectedInputException , ParseException , NonTransientResourceException { } }
if ( this . reader == null ) { this . reader = new BufferedReader ( new FileReader ( manifestFile ) ) ; long linesRead = getItemsRead ( ) ; if ( linesRead > 0 ) { for ( long i = 0 ; i < linesRead ; i ++ ) { this . reader . readLine ( ) ; } } } String line = this . reader . readLine ( ) ; if ( line != null ) { return ManifestFileHelper . parseManifestEntry ( line ) ; } else { return null ; }
public class Hit { /** * The highlights returned from a document that matches the search request . * @ param highlights * The highlights returned from a document that matches the search request . * @ return Returns a reference to this object so that method calls can be chained together . */ public Hit withHighlights ( java . util . Map < String , String > highlights ) { } }
setHighlights ( highlights ) ; return this ;
public class QueryFactory { /** * todo : currently ' / ' characters are blindly being replaced but this will not allow regex queries to be used */ protected static Object escape ( Object val ) { } }
if ( val instanceof String ) { return ( ( String ) val ) . replaceAll ( "/" , PATH_SEP_TOKEN ) ; } else { return val ; }
public class DateTimeField { /** * Set the Value of this field as a double . * @ param value The value of this field . * @ param iDisplayOption If true , display the new field . * @ param iMoveMove The move mode . * @ return An error code ( NORMAL _ RETURN for success ) . */ public int setValue ( double value , boolean bDisplayOption , int iMoveMode ) { } }
// Set this field ' s value java . util . Date dateTemp = new java . util . Date ( ( long ) value ) ; int iErrorCode = this . setData ( dateTemp , bDisplayOption , iMoveMode ) ; return iErrorCode ;
public class Long { /** * Returns a { @ code Long } object holding the value * extracted from the specified { @ code String } when parsed * with the radix given by the second argument . The first * argument is interpreted as representing a signed * { @ code long } in the radix specified by the second * argument , exactly as if the arguments were given to the { @ link * # parseLong ( java . lang . String , int ) } method . The result is a * { @ code Long } object that represents the { @ code long } * value specified by the string . * < p > In other words , this method returns a { @ code Long } object equal * to the value of : * < blockquote > * { @ code new Long ( Long . parseLong ( s , radix ) ) } * < / blockquote > * @ param s the string to be parsed * @ param radix the radix to be used in interpreting { @ code s } * @ return a { @ code Long } object holding the value * represented by the string argument in the specified * radix . * @ throws NumberFormatException If the { @ code String } does not * contain a parsable { @ code long } . */ public static Long valueOf ( String s , int radix ) throws NumberFormatException { } }
return Long . valueOf ( parseLong ( s , radix ) ) ;
public class StringUtils { /** * Converts a given String to upper case with Locale . ENGLISH * @ param str the string to be converted to upper case * @ return the upper case of string , or itself if string is null / empty */ public static String upperCase ( String str ) { } }
if ( isNullOrEmpty ( str ) ) { return str ; } return str . toUpperCase ( LOCALE_ENGLISH ) ;
public class CmsRelationSystemValidator { /** * Checks a link to a resource which has been deleted . < p > * @ param relation * @ param link the URI of the resource which has a link to the deleted resource * @ param fileLookup a lookup table of files to be published * @ param relationTargets * @ return true if the resource which has a link to the deleted resource is also going to be deleted */ protected boolean checkLinkForDeletedLinkTarget ( CmsRelation relation , String link , Map < String , CmsResource > fileLookup , HashMultimap < String , String > relationTargets ) { } }
boolean isValidLink = false ; // since we are going to delete the resource // check if the linked resource is also to be deleted if ( fileLookup . containsKey ( link ) ) { CmsResource offlineResource = fileLookup . get ( link ) ; Set < String > relationTargetsForLink = relationTargets . get ( link ) ; boolean hasNoRelations = ! relationTargetsForLink . contains ( relation . getTargetPath ( ) ) && ! relationTargetsForLink . contains ( relation . getTargetId ( ) . toString ( ) ) ; isValidLink = offlineResource . getState ( ) . isDeleted ( ) || hasNoRelations ; } return isValidLink ;
public class AbstractYarnClusterDescriptor { @ Override public ClusterClient < ApplicationId > retrieve ( ApplicationId applicationId ) throws ClusterRetrieveException { } }
try { // check if required Hadoop environment variables are set . If not , warn user if ( System . getenv ( "HADOOP_CONF_DIR" ) == null && System . getenv ( "YARN_CONF_DIR" ) == null ) { LOG . warn ( "Neither the HADOOP_CONF_DIR nor the YARN_CONF_DIR environment variable is set." + "The Flink YARN Client needs one of these to be set to properly load the Hadoop " + "configuration for accessing YARN." ) ; } final ApplicationReport appReport = yarnClient . getApplicationReport ( applicationId ) ; if ( appReport . getFinalApplicationStatus ( ) != FinalApplicationStatus . UNDEFINED ) { // Flink cluster is not running anymore LOG . error ( "The application {} doesn't run anymore. It has previously completed with final status: {}" , applicationId , appReport . getFinalApplicationStatus ( ) ) ; throw new RuntimeException ( "The Yarn application " + applicationId + " doesn't run anymore." ) ; } final String host = appReport . getHost ( ) ; final int rpcPort = appReport . getRpcPort ( ) ; LOG . info ( "Found application JobManager host name '{}' and port '{}' from supplied application id '{}'" , host , rpcPort , applicationId ) ; flinkConfiguration . setString ( JobManagerOptions . ADDRESS , host ) ; flinkConfiguration . setInteger ( JobManagerOptions . PORT , rpcPort ) ; flinkConfiguration . setString ( RestOptions . ADDRESS , host ) ; flinkConfiguration . setInteger ( RestOptions . PORT , rpcPort ) ; return createYarnClusterClient ( this , - 1 , // we don ' t know the number of task managers of a started Flink cluster - 1 , // we don ' t know how many slots each task manager has for a started Flink cluster appReport , flinkConfiguration , false ) ; } catch ( Exception e ) { throw new ClusterRetrieveException ( "Couldn't retrieve Yarn cluster" , e ) ; }
public class InternalPureXbaseParser { /** * InternalPureXbase . g : 647:1 : ruleXAssignment returns [ EObject current = null ] : ( ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) | ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) ) ; */ public final EObject ruleXAssignment ( ) throws RecognitionException { } }
EObject current = null ; EObject lv_value_3_0 = null ; EObject this_XConditionalExpression_4 = null ; EObject lv_rightOperand_7_0 = null ; enterRule ( ) ; try { // InternalPureXbase . g : 653:2 : ( ( ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) | ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) ) ) // InternalPureXbase . g : 654:2 : ( ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) | ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) ) { // InternalPureXbase . g : 654:2 : ( ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) | ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) ) int alt17 = 2 ; int LA17_0 = input . LA ( 1 ) ; if ( ( LA17_0 == RULE_ID ) ) { int LA17_1 = input . LA ( 2 ) ; if ( ( LA17_1 == 20 ) ) { alt17 = 1 ; } else if ( ( LA17_1 == EOF || ( LA17_1 >= RULE_STRING && LA17_1 <= RULE_ID ) || ( LA17_1 >= 13 && LA17_1 <= 19 ) || ( LA17_1 >= 21 && LA17_1 <= 62 ) || ( LA17_1 >= 64 && LA17_1 <= 81 ) ) ) { alt17 = 2 ; } else { if ( state . backtracking > 0 ) { state . 
failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 17 , 1 , input ) ; throw nvae ; } } else if ( ( ( LA17_0 >= RULE_STRING && LA17_0 <= RULE_DECIMAL ) || ( LA17_0 >= 14 && LA17_0 <= 15 ) || LA17_0 == 28 || ( LA17_0 >= 44 && LA17_0 <= 45 ) || LA17_0 == 50 || ( LA17_0 >= 58 && LA17_0 <= 59 ) || LA17_0 == 61 || LA17_0 == 64 || LA17_0 == 66 || ( LA17_0 >= 69 && LA17_0 <= 80 ) ) ) { alt17 = 2 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 17 , 0 , input ) ; throw nvae ; } switch ( alt17 ) { case 1 : // InternalPureXbase . g : 655:3 : ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) { // InternalPureXbase . g : 655:3 : ( ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) ) // InternalPureXbase . g : 656:4 : ( ) ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( ( lv _ value _ 3_0 = ruleXAssignment ) ) { // InternalPureXbase . g : 656:4 : ( ) // InternalPureXbase . g : 657:5: { if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXAssignmentAccess ( ) . getXAssignmentAction_0_0 ( ) , current ) ; } } // InternalPureXbase . g : 663:4 : ( ( ruleFeatureCallID ) ) // InternalPureXbase . g : 664:5 : ( ruleFeatureCallID ) { // InternalPureXbase . g : 664:5 : ( ruleFeatureCallID ) // InternalPureXbase . g : 665:6 : ruleFeatureCallID { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXAssignmentRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_13 ) ; ruleFeatureCallID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } if ( state . 
backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . getOpSingleAssignParserRuleCall_0_2 ( ) ) ; } pushFollow ( FOLLOW_3 ) ; ruleOpSingleAssign ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } // InternalPureXbase . g : 686:4 : ( ( lv _ value _ 3_0 = ruleXAssignment ) ) // InternalPureXbase . g : 687:5 : ( lv _ value _ 3_0 = ruleXAssignment ) { // InternalPureXbase . g : 687:5 : ( lv _ value _ 3_0 = ruleXAssignment ) // InternalPureXbase . g : 688:6 : lv _ value _ 3_0 = ruleXAssignment { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . getValueXAssignmentParserRuleCall_0_3_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_value_3_0 = ruleXAssignment ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXAssignmentRule ( ) ) ; } set ( current , "value" , lv_value_3_0 , "org.eclipse.xtext.purexbase.PureXbase.XAssignment" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } break ; case 2 : // InternalPureXbase . g : 707:3 : ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) { // InternalPureXbase . g : 707:3 : ( this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? ) // InternalPureXbase . g : 708:4 : this _ XConditionalExpression _ 4 = ruleXConditionalExpression ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . 
getXConditionalExpressionParserRuleCall_1_0 ( ) ) ; } pushFollow ( FOLLOW_14 ) ; this_XConditionalExpression_4 = ruleXConditionalExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_XConditionalExpression_4 ; afterParserOrEnumRuleCall ( ) ; } // InternalPureXbase . g : 716:4 : ( ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) ) ? int alt16 = 2 ; alt16 = dfa16 . predict ( input ) ; switch ( alt16 ) { case 1 : // InternalPureXbase . g : 717:5 : ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) { // InternalPureXbase . g : 717:5 : ( ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) ) // InternalPureXbase . g : 718:6 : ( ( ( ) ( ( ruleOpMultiAssign ) ) ) ) = > ( ( ) ( ( ruleOpMultiAssign ) ) ) { // InternalPureXbase . g : 728:6 : ( ( ) ( ( ruleOpMultiAssign ) ) ) // InternalPureXbase . g : 729:7 : ( ) ( ( ruleOpMultiAssign ) ) { // InternalPureXbase . g : 729:7 : ( ) // InternalPureXbase . g : 730:8: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getXAssignmentAccess ( ) . getXBinaryOperationLeftOperandAction_1_1_0_0_0 ( ) , current ) ; } } // InternalPureXbase . g : 736:7 : ( ( ruleOpMultiAssign ) ) // InternalPureXbase . g : 737:8 : ( ruleOpMultiAssign ) { // InternalPureXbase . g : 737:8 : ( ruleOpMultiAssign ) // InternalPureXbase . g : 738:9 : ruleOpMultiAssign { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXAssignmentRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_1_1_0_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_3 ) ; ruleOpMultiAssign ( ) ; state . _fsp -- ; if ( state . 
failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } } } // InternalPureXbase . g : 754:5 : ( ( lv _ rightOperand _ 7_0 = ruleXAssignment ) ) // InternalPureXbase . g : 755:6 : ( lv _ rightOperand _ 7_0 = ruleXAssignment ) { // InternalPureXbase . g : 755:6 : ( lv _ rightOperand _ 7_0 = ruleXAssignment ) // InternalPureXbase . g : 756:7 : lv _ rightOperand _ 7_0 = ruleXAssignment { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXAssignmentAccess ( ) . getRightOperandXAssignmentParserRuleCall_1_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_rightOperand_7_0 = ruleXAssignment ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXAssignmentRule ( ) ) ; } set ( current , "rightOperand" , lv_rightOperand_7_0 , "org.eclipse.xtext.purexbase.PureXbase.XAssignment" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } } } break ; } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class CasConfigurationMetadataRepository { /** * Gets property group id . * @ param prop the prop * @ return the property group id */ public static String getPropertyGroupId ( final ConfigurationMetadataProperty prop ) { } }
if ( isCasProperty ( prop ) ) { return StringUtils . substringBeforeLast ( prop . getName ( ) , "." ) ; } return StringUtils . substringBeforeLast ( prop . getId ( ) , "." ) ;
public class Counters { /** * Returns a comparator backed by this counter : two objects are compared by * their associated values stored in the counter . This comparator returns keys * by descending numeric value . Note that this ordering is not fixed , but * depends on the mutable values stored in the Counter . Doing this comparison * does not depend on the type of the key , since it uses the numeric value , * which is always Comparable . * @ param counter * The Counter whose values are used for ordering the keys * @ return A Comparator using this ordering */ public static < E > Comparator < E > toComparatorDescending ( final Counter < E > counter ) { } }
return new Comparator < E > ( ) { public int compare ( E o1 , E o2 ) { return Double . compare ( counter . getCount ( o2 ) , counter . getCount ( o1 ) ) ; } } ;
public class TimePickerSettings { /** * zApplyAllowEmptyTimes , This applies the named setting to the parent component . * Notes : * The zApplyInitialTime ( ) and zApplyAllowEmptyTimes ( ) functions may theoretically be called in * any order . However , the order is currently zApplyInitialTime ( ) and zApplyAllowEmptyTimes ( ) * because that is more intuitive . * This cannot throw an exception while the time picker is being constructed , because a veto * policy cannot be set until after the time picker is constructed . */ private void zApplyAllowEmptyTimes ( ) { } }
// Find out if we need to initialize a null time . if ( ( ! allowEmptyTimes ) && ( parent . getTime ( ) == null ) ) { // We need to initialize the current time , so find out if the default time is vetoed . LocalTime defaultTime = LocalTime . of ( 7 , 0 ) ; if ( InternalUtilities . isTimeVetoed ( vetoPolicy , defaultTime ) ) { throw new RuntimeException ( "Exception in TimePickerSettings.zApplyAllowEmptyTimes(), " + "Could not initialize a null time to 7am, because 7am is vetoed by " + "the veto policy. To prevent this exception, always call " + "setAllowEmptyTimes() -before- setting a veto policy." ) ; } // Initialize the current time . parent . setTime ( defaultTime ) ; }
public class TracingKafkaUtils { /** * Inject Span Context to record headers * @ param spanContext Span Context * @ param headers record headers */ static void inject ( SpanContext spanContext , Headers headers , Tracer tracer ) { } }
tracer . inject ( spanContext , Format . Builtin . TEXT_MAP , new HeadersMapInjectAdapter ( headers , false ) ) ;
public class TableFactor { /** * Returns the highest value assignment to this TableFactor * @ return an array of variable assignments , corresponding to the variables listed in neighborIndices , that has the * highest value */ public int [ ] getBestAssignment ( ) { } }
double maxValue = Double . NEGATIVE_INFINITY ; for ( int i = 0 ; i < values . length ; i ++ ) if ( values [ i ] > maxValue ) { maxValue = values [ i ] ; } // OPTIMIZATION : // Rather than use the standard iterator , which creates lots of int [ ] arrays on the heap , which need to be GC ' d , // we use the fast version that just mutates one array . Since this is read once for us here , this is ideal . Iterator < int [ ] > fastPassByReferenceIterator = fastPassByReferenceIterator ( ) ; do { int [ ] assignment = fastPassByReferenceIterator . next ( ) ; double v = getAssignmentLogValue ( assignment ) ; if ( v == maxValue ) return assignment ; } while ( fastPassByReferenceIterator . hasNext ( ) ) ; throw new IllegalStateException ( "This is unreachable." ) ;
public class ResourceIndexModule { /** * { @ inheritDoc } */ public void modifyObject ( DOReader oldReader , DOReader newReader ) throws ResourceIndexException { } }
_ri . modifyObject ( oldReader , newReader ) ;
public class ClientAsyncResult { /** * This get method returns the result of the asynchronous method call if it is * available . Otherwise , it blocks until the result is available . It is * unblocked when the Work object that runs the asynchronous method on a work * manager finishes ( ie . with either good results or an exception ) , and * sets results on this instance . < p > * @ return - the result object * @ throws CancellationException - if the asynchronous method was canceled successfully * @ throws ExecutionException - if the asynchronous method ended with an exception * @ throws InterruptedException - if the thread is interrupted while waiting */ public Object get ( ) throws ExecutionException , InterruptedException { } }
final boolean isTraceOn = svLogger . isLoggable ( Level . FINER ) ; if ( isTraceOn ) svLogger . entering ( CLASS_NAME , "get" , toString ( ) ) ; if ( ivCancellationException != null ) { if ( isTraceOn ) svLogger . exiting ( CLASS_NAME , "get" , ivCancellationException ) ; throw ivCancellationException ; // d614994 } // If exception was caught on previous get ( ) call , return the exception again . if ( ivExecutionException != null ) { if ( isTraceOn ) svLogger . exiting ( CLASS_NAME , "get" , ivExecutionException ) ; throw ivExecutionException ; // d614994 } try { if ( ( ivResult == null ) && ( ivServer != null ) ) { if ( ivUseServerExtended ) { // F16043 try { ivResult = waitForResult ( 0 ) ; } catch ( TimeoutException ex ) { // Should not happen for infinite timeout . IllegalStateException ise = new IllegalStateException ( ex ) ; if ( isTraceOn ) svLogger . exiting ( CLASS_NAME , "get" , ise ) ; throw ise ; } } else { if ( isTraceOn ) svLogger . logp ( Level . FINER , CLASS_NAME , "get" , "calling stub.get()" ) ; ivResult = ivServer . get ( ) ; } ivServer = null ; } } catch ( ExecutionException ee ) { ivExecutionException = ee ; // d614994 ivServer = null ; // d614994 if ( isTraceOn ) svLogger . exiting ( CLASS_NAME , "get" , ee ) ; throw ee ; // d614994 } catch ( RemoteException e ) { if ( isTraceOn ) svLogger . logp ( Level . FINER , CLASS_NAME , "get" , "caught RemoteException" , e ) ; ivServer = null ; // d614994 if ( ivBusinessRmiRemote ) { ivExecutionException = new ExecutionException ( e ) ; } else { // Should be ok to use getCause ( ) on the RemoteException here , the exception mapping // on the server should insure we have an exception not an error in the RemoteException Throwable cause = e . getCause ( ) ; EJBException ejbEx = initCause ( new EJBException ( cause instanceof Exception ? ( Exception ) cause : e ) ) ; ivExecutionException = new ExecutionException ( ejbEx ) ; } if ( isTraceOn ) svLogger . 
exiting ( CLASS_NAME , "get" , ivExecutionException ) ; // Throw an ExecutionException to the client . throw ivExecutionException ; } if ( isTraceOn ) svLogger . exiting ( CLASS_NAME , "get" , "result" ) ; return ivResult ;
public class Translation { /** * Translates { @ code Class } to Google App Engine Datastore entity ' s kind * name . * @ param clazz The { @ code Class } of Acid House entity . * @ return The kind name translated from Acid House entity class . */ public static String toKind ( Class < ? > clazz ) { } }
if ( clazz . getAnnotation ( org . eiichiro . acidhouse . Entity . class ) == null ) { throw new IllegalArgumentException ( "Entity class [" + clazz + "] must be annotated by @org.eiichiro.acidhouse.Entity" ) ; } return clazz . getSimpleName ( ) ;
public class RibbonBarLayout { @ Override public void setStyleName ( String styleName ) { } }
super . setStyleName ( styleName ) ; for ( RibbonGroup group : groups ) { group . asWidget ( ) . setStyleName ( getStyleName ( ) + "Group" ) ; }
public class MapRandomizer { /** * Create a new { @ link MapRandomizer } with a fixed number of entries . * @ param keyRandomizer the randomizer for keys * @ param valueRandomizer the randomizer for values * @ param nbEntries the number of entries to generate * @ param < K > the type of key elements * @ param < V > the type of value elements * @ return a new { @ link MapRandomizer } */ public static < K , V > MapRandomizer < K , V > aNewMapRandomizer ( final Randomizer < K > keyRandomizer , final Randomizer < V > valueRandomizer , final int nbEntries ) { } }
return new MapRandomizer < > ( keyRandomizer , valueRandomizer , nbEntries ) ;
public class KeyAffinityServiceImpl { /** * Important : this * MUST * be called with WL on { @ link # address2key } . */ @ GuardedBy ( "maxNumberInvariant" ) private void addQueuesForAddresses ( Collection < Address > addresses ) { } }
for ( Address address : addresses ) { if ( interestedInAddress ( address ) ) { address2key . put ( address , new ArrayBlockingQueue < > ( bufferSize ) ) ; } else { log . tracef ( "Skipping address: %s" , address ) ; } }
public class DbfField {
    /**
     * Creates a DbfField by reading the 32-byte field descriptor at the current
     * position of the given stream. The inline byte-offset comments refer to the
     * descriptor layout within the DBF header.
     *
     * @param in         input positioned at the start of a field descriptor
     * @param fieldIndex zero-based index of this field in the header
     * @return the parsed field, or {@code null} when the header terminator byte
     *         is encountered instead of a descriptor
     * @throws DbfException on any stream-reading problem or unsupported field type
     */
    public static DbfField read(DataInput in, int fieldIndex) throws DbfException {
        try {
            DbfField field = new DbfField(fieldIndex);
            byte firstByte = in.readByte();
            if (firstByte == HEADER_TERMINATOR) {
                // End of the DBF header: no more field descriptors follow.
                return null;
            }
            // Field name occupies bytes 0-10; the first byte was already consumed
            // above to test for the terminator, so read the remaining 10 and put
            // the consumed byte back at index 0.
            byte[] nameBuf = new byte[11]; /* 1-10 */
            in.readFully(nameBuf, 1, 10);
            nameBuf[0] = firstByte;
            // The name is zero-terminated inside the 11-byte buffer.
            int zeroIndex = 0;
            while (zeroIndex < nameBuf.length && nameBuf[zeroIndex] != 0) zeroIndex++;
            field.fieldName = new String(nameBuf, 0, zeroIndex);
            byte fieldType = in.readByte();
            field.dataType = DbfDataType.valueOf(fieldType); /* 11 */
            if (field.dataType == null) {
                throw new DbfException(String.format("Unsupported Dbf field type: %s", Integer.toString(fieldType, 16)));
            }
            field.reserv1 = DbfUtils.readLittleEndianInt(in); /* 12-15 */
            field.fieldLength = in.readUnsignedByte(); /* 16 */
            field.decimalCount = in.readByte(); /* 17 */
            field.reserv2 = DbfUtils.readLittleEndianShort(in); /* 18-19 */
            field.workAreaId = in.readByte(); /* 20 */
            // NOTE(review): reserv2 is assigned a second time here, so the value
            // read from bytes 18-19 is overwritten by bytes 21-22. This looks like
            // it was meant to target a different field (reserv3?) -- confirm
            // against the DBF field-descriptor layout before changing.
            field.reserv2 = DbfUtils.readLittleEndianShort(in); /* 21-22 */
            field.setFieldsFlag = in.readByte(); /* 23 */
            in.readFully(field.reserv4); /* 24-30 */
            field.indexFieldFlag = in.readByte(); /* 31 */
            return field;
        } catch (IOException e) {
            throw new DbfException("Cannot read Dbf field", e);
        }
    }
}
public class ImageModerationsImpl {
    /**
     * Returns probabilities of the image containing racy or adult content
     * (async variant); delegates to the service-response overload and adapts
     * the result to a {@link ServiceFuture}.
     *
     * @param contentType the content type
     * @param imageUrl the image url
     * @param evaluateUrlInputOptionalParameter optional parameters for this call
     * @param serviceCallback async callback for success/failure
     * @throws IllegalArgumentException if parameters fail validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Evaluate> evaluateUrlInputAsync(String contentType, BodyModelModel imageUrl, EvaluateUrlInputOptionalParameter evaluateUrlInputOptionalParameter, final ServiceCallback<Evaluate> serviceCallback) {
        return ServiceFuture.fromResponse(evaluateUrlInputWithServiceResponseAsync(contentType, imageUrl, evaluateUrlInputOptionalParameter), serviceCallback);
    }
}
public class DoubleTuples { /** * Returns the bias - corrected sample variance of the given tuple . * @ param t The input tuple * @ param mean The mean , which may have been computed before with * { @ link # arithmeticMean ( DoubleTuple ) } * @ return The variance */ public static double variance ( DoubleTuple t , double mean ) { } }
int d = t . getSize ( ) ; double variance = 0 ; for ( int i = 0 ; i < d ; i ++ ) { double difference = t . get ( i ) - mean ; variance += difference * difference ; } return variance / ( d - 1 ) ;
public class TimebasedOrderFilter {
    /**
     * Adds a time-based UUID to the entry. If {@code updateRdn} is true, the
     * UUID becomes the RDN (use this to handle duplicate entries).
     *
     * @param entry     entry to decorate with the unique-object object class and id
     * @param updateRdn whether to re-root the entry's DN on the generated id
     */
    public static void addId(Entry entry, boolean updateRdn) {
        String uuid = newUUID().toString();
        try {
            entry.add(SchemaConstants.OBJECT_CLASS_ATTRIBUTE, UNIQUE_OBJECT_OC);
            entry.add(ID_ATTRIBUTE, uuid);
        } catch (LdapException e) {
            // Wrap the checked LDAP exception in the project's runtime variant.
            throw new LdapRuntimeException(e);
        }
        if (updateRdn) {
            // Rebuild the DN under the same parent with "<ID_ATTRIBUTE>=<uuid>" as RDN.
            Dn newDn = LdapUtils.concatDn(ID_ATTRIBUTE, uuid, entry.getDn().getParent());
            entry.setDn(newDn);
        }
    }
}
public class MapLister {
    /**
     * Renders the content of a Map as a String, one entry per line sorted by
     * key, each line formatted as "key\t= value\n".
     *
     * @param map the Map whose content should be listed
     * @return the formatted content, or "null" if the map itself is null
     */
    public static String listToString(Map<?, ?> map) {
        if (map == null) {
            return "null";
        }
        // TreeMap copy sorts the entries by key for deterministic output.
        StringBuilder out = new StringBuilder();
        for (Map.Entry<?, ?> entry : new TreeMap<Object, Object>(map).entrySet()) {
            // String.valueOf renders null keys/values as "null", matching the contract.
            out.append(String.valueOf(entry.getKey()))
               .append("\t= ")
               .append(String.valueOf(entry.getValue()))
               .append('\n');
        }
        return out.toString();
    }
}
public class UniverseApi {
    /**
     * Get moon information (asynchronously). This route expires daily at 11:05.
     *
     * @param moonId      moon_id integer (required)
     * @param datasource  server name to get data from (optional, default tranquility)
     * @param ifNoneMatch ETag from a previous request; a 304 is returned on match (optional)
     * @param callback    callback executed when the API call finishes
     * @return the request call
     * @throws ApiException if the call cannot be processed, e.g. when serializing the request body
     */
    public com.squareup.okhttp.Call getUniverseMoonsMoonIdAsync(Integer moonId, String datasource, String ifNoneMatch, final ApiCallback<MoonResponse> callback) throws ApiException {
        // Validation happens in the *ValidateBeforeCall helper; this method only
        // wires up the async execution with the expected response type.
        com.squareup.okhttp.Call call = getUniverseMoonsMoonIdValidateBeforeCall(moonId, datasource, ifNoneMatch, callback);
        Type localVarReturnType = new TypeToken<MoonResponse>() { }.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
public class Check { /** * Check if < code > a < / code > is different to < code > b < / code > . * @ param a The parameter to test . * @ param b The parameter to compare to . * @ throws LionEngineException If check failed . */ public static void different ( double a , double b ) { } }
if ( Double . compare ( a , b ) == 0 ) { throw new LionEngineException ( ERROR_ARGUMENT + String . valueOf ( a ) + ERROR_DIFFERENT + String . valueOf ( b ) ) ; }
public class AbstractHasTopLevelRules {
    /**
     * Get the {@code @keyframes} rule at the specified index, counting only
     * keyframes rules among all top-level rules.
     *
     * @param nIndex index to resolve; should be &ge; 0 and &lt; {@link #getKeyframesRuleCount()}
     * @return {@code null} if an invalid index was specified
     * @since 3.7.4
     */
    @Nullable
    public CSSKeyframesRule getKeyframesRuleAtIndex(@Nonnegative final int nIndex) {
        // Filters the rule list to keyframes rules, then picks the nIndex-th match.
        return m_aRules.getAtIndexMapped(r -> r instanceof CSSKeyframesRule, nIndex, r -> (CSSKeyframesRule) r);
    }
}
public class DefaultInstalledExtension { /** * Sets the value of the specified extension property on the given namespace . * @ param key the extension property to set * @ param value the property value * @ param namespace the namespace to associate the property with , { @ code null } for the root namespace * @ since 7.0M2 */ public void setNamespaceProperty ( String key , Object value , String namespace ) { } }
try { this . propertiesLock . lock ( ) ; Map < String , Object > namespaceProperties = getNamespaceProperties ( namespace ) ; if ( namespaceProperties != null ) { namespaceProperties . put ( key , value ) ; } } finally { this . propertiesLock . unlock ( ) ; }
public class DummyResponsiveImageMediaMarkupBuilder {
    /**
     * Build JSON metadata for one rendition as image source: a dummy URL for
     * the given media format plus the format's breakpoint property.
     *
     * @param media       media (unused here; kept for subclass overrides -- TODO confirm)
     * @param mediaFormat media format supplying the breakpoint and dummy URL
     * @return JSON metadata for this source
     */
    protected JSONObject toReponsiveImageSource(Media media, MediaFormat mediaFormat) {
        String url = buildDummyImageUrl(mediaFormat);
        try {
            JSONObject source = new JSONObject();
            source.put(MediaNameConstants.PROP_BREAKPOINT, mediaFormat.getProperties().get(MediaNameConstants.PROP_BREAKPOINT));
            source.put(ResponsiveImageMediaMarkupBuilder.PROP_SRC, url);
            return source;
        } catch (JSONException ex) {
            // JSONException here indicates a programming error, not bad input.
            throw new RuntimeException("Error building JSON source.", ex);
        }
    }
}
public class RaCodeGen {
    /**
     * Writes the package declaration and import section of the generated
     * resource adapter class. Which imports are emitted depends on the JCA
     * spec version, inbound support, outbound support and annotation usage
     * recorded in the definition.
     *
     * @param def definition describing the adapter being generated
     * @param out writer receiving the generated source
     * @throws IOException on write failure
     */
    @Override
    public void writeImport(Definition def, Writer out) throws IOException {
        // Package declaration first (this method owns the whole file header).
        out.write("package " + def.getRaPackage() + ";\n\n");
        if (def.isSupportInbound()) {
            // Inbound adapters reference the generated activation classes.
            out.write("import " + def.getRaPackage() + ".inflow." + def.getActivationClass() + ";\n");
            out.write("import " + def.getRaPackage() + ".inflow." + def.getAsClass() + ";\n\n");
            if (def.getVersion().equals("1.5")) {
                // JCA 1.5 generation uses a synchronized HashMap.
                out.write("import java.util.Collections;\n");
                out.write("import java.util.HashMap;\n");
                out.write("import java.util.Map;\n");
                writeEol(out);
            } else if (def.getVersion().equals("1.6") || def.getVersion().equals("1.7")) {
                // JCA 1.6/1.7 generation uses ConcurrentHashMap instead.
                out.write("import java.util.concurrent.ConcurrentHashMap;\n");
                writeEol(out);
            }
        }
        importLogging(def, out);
        out.write("import javax.resource.ResourceException;\n");
        out.write("import javax.resource.spi.ActivationSpec;\n");
        if (def.isUseAnnotation() && def.getAuthenMechanisms() != null && def.getAuthenMechanisms().size() > 0) {
            out.write("import javax.resource.spi.AuthenticationMechanism;\n");
            out.write("import javax.resource.spi.AuthenticationMechanism.CredentialInterface;\n");
        }
        out.write("import javax.resource.spi.BootstrapContext;\n");
        if (def.isUseAnnotation()) {
            importConfigProperty(def, out);
            out.write("import javax.resource.spi.Connector;\n");
        }
        out.write("import javax.resource.spi.ResourceAdapter;\n");
        out.write("import javax.resource.spi.ResourceAdapterInternalException;\n");
        if (def.isUseAnnotation() && def.getSecurityPermissions() != null && def.getSecurityPermissions().size() > 0) {
            out.write("import javax.resource.spi.SecurityPermission;\n");
        }
        if (def.isUseAnnotation() && def.isSupportOutbound()) {
            out.write("import javax.resource.spi.TransactionSupport;\n");
        }
        out.write("import javax.resource.spi.endpoint.MessageEndpointFactory;\n\n");
        out.write("import javax.transaction.xa.XAResource;\n\n");
    }
}
public class DistanceFormat {
    /**
     * Formats an area expressed in the map's CRS units according to the map's
     * configured unit system. Metric areas switch from m&sup2; to km&sup2; above one
     * square kilometre; English areas use mi&sup2; above one square mile, else yd&sup2;.
     *
     * @param map  the map whose unit configuration and CRS unit length apply
     * @param area the original area, expressed in the map's CRS units
     * @return the formatted area string, suffixed with the unit and a squared sign
     */
    public static String asMapArea(MapWidget map, double area) {
        // Convert CRS-unit area to square metres: scale each linear dimension.
        double unitLength = map.getUnitLength();
        double distance = area * unitLength * unitLength;
        String unit = "m";
        if (map.getMapModel().getMapInfo().getDisplayUnitType() == UnitType.METRIC) {
            // Right now the area is in square metres. Switch to km2 when large.
            if (distance > (METERS_IN_KM * METERS_IN_KM)) {
                distance /= (METERS_IN_KM * METERS_IN_KM);
                unit = "km";
            }
        } else if (map.getMapModel().getMapInfo().getDisplayUnitType() == UnitType.ENGLISH) {
            if (distance > (METERS_IN_MILE * METERS_IN_MILE)) {
                // Switch to square miles:
                distance = distance / (METERS_IN_MILE * METERS_IN_MILE);
                unit = "mi";
            } else {
                // Otherwise express small English areas in square yards.
                distance /= (METERS_IN_YARD * METERS_IN_YARD);
                unit = "yd";
            }
        } else if (map.getMapModel().getMapInfo().getDisplayUnitType() == UnitType.CRS) {
            // CRS mode: leave the value in CRS units, generic unit label.
            unit = "u";
        }
        String formatted = NumberFormat.getDecimalFormat().format(distance);
        return formatted + unit + "&sup2;";
    }
}
public class RuntimeStepExecution {
    /**
     * Creates/updates the committedMetrics map by snapshotting the current
     * value of every transaction-coordinated metric type.
     */
    public void setCommittedMetrics() {
        for (MetricImpl.MetricType metricType : this.tranCoordinatedMetricTypes) {
            // Copy the current value into a fresh MetricImpl so later mutations
            // of the live metric do not affect the committed snapshot.
            committedMetrics.put(metricType.name(), new MetricImpl(metricType, this.getMetric(metricType).getValue()));
        }
    }
}
public class ICUService { /** * A convenience override of registerObject ( Object , String , boolean ) * that defaults visible to true . */ public Factory registerObject ( Object obj , String id ) { } }
return registerObject ( obj , id , true ) ;
public class PdfContentStreamProcessor {
    /**
     * Displays text and advances the text matrix by the rendered width.
     *
     * @param string the text to display
     * @param tj     the text adjustment (TJ operand)
     */
    public void displayPdfString(PdfString string, float tj) {
        String unicode = decode(string);
        // This is width in unscaled units - we have to normalize by the Tm scaling.
        float width = getStringWidth(unicode, tj);
        // Translate the current text matrix by the string width for the next run.
        Matrix nextTextMatrix = new Matrix(width, 0).multiply(textMatrix);
        displayText(unicode, nextTextMatrix);
        textMatrix = nextTextMatrix;
    }
}
public class DefaultDeviceManager { /** * Initializes the AndroidDebugBridge and registers the DefaultHardwareDeviceManager with the * AndroidDebugBridge device change listener . */ protected void initializeAdbConnection ( ) { } }
// Get a device bridge instance . Initialize , create and restart . try { AndroidDebugBridge . init ( false ) ; } catch ( IllegalStateException e ) { if ( ! shouldKeepAdbAlive ) { log . log ( Level . WARNING , "AndroidDebugBridge may have been already initialized at this point. It is OK to proceed." , e ) ; } } bridge = AndroidDebugBridge . getBridge ( ) ; if ( bridge == null ) { bridge = AndroidDebugBridge . createBridge ( adbPath , false ) ; } IDevice [ ] devices = bridge . getDevices ( ) ; AndroidDebugBridge . addDeviceChangeListener ( this ) ; // Add the existing devices to the list of devices we are tracking . if ( devices . length > 0 ) { for ( int i = 0 ; i < devices . length ; i ++ ) { deviceConnected ( devices [ i ] ) ; log . info ( "my devices: " + devices [ i ] . getAvdName ( ) ) ; } } else { long timeout = System . currentTimeMillis ( ) + 2000 ; while ( ( devices = bridge . getDevices ( ) ) . length == 0 && System . currentTimeMillis ( ) < timeout ) { try { Thread . sleep ( 50 ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( e ) ; } } if ( devices . length > 0 ) { for ( int i = 0 ; i < devices . length ; i ++ ) { deviceConnected ( devices [ i ] ) ; log . info ( "my devices: " + devices [ i ] . getAvdName ( ) ) ; } } }
public class AiffData {
    /**
     * Creates an AiffData container from the specified URL.
     *
     * @param path URL to the AIFF file
     * @return AiffData containing the audio data, or null if a failure occurred
     */
    public static AiffData create(URL path) {
        try {
            // Buffer the stream so AudioSystem can mark/reset while sniffing the format.
            return create(AudioSystem.getAudioInputStream(new BufferedInputStream(path.openStream())));
        } catch (Exception e) {
            // Best-effort loader: log and return null instead of propagating.
            org.lwjgl.LWJGLUtil.log("Unable to create from: " + path);
            e.printStackTrace();
            return null;
        }
    }
}
public class AbstractWMultiSelectList {
    /**
     * Returns the options which are not currently selected.
     *
     * @return an unmodifiable list of the unselected option(s); empty when
     *         there are no options at all
     */
    public List<?> getNotSelected() {
        List options = getOptions();
        if (options == null || options.isEmpty()) {
            return Collections.EMPTY_LIST;
        }
        // Start from a copy of all options and subtract the selected ones.
        List notSelected = new ArrayList(options);
        notSelected.removeAll(getSelected());
        return Collections.unmodifiableList(notSelected);
    }
}
public class OparamTag {
    /**
     * Takes the evaluated JSP body of this tag, prepends optional page/taglib
     * directives, and writes the result to the subpage file on disk.
     *
     * @param pContent the body of the tag as a String
     * @throws JspException if writing the subpage fails
     */
    protected void processBody(String pContent) throws JspException {
        // Okay, we have the content, we need to write it to disk somewhere
        String content = pContent;
        if (!StringUtil.isEmpty(language)) {
            // Prepend a page directive so the subpage compiles with the right language.
            content = "<%@page language=\"" + language + "\" %>" + content;
        }
        if (!StringUtil.isEmpty(prefix)) {
            content = "<%@taglib uri=\"/twelvemonkeys-common\" prefix=\"" + prefix + "\" %>" + content;
        }
        // Write the content of the oparam to disk
        try {
            log("Processing subpage " + subpage.getPath());
            // NOTE(review): getBytes() uses the platform default charset here;
            // if the JSP container expects a specific encoding this may corrupt
            // non-ASCII content -- confirm intended encoding.
            FileUtil.write(subpage, content.getBytes());
        } catch (IOException ioe) {
            throw new JspException(ioe);
        }
    }
}
public class MessageBuffer {
    /**
     * Creates a new MessageBuffer instance backed by a ByteBuffer.
     *
     * @param bb the backing buffer; must not be null
     * @return a MessageBuffer wrapping {@code bb}, built via the specialized
     *         subclass constructor when one was resolved at class-init time
     */
    private static MessageBuffer newMessageBuffer(ByteBuffer bb) {
        checkNotNull(bb);
        // Prefer the reflectively-resolved subclass constructor when available.
        if (mbBBConstructor != null) {
            return newInstance(mbBBConstructor, bb);
        }
        return new MessageBuffer(bb);
    }
}
public class RequestHandler {
    /**
     * Performs the logistics of collecting and assembling the individual
     * health-check information on a per-node basis, then folds the results
     * into a single diagnostics response.
     *
     * @param id the report identifier to embed in the response
     * @return an observable with the response once ready
     */
    public Observable<DiagnosticsResponse> diagnostics(final String id) {
        // Kick off one diagnostics observable per node.
        List<Observable<EndpointHealth>> diags = new ArrayList<Observable<EndpointHealth>>(nodes.size());
        for (Node node : nodes) {
            diags.add(node.diagnostics());
        }
        // Snapshot the ring buffer diagnostics now, outside the async pipeline.
        final RingBufferDiagnostics ringBufferDiagnostics = RingBufferMonitor.instance().diagnostics();
        return Observable.merge(diags).toList().map(new Func1<List<EndpointHealth>, DiagnosticsResponse>() {
            @Override
            public DiagnosticsResponse call(List<EndpointHealth> checks) {
                return new DiagnosticsResponse(new DiagnosticsReport(checks, environment.userAgent(), id, ringBufferDiagnostics));
            }
        });
    }
}
public class FleetLaunchTemplateConfig { /** * Any parameters that you specify override the same parameters in the launch template . * @ param overrides * Any parameters that you specify override the same parameters in the launch template . */ public void setOverrides ( java . util . Collection < FleetLaunchTemplateOverrides > overrides ) { } }
if ( overrides == null ) { this . overrides = null ; return ; } this . overrides = new com . amazonaws . internal . SdkInternalList < FleetLaunchTemplateOverrides > ( overrides ) ;
public class NioGroovyMethods { /** * Converts this Path to a { @ link groovy . lang . Writable } or delegates to default * { @ link org . codehaus . groovy . runtime . DefaultGroovyMethods # asType ( Object , Class ) } . * @ param path a Path * @ param c the desired class * @ return the converted object * @ since 2.3.0 */ @ SuppressWarnings ( "unchecked" ) public static < T > T asType ( Path path , Class < T > c ) { } }
if ( c == Writable . class ) { return ( T ) asWritable ( path ) ; } return DefaultGroovyMethods . asType ( ( Object ) path , c ) ;
public class PolygonMarkers {
    /**
     * {@inheritDoc}
     *
     * Propagates visibility to the polygon shape, every vertex marker, and
     * every hole's marker set.
     */
    @Override
    public void setVisible(boolean visible) {
        if (polygon != null) {
            polygon.setVisible(visible);
        }
        for (Marker marker : markers) {
            marker.setVisible(visible);
        }
        for (PolygonHoleMarkers hole : holes) {
            hole.setVisible(visible);
        }
    }
}
public class Resolve {
    /**
     * If `sym' is a bad symbol: report error and return errSymbol, else pass
     * through unchanged. The additional arguments duplicate what was used when
     * trying to find the symbol (flyweight pattern) since misses are frequent.
     *
     * @param sym The symbol that was found, or a ResolveError.
     * @param pos The position to use for error reporting.
     * @param location The symbol that served as a context for this lookup.
     * @param site The original type from where the selection took place.
     * @param name The symbol's name.
     * @param qualified Did we get here through a qualified expression resolution?
     * @param argtypes The invocation's value arguments, if we looked for a method.
     * @param typeargtypes The invocation's type arguments, if we looked for a method.
     * @param logResolveHelper helper class used to log resolve errors.
     */
    Symbol accessInternal(Symbol sym, DiagnosticPosition pos, Symbol location, Type site, Name name, boolean qualified, List<Type> argtypes, List<Type> typeargtypes, LogResolveHelper logResolveHelper) {
        if (sym.kind.isResolutionError()) {
            ResolveError errSym = (ResolveError) sym.baseSymbol();
            // For qualified lookups the error symbol is accessed relative to the
            // site's type symbol; otherwise relative to the "no symbol" sentinel.
            sym = errSym.access(name, qualified ? site.tsym : syms.noSymbol);
            argtypes = logResolveHelper.getArgumentTypes(errSym, sym, name, argtypes);
            if (logResolveHelper.resolveDiagnosticNeeded(site, argtypes, typeargtypes)) {
                logResolveError(errSym, pos, location, site, name, argtypes, typeargtypes);
            }
        }
        return sym;
    }
}
public class ServletSendErrorTask { /** * { @ inheritDoc } */ public Formula getFormula ( ) { } }
Reagent [ ] reagents = new Reagent [ ] { STATUS_CODE , MESSAGE , SOURCE } ; final Formula rslt = new SimpleFormula ( ServletSendErrorTask . class , reagents ) ; return rslt ;
public class LTPATokenService {
    /**
     * {@inheritDoc}
     *
     * Validates the raw token bytes with the configured LTPA token factory and
     * then runs the service's own post-validation on the recreated token.
     */
    @Override
    public Token recreateTokenFromBytes(byte[] tokenBytes) throws InvalidTokenException, TokenExpiredException {
        TokenFactory tokenFactory = ltpaConfig.getTokenFactory();
        Token token = tokenFactory.validateTokenBytes(tokenBytes);
        validateRecreatedToken(token);
        return token;
    }
}
public class ReservoirItemsSketch {
    /**
     * Returns a copy of the items in the reservoir, or null if empty. The
     * returned array length may be smaller than the reservoir capacity.
     *
     * <p>In order to allocate an array of generic type T, the class of the
     * first item is used. This may throw an <tt>ArrayAssignmentException</tt>
     * if the reservoir stores instances of a polymorphic base class.</p>
     *
     * @return a copy of the reservoir array
     */
    @SuppressWarnings("unchecked")
    public T[] getSamples() {
        if (itemsSeen_ == 0) {
            return null;
        }
        // Use the runtime class of the first element so toArray can allocate T[].
        final Class<?> clazz = data_.get(0).getClass();
        return data_.toArray((T[]) Array.newInstance(clazz, 0));
    }
}
public class AbstractCell {
    /**
     * Implements the rendering process for data grid cells. When the grid's
     * render state is not {@link DataGridTagModel#RENDER_STATE_START}, this tag:
     * applies its attributes to the {@link CellModel}, publishes the model into
     * the JspContext under the key used by {@link DataGridUtil}, renders via
     * {@link #renderCell(org.apache.beehive.netui.tags.rendering.AbstractRenderAppender)},
     * writes any produced content to the output, and always removes the model
     * again (even on exception).
     *
     * @throws JspException if no data grid model is available
     * @throws IOException on output failure
     */
    public void doTag() throws JspException, IOException {
        DataGridTagModel dataGridModel = DataGridUtil.getDataGridTagModel(getJspContext());
        if (dataGridModel == null) {
            String s = Bundle.getString("Tags_DataGrid_MissingDataGridModel", new Object[] { getTagName() });
            throw new JspException(s);
        }
        int gridRenderState = dataGridModel.getRenderState();
        /* RENDER_STATE_START is a no-op for cells */
        if (gridRenderState == DataGridTagModel.RENDER_STATE_START) {
            return;
        }
        /* otherwise, the CellModel associated with this tag needs to be fetched
           from the <cell> tag for the current iteration */
        else {
            CellModel model = internalGetCellModel();
            model.setDataGridTagModel(dataGridModel);
            // Attribute application happens before the model is published so
            // attribute errors surface before rendering starts.
            applyAttributes();
            try {
                DataGridUtil.putCellModel(getJspContext(), model);
                InternalStringBuilder content = new InternalStringBuilder();
                AbstractRenderAppender appender = new StringBuilderRenderAppender(content);
                renderCell(appender);
                // Only write when the cell actually produced output.
                if (content != null && content.length() > 0)
                    getJspContext().getOut().println(content.toString());
            } finally {
                // Always remove the model so a render failure does not leak it
                // into subsequent iterations.
                DataGridUtil.removeCellModel(getJspContext());
            }
        }
        return;
    }
}
public class Component {
    /**
     * Unregisters a provider-holder component: every method annotated with
     * {@link Provides} is unregistered as an injection provider, keyed by its
     * return type and (if present) its {@link Qualifier}-annotated annotation.
     *
     * @param providerHolder the object with {@link Provides}-marked methods
     * @return this instance
     * @throws ProviderMissingException when any provider with the given type
     *         and qualifier cannot be found under this component
     */
    public Component unregister(Object providerHolder) throws ProviderMissingException {
        Method[] methods = providerHolder.getClass().getDeclaredMethods();
        for (Method method : methods) {
            if (method.isAnnotationPresent(Provides.class)) {
                Class<?> returnType = method.getReturnType();
                // void providers cannot provide anything; skip them.
                if (returnType != void.class) {
                    // The first annotation that is itself annotated @Qualifier
                    // identifies the binding qualifier.
                    Annotation qualifier = null;
                    Annotation[] annotations = method.getAnnotations();
                    for (Annotation a : annotations) {
                        if (a.annotationType().isAnnotationPresent(Qualifier.class)) {
                            qualifier = a;
                            break;
                        }
                    }
                    unregister(returnType, qualifier);
                }
            }
        }
        return this;
    }
}
public class PackageManagerUtils {
    /**
     * Checks if the device has a location feature.
     *
     * @param context the context used to obtain the package manager
     * @return {@code true} if the device has a location feature
     */
    @TargetApi(Build.VERSION_CODES.FROYO)
    public static boolean hasLocationFeature(Context context) {
        // Delegates to the PackageManager-based overload.
        return hasLocationFeature(context.getPackageManager());
    }
}
public class TypeRegistry {
    /**
     * If the reloadable type cannot currently be located, searches the
     * classloader/type-registry hierarchy for it and caches the result for
     * later quick access.
     *
     * <p>TODO need to work out what to do if it is not found, don't want to
     * keep looking - does that mean it isn't reloadable?</p>
     */
    private static ReloadableType searchForReloadableType(int typeId, TypeRegistry typeRegistry) {
        ReloadableType reloadableType;
        reloadableType = typeRegistry.getReloadableTypeInTypeRegistryHierarchy(NameRegistry.getTypenameById(typeId));
        // Cache whatever was found (possibly null -- TODO confirm the registry
        // tolerates remembering a null result) so future lookups are cheap.
        typeRegistry.rememberReloadableType(typeId, reloadableType);
        return reloadableType;
    }
}
public class Serialized {
    /**
     * Returns the deserialized objects from the given {@link ObjectInputStream}
     * as an {@link Observable} stream. The stream completes when the underlying
     * stream reaches EOF and errors on class-resolution or I/O failures.
     *
     * @param ois the {@link ObjectInputStream}
     * @param <T> the generic type of the returned stream
     * @return the stream of deserialized objects as an {@link Observable}
     */
    public static <T extends Serializable> Observable<T> read(final ObjectInputStream ois) {
        return Observable.create(new SyncOnSubscribe<ObjectInputStream, T>() {
            @Override
            protected ObjectInputStream generateState() {
                // The stream itself is the iteration state.
                return ois;
            }

            @Override
            protected ObjectInputStream next(ObjectInputStream ois, Observer<? super T> observer) {
                try {
                    @SuppressWarnings("unchecked")
                    T t = (T) ois.readObject();
                    observer.onNext(t);
                } catch (EOFException e) {
                    // EOF signals normal completion, not an error.
                    observer.onCompleted();
                } catch (ClassNotFoundException e) {
                    observer.onError(e);
                } catch (IOException e) {
                    observer.onError(e);
                }
                return ois;
            }
        });
    }
}
public class TopoPrimitiveArrayAssociationType {
    /**
     * Gets the value of the topoPrimitive property.
     *
     * <p>This accessor returns a reference to the live list, not a snapshot;
     * modifications to the returned list are reflected inside the JAXB object,
     * which is why there is no <CODE>set</CODE> method. To add a new item:
     * {@code get_TopoPrimitive().add(newItem);}</p>
     *
     * <p>Allowed element types: JAXBElement of NodeType, TopoSolidType,
     * FaceType, EdgeType, or AbstractTopoPrimitiveType.</p>
     */
    public List<JAXBElement<? extends AbstractTopoPrimitiveType>> get_TopoPrimitive() {
        // Lazily initialize, standard JAXB-generated accessor pattern.
        if (_TopoPrimitive == null) {
            _TopoPrimitive = new ArrayList<JAXBElement<? extends AbstractTopoPrimitiveType>>();
        }
        return this._TopoPrimitive;
    }
}
public class TreeGraphNode {
    /**
     * Finds all arcs between this node and <code>destNode</code>, and returns
     * the <code>Set</code> of annotation classes which label those arcs. If no
     * such arcs exist, returns an empty <code>Set</code>.
     *
     * @param destNode the destination node
     * @return the set of annotation classes labeling arcs to {@code destNode}
     */
    public Set<Class<? extends GrammaticalRelationAnnotation>> arcLabelsToNode(TreeGraphNode destNode) {
        Set<Class<? extends GrammaticalRelationAnnotation>> arcLabels = Generics.newHashSet();
        CyclicCoreLabel cl = label();
        // Scan every annotation key on this node's label; a key whose value is a
        // Set containing destNode labels an arc to that node.
        for (Iterator<Class<?>> it = cl.keySet().iterator(); it.hasNext(); ) {
            Class<? extends CoreAnnotation> key = (Class<? extends CoreAnnotation>) it.next();
            // javac doesn't compile properly if generics are fully specified (but eclipse does...)
            Object val = cl.get(key);
            if (val != null && val instanceof Set) {
                if (((Set) val).contains(destNode)) {
                    if (key != null) {
                        arcLabels.add((Class<? extends GrammaticalRelationAnnotation>) key);
                    }
                }
            }
        }
        return arcLabels;
    }
}
public class RequestedAttributeTemplates {
    /**
     * Creates a {@code RequestedAttribute} object for the PersonIdentifier attribute.
     *
     * @param isRequired          whether the attribute is required
     * @param includeFriendlyName whether the friendly name should be included
     * @return a {@code RequestedAttribute} representing the PersonIdentifier attribute
     */
    public static RequestedAttribute PERSON_IDENTIFIER(Boolean isRequired, boolean includeFriendlyName) {
        // Friendly name is optional; pass null to omit it from the attribute.
        return create(AttributeConstants.EIDAS_PERSON_IDENTIFIER_ATTRIBUTE_NAME,
                includeFriendlyName ? AttributeConstants.EIDAS_PERSON_IDENTIFIER_ATTRIBUTE_FRIENDLY_NAME : null,
                Attribute.URI_REFERENCE, isRequired);
    }
}
public class OutputPanel {
    /**
     * Recomputes the positions of the panel's components, centering a fixed
     * 360x245 layout block within the current panel bounds.
     */
    @Override
    public void relocate() {
        // Fixed layout block size; (x, y) is its top-left corner when centered.
        int w = 360, h = 245;
        int x = (this.getWidth() - w) / 2;
        int y = (this.getHeight() - h) / 2;
        outputLabel.setLocation(x, y);
        outputPathField.setLocation(x + 160, y);
        enableZipEncodingCompression.setLocation(x + 110, y + 40);
        outputCompression.setLocation(x + 110, y + 75);
        disableOutputCompression.setLocation(x + 110, y + 100);
        enableBZip2OutputCompression.setLocation(x + 110, y + 120);
        enable7ZipOutputCompression.setLocation(x + 110, y + 140);
        activateDataFileOutput.setLocation(x + 110, y + 160);
        enableMultipleOutputFiles.setLocation(x, y + 190);
        outputSizeLimitLabel.setLocation(x, y + 220);
        outputSizeLimitField.setLocation(x + 160, y + 220);
    }
}
public class CirclePageIndicator { /** * Determines the width of this view * @param measureSpec A measureSpec packed into an int * @return The width of the view, honoring constraints from measureSpec */ private int measureLong ( int measureSpec ) { } }
final int mode = MeasureSpec.getMode(measureSpec);
final int size = MeasureSpec.getSize(measureSpec);
// EXACTLY means the parent dictates the size; with no pager attached there
// is nothing to measure against either way.
if (mode == MeasureSpec.EXACTLY || mViewPager == null) {
    return size;
}
// Desired width: padding + one 2*radius dot per page + radius-wide gaps + 1px slack.
final int pageCount = mViewPager.getAdapter().getCount();
int desired = (int) (getPaddingLeft() + getPaddingRight()
    + (pageCount * 2 * mRadius) + (pageCount - 1) * mRadius + 1);
// AT_MOST caps the desired size at the spec size.
if (mode == MeasureSpec.AT_MOST) {
    desired = Math.min(desired, size);
}
return desired;
public class CmsStaticExportManager { /** * Starts a complete static export of all resources . < p > * @ param purgeFirst flag to delete all resources in the export folder of the rfs * @ param report an I _ CmsReport instance to print output message , or null to write messages to the log file * @ throws CmsException in case of errors accessing the VFS * @ throws IOException in case of errors writing to the export output stream * @ throws ServletException in case of errors accessing the servlet */ public synchronized void exportFullStaticRender ( boolean purgeFirst , I_CmsReport report ) throws CmsException , IOException , ServletException { } }
// set member to true to get temporary export paths for rules
m_fullStaticExport = true ;
// save the real export path so it can be restored after the run
String staticExportPathStore = m_staticExportPath ;
if ( m_useTempDirs ) {
    // set the export path to the export work path; the work directory is
    // swapped in as the real export directory only after a successful run
    m_staticExportPath = m_staticExportWorkPath ;
}
// delete all old exports if the purgeFirst flag is set
if ( purgeFirst ) {
    // fire a clear-caches event so all caches drop stale export state
    Map < String , Object > eventData = new HashMap < String , Object > ( ) ;
    eventData . put ( I_CmsEventListener . KEY_REPORT , report ) ;
    CmsEvent clearCacheEvent = new CmsEvent ( I_CmsEventListener . EVENT_CLEAR_CACHES , eventData ) ;
    OpenCms . fireCmsEvent ( clearCacheEvent ) ;
    scrubExportFolders ( report ) ;
    // this will always use the root site
    CmsObject cms = OpenCms . initCmsObject ( OpenCms . getDefaultUsers ( ) . getUserExport ( ) ) ;
    // drop both flavors of published-resource bookkeeping
    cms . deleteAllStaticExportPublishedResources ( EXPORT_LINK_WITHOUT_PARAMETER ) ;
    cms . deleteAllStaticExportPublishedResources ( EXPORT_LINK_WITH_PARAMETER ) ;
}
// do the export
CmsAfterPublishStaticExportHandler handler = new CmsAfterPublishStaticExportHandler ( ) ;
// export everything (null resource list = full export)
handler . doExportAfterPublish ( null , report ) ;
// set export path to the original one
m_staticExportPath = staticExportPathStore ;
// set member to false for further exports
m_fullStaticExport = false ;
// check if report contents no errors
if ( m_useTempDirs && ! report . hasError ( ) ) {
    // backup old export folders for default export
    File staticExport = new File ( m_staticExportPath ) ;
    createExportBackupFolders ( staticExport , m_staticExportPath , getExportBackups ( ) . intValue ( ) , null ) ;
    // change the name of the used temporary export folder to the original default export path
    File staticExportWork = new File ( m_staticExportWorkPath ) ;
    staticExportWork . renameTo ( new File ( m_staticExportPath ) ) ;
    // backup old export folders of rule based exports
    Iterator < CmsStaticExportRfsRule > it = m_rfsRules . iterator ( ) ;
    while ( it . hasNext ( ) ) {
        CmsStaticExportRfsRule rule = it . next ( ) ;
        File staticExportRule = new File ( rule . getExportPath ( ) ) ;
        File staticExportWorkRule = new File ( rule . getExportWorkPath ( ) ) ;
        // only backup if a temporary folder exists for this rule
        if ( staticExportWorkRule . exists ( ) ) {
            createExportBackupFolders ( staticExportRule , rule . getExportPath ( ) , rule . getExportBackups ( ) . intValue ( ) , OpenCms . getResourceManager ( ) . getFileTranslator ( ) . translateResource ( rule . getName ( ) ) ) ;
            staticExportWorkRule . renameTo ( new File ( rule . getExportPath ( ) ) ) ;
        }
    }
} else if ( report . hasError ( ) ) {
    // failed run: the temp directories are left in place; just warn
    report . println ( Messages . get ( ) . container ( Messages . ERR_EXPORT_NOT_SUCCESSFUL_0 ) , I_CmsReport . FORMAT_WARNING ) ;
}
public class PortMapping { /** * Create a JSON specification which can be used in a Docker API request as the 'PortBindings' part for creating a container. * @return 'PortBindings' object or null if no port mappings are used. */ JsonObject toDockerPortBindingsJson ( ) { } }
final Map<String, Integer> containerToHost = getContainerPortToHostPortMap();
// No mappings at all: the Docker API expects the section to be absent.
if (containerToHost.isEmpty()) {
    return null;
}
final Map<String, String> hostIps = getBindToHostMap();
final JsonObject bindings = new JsonObject();
for (final Map.Entry<String, Integer> mapping : containerToHost.entrySet()) {
    final String portSpec = mapping.getKey();
    final Integer hostPort = mapping.getValue();
    final JsonObject binding = new JsonObject();
    // A missing host port becomes an empty string so Docker picks one.
    binding.addProperty("HostPort", hostPort == null ? "" : hostPort.toString());
    if (hostIps.containsKey(portSpec)) {
        binding.addProperty("HostIp", hostIps.get(portSpec));
    }
    final JsonArray bindingList = new JsonArray();
    bindingList.add(binding);
    bindings.add(portSpec, bindingList);
}
return bindings;
public class sslservicegroup_sslcertkey_binding { /** * Use this API to fetch sslservicegroup_sslcertkey_binding resources of given name. */ public static sslservicegroup_sslcertkey_binding [ ] get ( nitro_service service , String servicegroupname ) throws Exception { } }
// Build a filter object carrying only the service-group name, then let the
// base class fetch all matching binding resources.
final sslservicegroup_sslcertkey_binding filter = new sslservicegroup_sslcertkey_binding();
filter.set_servicegroupname(servicegroupname);
return (sslservicegroup_sslcertkey_binding[]) filter.get_resources(service);
public class TopElementBuilderFragment { /** * Generate top elements from the grammar . * @ param forInterface indicates if the generated code is for interfaces . * @ param forAppender < code > true < / code > if the generation is for the ISourceAppender . * @ return the top elements . */ @ SuppressWarnings ( "unlikely-arg-type" ) protected List < TopElementDescription > generateTopElements ( boolean forInterface , boolean forAppender ) { } }
// Names of element types that may also appear as members of other elements.
final Set < String > memberElements = determineMemberElements ( ) ;
// Grammar components that produced a top element; used below to strip top
// elements back out of the member lists.
final Collection < EObject > topElementContainers = new ArrayList < > ( ) ;
final List < TopElementDescription > topElements = new ArrayList < > ( ) ;
for ( final CodeElementExtractor . ElementDescription description : getCodeElementExtractor ( ) . getTopElements ( getGrammar ( ) , getCodeBuilderConfig ( ) ) ) {
    final TopElementDescription topElementDescription = new TopElementDescription (
        description ,
        memberElements . contains ( description . getElementType ( ) . getName ( ) ) ,
        getCodeBuilderConfig ( ) . isXtendSupportEnabled ( ) && description . isAnnotationInfo ( ) ) ;
    generateTopElement ( topElementDescription , forInterface , forAppender ) ;
    topElements . add ( topElementDescription ) ;
    topElementContainers . add ( topElementDescription . getElementDescription ( ) . getGrammarComponent ( ) ) ;
}
// Remove the top elements as members.
// NOTE(review): removeAll compares the named-member entries against EObject
// grammar components; the method-level @SuppressWarnings("unlikely-arg-type")
// suggests this cross-type comparison is intentional — confirm the member
// list really contains grammar components.
for ( final TopElementDescription description : topElements ) {
    description . getNamedMembers ( ) . removeAll ( topElementContainers ) ;
}
return topElements ;
public class BCUtil { /** * Reads the public or private key material from a PEM stream. * @param keyStream stream containing PEM data * @return the raw key bytes, or {@code null} if no PEM object could be read * @since 4.5.2 */ public static byte [ ] readKeyBytes ( InputStream keyStream ) { } }
// Parse the PEM envelope and unwrap its payload, if any.
final PemObject pem = readPemObject(keyStream);
return (pem == null) ? null : pem.getContent();
public class DefaultObservationManager { /** * A Component has been modified (added or removed) and we update our cache of Event Listeners if that Component is an Event Listener. * @param componentEvent the event about the Component being added or removed * @param componentManager the {@link ComponentManager} where the descriptor is registered * @param descriptor the descriptor of the modified component */ private void onComponentEvent ( ComponentDescriptorEvent componentEvent , ComponentManager componentManager , ComponentDescriptor < EventListener > descriptor ) { } }
// Only EventListener components affect our listener cache.
if (componentEvent.getRoleType() != EventListener.class) {
    return;
}
if (componentEvent instanceof ComponentDescriptorAddedEvent) {
    onEventListenerComponentAdded((ComponentDescriptorAddedEvent) componentEvent, componentManager, descriptor);
} else if (componentEvent instanceof ComponentDescriptorRemovedEvent) {
    onEventListenerComponentRemoved((ComponentDescriptorRemovedEvent) componentEvent, componentManager, descriptor);
} else {
    // Unknown event subtype: log and ignore rather than fail.
    this.logger.warn("Ignoring unknown Component event [{}]", componentEvent.getClass().getName());
}
public class MethodMatcher { /** * Finds the single constructor of the given class whose parameters match the supplied arguments. Throws a business exception when more than one constructor matches. */ @ SuppressWarnings ( "unchecked" ) public static < T > Constructor < T > findMatchingConstructor ( Class < T > classToInspect , Object ... params ) { } }
Constructor<T> match = null;
for (final Constructor<?> candidate : classToInspect.getDeclaredConstructors()) {
    final Type[] candidateTypes = candidate.getParameterTypes();
    // Skip constructors with the wrong arity or incompatible parameter types.
    if (candidateTypes.length != params.length || !checkParameterTypes(candidateTypes, params)) {
        continue;
    }
    if (match != null) {
        // Two compatible constructors: the call is ambiguous.
        throw BusinessException.createNew(BusinessErrorCode.AMBIGUOUS_CONSTRUCTOR_FOUND)
            .put("constructor1", candidate)
            .put("constructor2", match)
            .put("object", classToInspect.getSimpleName())
            .put("parameters", params);
    }
    match = (Constructor<T>) candidate;
}
// May be null when nothing matched.
return match;
public class Variator { /** * Fills the given maps with the default property and default value for every property name handled by this variator. * @param properties map receiving default CSS properties, keyed by property name * @param values map receiving default term values, keyed by property name */ public void assignDefaults ( Map < String , CSSProperty > properties , Map < String , Term < ? > > values ) { } }
final SupportedCSS supported = CSSFactory.getSupportedCSS();
for (final String propertyName : names) {
    // A property may have a default property, a default value, both, or neither.
    final CSSProperty defaultProperty = supported.getDefaultProperty(propertyName);
    if (defaultProperty != null) {
        properties.put(propertyName, defaultProperty);
    }
    final Term<?> defaultValue = supported.getDefaultValue(propertyName);
    if (defaultValue != null) {
        values.put(propertyName, defaultValue);
    }
}
public class DescribeStorediSCSIVolumesResult { /** * Describes a single unit of output from < a > DescribeStorediSCSIVolumes < / a > . The following fields are returned : * < ul > * < li > * < b > ChapEnabled < / b > : Indicates whether mutual CHAP is enabled for the iSCSI target . * < / li > * < li > * < b > LunNumber < / b > : The logical disk number . * < / li > * < li > * < b > NetworkInterfaceId < / b > : The network interface ID of the stored volume that initiator use to map the stored * volume as an iSCSI target . * < / li > * < li > * < b > NetworkInterfacePort < / b > : The port used to communicate with iSCSI targets . * < / li > * < li > * < b > PreservedExistingData < / b > : Indicates if when the stored volume was created , existing data on the underlying * local disk was preserved . * < / li > * < li > * < b > SourceSnapshotId < / b > : If the stored volume was created from a snapshot , this field contains the snapshot ID * used , e . g . snap - 1122aabb . Otherwise , this field is not included . * < / li > * < li > * < b > StorediSCSIVolumes < / b > : An array of StorediSCSIVolume objects where each object contains metadata about one * stored volume . * < / li > * < li > * < b > TargetARN < / b > : The Amazon Resource Name ( ARN ) of the volume target . * < / li > * < li > * < b > VolumeARN < / b > : The Amazon Resource Name ( ARN ) of the stored volume . * < / li > * < li > * < b > VolumeDiskId < / b > : The disk ID of the local disk that was specified in the < a > CreateStorediSCSIVolume < / a > * operation . * < / li > * < li > * < b > VolumeId < / b > : The unique identifier of the storage volume , e . g . vol - 1122AABB . * < / li > * < li > * < b > VolumeiSCSIAttributes < / b > : An < a > VolumeiSCSIAttributes < / a > object that represents a collection of iSCSI * attributes for one stored volume . 
* < / li > * < li > * < b > VolumeProgress < / b > : Represents the percentage complete if the volume is restoring or bootstrapping that * represents the percent of data transferred . This field does not appear in the response if the stored volume is * not restoring or bootstrapping . * < / li > * < li > * < b > VolumeSizeInBytes < / b > : The size of the volume in bytes . * < / li > * < li > * < b > VolumeStatus < / b > : One of the < code > VolumeStatus < / code > values that indicates the state of the volume . * < / li > * < li > * < b > VolumeType < / b > : One of the enumeration values describing the type of the volume . Currently , on STORED volumes * are supported . * < / li > * < / ul > * @ return Describes a single unit of output from < a > DescribeStorediSCSIVolumes < / a > . The following fields are * returned : < / p > * < ul > * < li > * < b > ChapEnabled < / b > : Indicates whether mutual CHAP is enabled for the iSCSI target . * < / li > * < li > * < b > LunNumber < / b > : The logical disk number . * < / li > * < li > * < b > NetworkInterfaceId < / b > : The network interface ID of the stored volume that initiator use to map the * stored volume as an iSCSI target . * < / li > * < li > * < b > NetworkInterfacePort < / b > : The port used to communicate with iSCSI targets . * < / li > * < li > * < b > PreservedExistingData < / b > : Indicates if when the stored volume was created , existing data on the * underlying local disk was preserved . * < / li > * < li > * < b > SourceSnapshotId < / b > : If the stored volume was created from a snapshot , this field contains the * snapshot ID used , e . g . snap - 1122aabb . Otherwise , this field is not included . * < / li > * < li > * < b > StorediSCSIVolumes < / b > : An array of StorediSCSIVolume objects where each object contains metadata * about one stored volume . * < / li > * < li > * < b > TargetARN < / b > : The Amazon Resource Name ( ARN ) of the volume target . 
* < / li > * < li > * < b > VolumeARN < / b > : The Amazon Resource Name ( ARN ) of the stored volume . * < / li > * < li > * < b > VolumeDiskId < / b > : The disk ID of the local disk that was specified in the * < a > CreateStorediSCSIVolume < / a > operation . * < / li > * < li > * < b > VolumeId < / b > : The unique identifier of the storage volume , e . g . vol - 1122AABB . * < / li > * < li > * < b > VolumeiSCSIAttributes < / b > : An < a > VolumeiSCSIAttributes < / a > object that represents a collection of * iSCSI attributes for one stored volume . * < / li > * < li > * < b > VolumeProgress < / b > : Represents the percentage complete if the volume is restoring or bootstrapping * that represents the percent of data transferred . This field does not appear in the response if the stored * volume is not restoring or bootstrapping . * < / li > * < li > * < b > VolumeSizeInBytes < / b > : The size of the volume in bytes . * < / li > * < li > * < b > VolumeStatus < / b > : One of the < code > VolumeStatus < / code > values that indicates the state of the volume . * < / li > * < li > * < b > VolumeType < / b > : One of the enumeration values describing the type of the volume . Currently , on STORED * volumes are supported . * < / li > */ public java . util . List < StorediSCSIVolume > getStorediSCSIVolumes ( ) { } }
if ( storediSCSIVolumes == null ) { storediSCSIVolumes = new com . amazonaws . internal . SdkInternalList < StorediSCSIVolume > ( ) ; } return storediSCSIVolumes ;
public class MemoryFileManager { /** * Returns a class loader for loading plug - ins from the given location . For * example , to load annotation processors , a compiler will request a class * loader for the { @ link * StandardLocation # ANNOTATION _ PROCESSOR _ PATH * ANNOTATION _ PROCESSOR _ PATH } location . * @ param location a location * @ return a class loader for the given location ; or { @ code null } * if loading plug - ins from the given location is disabled or if * the location is not known * @ throws SecurityException if a class loader can not be created * in the current security context * @ throws IllegalStateException if { @ link # close } has been called * and this file manager cannot be reopened */ @ Override public ClassLoader getClassLoader ( JavaFileManager . Location location ) { } }
// Trace the request. NOTE(review): the format string has no conversion
// specifier for the trailing argument — presumably proc.debug handles extra
// args itself; confirm against its implementation.
proc . debug ( DBG_FMGR , "getClassLoader: location\n" , location ) ;
// Delegate class-loader creation to the wrapped standard file manager.
return stdFileManager . getClassLoader ( location ) ;
public class ProcessTree { /** * Sends a terminate signal (SIGTERM) to all processes belonging to the given process group, allowing the group to gracefully exit. Errors while running the kill command are logged, not propagated. * @param pgrpId process group id */ public static void terminateProcessGroup ( String pgrpId ) { } }
ShellCommandExecutor shexec = null;
try {
    // "kill -- -<pgrpId>": "--" ends option parsing so the leading '-' is
    // interpreted as a process-group id rather than a flag.
    String[] args = { "kill", "--", "-" + pgrpId };
    shexec = new ShellCommandExecutor(args);
    shexec.execute();
} catch (IOException ioe) {
    LOG.warn("Error executing shell command " + ioe);
} finally {
    // Guard against a null executor (e.g. if construction failed) so the
    // summary log line cannot itself throw an NPE.
    if (shexec != null) {
        LOG.info("Killing all processes in the process group " + pgrpId
            + " with SIGTERM. Exit code " + shexec.getExitCode());
    }
}
public class Request { /** * 构建请求体 * @ throws IOException IOException */ protected void generateBody ( ) throws IOException { } }
// FIXME if ( body != null && body . getContent ( ) != null ) { // 重置请求头中的Content - Type conn . setRequestProperty ( "Content-Type" , "text/plain" ) ; // LOG . info ( String . format ( " - - 将content数据 [ % s ] 写入outPut中 " , // body . getContent ( ) ) ) ; conn . getOutputStream ( ) . write ( body . getContent ( ) . getBytes ( ) ) ; conn . getOutputStream ( ) . flush ( ) ; }
public class CLIQUESubspace { /** * Depth-first search that collects all dense units transitively connected to the given unit into one cluster. The unit is added to the cluster and model, marked as assigned, and its unassigned left/right neighbors in every dimension of this subspace are visited recursively. * @param unit the starting unit * @param cluster the IDs of the feature vectors of the current cluster * @param model the model of the cluster */ public void dfs ( CLIQUEUnit unit , ModifiableDBIDs cluster , CLIQUESubspace model ) { } }
// Absorb this unit into the cluster and mark it so it is never revisited.
cluster.addDBIDs(unit.getIds());
unit.markAsAssigned();
model.addDenseUnit(unit);
// Walk every dimension of this subspace (set bits of the dimension mask).
final long[] subspaceDims = getDimensions();
for (int d = BitsUtil.nextSetBit(subspaceDims, 0); d >= 0; d = BitsUtil.nextSetBit(subspaceDims, d + 1)) {
    final CLIQUEUnit lower = leftNeighbor(unit, d);
    if (lower != null && !lower.isAssigned()) {
        dfs(lower, cluster, model);
    }
    final CLIQUEUnit upper = rightNeighbor(unit, d);
    if (upper != null && !upper.isAssigned()) {
        dfs(upper, cluster, model);
    }
}
public class TCPReceiverThread { /** * Started by main ( ) on a single thread , this code manages reading TCP requests */ @ SuppressWarnings ( "resource" ) public void run ( ) { } }
// Accept loop: runs for the lifetime of the node, handing each accepted
// TCP connection off to a dedicated reader thread. On any failure the
// server socket is torn down and re-created on the next pass.
Thread . currentThread ( ) . setPriority ( Thread . MAX_PRIORITY ) ;
ServerSocketChannel errsock = null ;
boolean saw_error = false ;
while ( true ) {
    try {
        // Cleanup from any prior socket failures . Rare unless we ' re really sick .
        if ( errsock != null ) { // One time attempt a socket close
            final ServerSocketChannel tmp2 = errsock ;
            errsock = null ;
            tmp2 . close ( ) ; // Could throw , but errsock cleared for next pass
        }
        if ( saw_error ) Thread . sleep ( 100 ) ; // prevent deny - of - service endless socket - creates
        saw_error = false ;
        // More common - case setup of a ServerSocket
        if ( SOCK == null ) {
            SOCK = ServerSocketChannel . open ( ) ;
            SOCK . socket ( ) . setReceiveBufferSize ( AutoBuffer . BBSIZE ) ;
            SOCK . socket ( ) . bind ( H2O . SELF . _key ) ;
        }
        // Block for TCP connection and setup to read from it .
        SocketChannel sock = SOCK . accept ( ) ;
        // Pass off the TCP connection to a separate reader thread
        new TCPReaderThread ( sock , new AutoBuffer ( sock ) ) . start ( ) ;
    } catch ( java . nio . channels . AsynchronousCloseException ex ) {
        break ; // Socket closed for shutdown
    } catch ( Exception e ) {
        // On any error from anybody , close all sockets & re - open
        Log . err ( "Retrying after IO error on TCP port " + H2O . H2O_PORT + ": " , e ) ;
        saw_error = true ;
        errsock = SOCK ;
        SOCK = null ; // Signal error recovery on the next loop
    }
}
public class CommerceOrderItemUtil { /** * Returns the last commerce order item in the ordered set where CPInstanceId = & # 63 ; . * @ param CPInstanceId the cp instance ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce order item , or < code > null < / code > if a matching commerce order item could not be found */ public static CommerceOrderItem fetchByCPInstanceId_Last ( long CPInstanceId , OrderByComparator < CommerceOrderItem > orderByComparator ) { } }
// Static facade: delegate straight to the persistence implementation.
return getPersistence ( ) . fetchByCPInstanceId_Last ( CPInstanceId , orderByComparator ) ;
public class ConfigurationPropertyName { /** * Returns { @ code true } if this element is an immediate parent of the specified name . * @ param name the name to check * @ return { @ code true } if this name is an ancestor */ public boolean isParentOf ( ConfigurationPropertyName name ) { } }
Assert . notNull ( name , "Name must not be null" ) ; if ( this . getNumberOfElements ( ) != name . getNumberOfElements ( ) - 1 ) { return false ; } return isAncestorOf ( name ) ;
public class CampaignEstimate { /** * Sets the platformEstimates value for this CampaignEstimate . * @ param platformEstimates * Traffic estimates segmented by platform for this campaign . */ public void setPlatformEstimates ( com . google . api . ads . adwords . axis . v201809 . o . PlatformCampaignEstimate [ ] platformEstimates ) { } }
// Plain bean setter; stores the array reference without copying.
this . platformEstimates = platformEstimates ;
public class GenStreamableTask { /** * Processes a { @ link Streamable } source file . */ protected void processClass ( File source ) { } }
// load up the file and determine it ' s package and classname
String name = null ;
try {
    name = GenUtil . readClassName ( source ) ;
} catch ( Exception e ) {
    // Unparseable source: report and skip this file.
    System . err . println ( "Failed to parse " + source + ": " + e . getMessage ( ) ) ;
    return ;
}
System . err . println ( "Considering " + name + "..." ) ;
try {
    // in order for annotations to work , this task and all the classes it uses must be
    // loaded from the same class loader as the classes on which we are going to
    // introspect ; this is non - ideal but unavoidable
    processClass ( source , getClass ( ) . getClassLoader ( ) . loadClass ( name ) ) ;
} catch ( ClassNotFoundException cnfe ) {
    // Class not on this task's classpath: explain how to fix the build config.
    System . err . println ( "Failed to load " + name + ".\nMissing class: " + cnfe . getMessage ( ) ) ;
    System . err . println ( "Be sure to set the 'classpathref' attribute to a classpath\n" + "that contains your projects invocation service classes." ) ;
} catch ( Exception e ) {
    e . printStackTrace ( System . err ) ;
}
public class BizwifiAPI { /** * Wi-Fi shop management - update a shop's network information. * @ param accessToken accessToken * @ param shopUpdate shopUpdate * @ return BaseResult */ public static BaseResult shopUpdate ( String accessToken , ShopUpdate shopUpdate ) { } }
// Serialize the request object to JSON and delegate to the String overload.
return shopUpdate ( accessToken , JsonUtil . toJSONString ( shopUpdate ) ) ;
public class TraceeMessagePropertiesConverter { /** * Outgoing messages : copies the TracEE context into the AMQP message headers before delegating conversion to the superclass . */ @ Override public AMQP . BasicProperties fromMessageProperties ( MessageProperties source , String charset ) { } }
final TraceeFilterConfiguration filterConfiguration = backend . getConfiguration ( profile ) ;
// Only propagate context when there is one and the profile allows it for
// asynchronous dispatch.
if ( ! backend . isEmpty ( ) && filterConfiguration . shouldProcessContext ( AsyncDispatch ) ) {
    // Strip parameters the filter configuration denies, then attach the rest
    // as a single TPIC header on the outgoing message.
    final Map < String , String > filteredParams = filterConfiguration . filterDeniedParams ( backend . copyToMap ( ) , AsyncDispatch ) ;
    source . getHeaders ( ) . put ( TPIC_HEADER , filteredParams ) ;
}
return super . fromMessageProperties ( source , charset ) ;
public class XmlSerializer { /** * Serializes the data stored within a java bean to XML. The bean and any ancillary beans should include JAXB binding annotations. * @param obj bean to serialize * @return the XML text * @throws XmlSerializationException if JAXB marshalling fails */ public String serialize ( T obj ) { } }
final StringWriter buffer = new StringWriter();
try {
    final Marshaller marshaller = context.createMarshaller();
    // Pretty-print the output for readability.
    marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    marshaller.marshal(obj, buffer);
} catch (JAXBException e) {
    throw new XmlSerializationException("Exception encountered serializing report: " + getErrorMsg(e), e);
}
return buffer.toString();
public class ArrayBlockingQueue { /** * Atomically removes all of the elements from this queue . * The queue will be empty after this call returns . */ public void clear ( ) { } }
final ReentrantLock lock = this . lock ;
lock . lock ( ) ;
try {
    int k = count ;
    if ( k > 0 ) {
        // Null out every occupied slot, walking the circular buffer from
        // takeIndex up to (but not including) putIndex, wrapping at the end.
        final Object [ ] items = this . items ;
        final int putIndex = this . putIndex ;
        int i = takeIndex ;
        do {
            items [ i ] = null ;
            if ( ++ i == items . length ) i = 0 ;
        } while ( i != putIndex ) ;
        takeIndex = putIndex ;
        count = 0 ;
        // Active iterators track indices; tell them the queue emptied.
        if ( itrs != null ) itrs . queueIsEmpty ( ) ;
        // Wake at most k blocked producers — one per slot just freed.
        for ( ; k > 0 && lock . hasWaiters ( notFull ) ; k -- ) notFull . signal ( ) ;
    }
} finally {
    lock . unlock ( ) ;
}
public class MetricName { /** * Joins the given metric names into one. The resulting name concatenates all non-empty keys with the separator and merges the tags of every part (later parts overwrite earlier tags with the same key). * @param parts metric names to join * @return a new metric name combining the keys and tags of all parts */ public static MetricName join ( MetricName ... parts ) { } }
final Map<String, String> mergedTags = new HashMap<>();
final StringBuilder joined = new StringBuilder();
for (final MetricName part : parts) {
    final String key = part.getKey();
    // Empty or null keys contribute nothing to the joined name.
    if (key != null && !key.isEmpty()) {
        if (joined.length() > 0) {
            joined.append(SEPARATOR);
        }
        joined.append(key);
    }
    if (!part.getTags().isEmpty()) {
        mergedTags.putAll(part.getTags());
    }
}
return new MetricName(joined.toString(), mergedTags);
public class MmtfStructureReader { /** * / * ( non - Javadoc ) * @ see org . rcsb . mmtf . decoder . StructureDecoderInter * face # setGroupBonds ( int , int , int ) */ @ Override public void setGroupBond ( int indOne , int indTwo , int bondOrder ) { } }
// Get the atom
Atom atomOne = atomsInGroup . get ( indOne ) ;
Atom atomTwo = atomsInGroup . get ( indTwo ) ;
// set the new bond; the reference is unused here — presumably the BondImpl
// constructor registers the bond with both atoms as a side effect (confirm),
// hence the @SuppressWarnings("unused").
@ SuppressWarnings ( "unused" ) BondImpl bond = new BondImpl ( atomOne , atomTwo , bondOrder ) ;
public class Lifecycle { /** * If the location of {@code jenkins.war} is known in this life cycle, return its location. Otherwise return null to indicate that it is unknown. When a non-null value is returned, Hudson will offer an upgrade UI to a newer version. * @return the war file, or {@code null} if the "executable-war" system property is unset or points to a non-existent file */ public File getHudsonWar ( ) { } }
String war = SystemProperties.getString("executable-war");
if (war != null) {
    // Build the File once instead of twice (previously once for the
    // exists() check and again for the return value).
    File warFile = new File(war);
    if (warFile.exists()) {
        return warFile;
    }
}
return null;
public class TypeHandlerUtils { /** * Transfers data from String into sql . Blob * @ param conn connection for which sql . Blob object would be created * @ param value String * @ return sql . Blob from String * @ throws SQLException */ public static Object convertBlob ( Connection conn , String value ) throws SQLException { } }
// Delegate to the byte[] overload. NOTE(review): getBytes() uses the
// platform default charset — confirm callers do not depend on a specific
// encoding; value must also be non-null or this throws an NPE.
return convertBlob ( conn , value . getBytes ( ) ) ;
public class BigtableConfiguration { /** * Creates a {@link org.apache.hadoop.hbase.client.Connection} for the given configuration by reflectively invoking the connection class's {@code Connection(Configuration)} constructor. * @param conf a {@link org.apache.hadoop.conf.Configuration} object * @return a {@link org.apache.hadoop.hbase.client.Connection} object * @throws IllegalStateException if the connection class cannot be instantiated */ public static Connection connect ( Configuration conf ) { } }
Class<? extends Connection> connectionClass = getConnectionClass();
try {
    return connectionClass.getConstructor(Configuration.class).newInstance(conf);
} catch (Exception e) {
    // Report the class that was actually instantiated; the previous message
    // referenced the static CONNECTION_CLASS constant, which may differ from
    // what getConnectionClass() returned.
    throw new IllegalStateException(
        "Could not find an appropriate constructor for " + connectionClass.getCanonicalName(), e);
}
public class CalendarEntryTypeField { /** * Converts an entry-type index to its display string. * @param index the index to convert * @return the display string, or {@code null} for an unknown index */ public String convertIndexToDisStr ( int index ) { } }
// Only the two known entry types map to a display string.
if (index == CalendarEntry.ANNIVERSARY_ID) {
    return ANNIVERSARY;
}
return (index == CalendarEntry.APPOINTMENT_ID) ? APPOINTMENT : null;
public class WShuffler { /** * {@inheritDoc} */ @ Override public List < ? > getRequestValue ( final Request request ) { } }
// When this component did not take part in the request, fall back to the
// current option list.
if (!isPresent(request)) {
    return getOptions();
}
// Otherwise rebuild the option order from the submitted parameter values.
return getNewOptions(request.getParameterValues(getId()));
public class RangeUtils { /** * Creates a new instance of the cassandra partitioner configured in the configuration object. * @param config the Deep configuration object * @return an instance of the cassandra partitioner configured in the configuration object * @throws DeepGenericException if the partitioner class cannot be loaded or instantiated */ public static IPartitioner getPartitioner ( ICassandraDeepJobConfig config ) { } }
try {
    // getDeclaredConstructor().newInstance() replaces the deprecated
    // Class.newInstance(), which propagates checked exceptions thrown by
    // the constructor without declaring them.
    return (IPartitioner) Class.forName(config.getPartitionerClassName())
        .getDeclaredConstructor()
        .newInstance();
} catch (ReflectiveOperationException e) {
    // Covers ClassNotFound, NoSuchMethod, Instantiation, IllegalAccess and
    // InvocationTarget exceptions.
    throw new DeepGenericException(e);
}
public class GraphUtils { /** * Removes the given vertices from the graph; vertices not present in the graph are ignored. * @param graph graph to mutate * @param vertices vertices to remove */ public static < V > void removeVertices ( DirectedGraph < V , DefaultEdge > graph , Set < V > vertices ) { } }
for (final V candidate : vertices) {
    // Skip vertices the graph does not contain.
    if (!graph.containsVertex(candidate)) {
        continue;
    }
    graph.removeVertex(candidate);
}
public class Immutables { /** * Wraps a { @ link Map . Entry } } with an immutable { @ link Map . Entry } } . There is no copying involved . * @ param entry the mapping to wrap . * @ return an immutable { @ link Map . Entry } } wrapper that delegates to the original mapping . */ public static < K , V > Map . Entry < K , V > immutableEntry ( Map . Entry < K , V > entry ) { } }
// Thin wrapper: reads delegate to the original entry; no defensive copy.
return new ImmutableEntry < > ( entry ) ;
public class ArtifactResource { /** * Update an artifact download url . < br > * This method is called via POST < grapes _ url > / artifact / < gavc > / downloadurl ? url = < targetUrl > * @ param credential DbCredential * @ param gavc String * @ param downLoadUrl String * @ return Response */ @ POST @ Path ( "/{gavc}" + ServerAPI . GET_DOWNLOAD_URL ) public Response updateDownloadUrl ( @ Auth final DbCredential credential , @ PathParam ( "gavc" ) final String gavc , @ QueryParam ( ServerAPI . URL_PARAM ) final String downLoadUrl ) { } }
// Only users with the data-updater role may change download URLs.
if ( ! credential . getRoles ( ) . contains ( AvailableRoles . DATA_UPDATER ) ) {
    throw new WebApplicationException ( Response . status ( Response . Status . UNAUTHORIZED ) . build ( ) ) ;
}
if ( LOG . isInfoEnabled ( ) ) {
    LOG . info ( String . format ( "Got an update downloadUrl request [%s] [%s]" , gavc , downLoadUrl ) ) ;
}
// Both the artifact coordinates and the new URL are mandatory.
if ( gavc == null || downLoadUrl == null ) {
    return Response . serverError ( ) . status ( HttpStatus . NOT_ACCEPTABLE_406 ) . build ( ) ;
}
getArtifactHandler ( ) . updateDownLoadUrl ( gavc , downLoadUrl ) ;
return Response . ok ( "done" ) . build ( ) ;
public class ExecutionEnvironment { /** * Registers a file at the distributed cache under the given name . The file will be accessible * from any user - defined function in the ( distributed ) runtime under a local path . Files * may be local files ( which will be distributed via BlobServer ) , or files in a distributed file system . * The runtime will copy the files temporarily to a local cache , if needed . * < p > The { @ link org . apache . flink . api . common . functions . RuntimeContext } can be obtained inside UDFs via * { @ link org . apache . flink . api . common . functions . RichFunction # getRuntimeContext ( ) } and provides access * { @ link org . apache . flink . api . common . cache . DistributedCache } via * { @ link org . apache . flink . api . common . functions . RuntimeContext # getDistributedCache ( ) } . * @ param filePath The path of the file , as a URI ( e . g . " file : / / / some / path " or " hdfs : / / host : port / and / path " ) * @ param name The name under which the file is registered . * @ param executable flag indicating whether the file should be executable */ public void registerCachedFile ( String filePath , String name , boolean executable ) { } }
this . cacheFile . add ( new Tuple2 < > ( name , new DistributedCacheEntry ( filePath , executable ) ) ) ;
public class OptimizedGetImpl { /** * Add a new GetOperation to get . */ public void addOperation ( GetOperation o ) { } }
// Fold the operation's callbacks into the shared proxy callback so each
// original requester is notified when this batched get completes.
pcb.addCallbacks(o);
// Merge every key of the incoming operation into this optimized get,
// carrying over its vbucket assignment. The cast stays inside the loop so
// an operation with no keys never triggers a ClassCastException, exactly
// as in the original.
for (final String key : o.getKeys()) {
    addKey(key);
    setVBucket(key, ((VBucketAware) o).getVBucket(key));
}
public class JCAQuiesceListener { /** * Invoked when server is quiescing . Deactivate all endpoints . */ @ Override public void serverStopping ( ) { } }
// Quiesce path: look up every registered EndpointActivationService and
// deactivate all message endpoints it activated, so no new inbound work
// starts while the server is stopping.
BundleContext bundleContext = componentContext . getBundleContext ( ) ; Collection < ServiceReference < EndpointActivationService > > refs ; try { refs = bundleContext . getServiceReferences ( EndpointActivationService . class , null ) ; } catch ( InvalidSyntaxException x ) {
// A null filter cannot be syntactically invalid, so this branch is not
// expected to run; record it via FFDC and fail fast.
FFDCFilter . processException ( x , getClass ( ) . getName ( ) , "61" , this ) ; throw new RuntimeException ( x ) ; } for ( ServiceReference < EndpointActivationService > ref : refs ) { EndpointActivationService eas = bundleContext . getService ( ref ) ; try {
// Drain the queue of recorded activations; each poll removes one entry,
// so every endpoint is deactivated at most once.
for ( ActivationParams a ; null != ( a = eas . endpointActivationParams . poll ( ) ) ; ) try { eas . endpointDeactivation ( ( ActivationSpec ) a . activationSpec , a . messageEndpointFactory ) ; } catch ( Throwable x ) {
// Best-effort: a failure on one endpoint must not stop deactivation of
// the remaining endpoints during shutdown.
FFDCFilter . processException ( x , getClass ( ) . getName ( ) , "71" , this ) ; } } finally {
// Always release the service reference obtained from getService above.
bundleContext . ungetService ( ref ) ; } }
public class DefaultCloseUpAlgorithm { /** * { @ inheritDoc } */ @ Override public int getIdleFinalY ( ScrollableLayout layout , int nowY , int maxY ) { } }
// Settle to whichever end is nearer: fully open (0) when the current
// offset lies in the top half of the scroll range, otherwise fully
// closed (maxY).
if (nowY < maxY / 2) {
    return 0;
}
return maxY;