signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RythmEngine { /** * Render template by string parameter and an array of * template args . The string parameter could be either * a path point to the template source file , or the inline * template source content . The render result is output * to the specified character based writer * < p > See { @ link # getTemplate ( java . io . File , Object . . . ) } for note on * render args < / p > * @ param w the writer * @ param template either the path of template source file or inline template content * @ param args render args array */ public void render ( Writer w , String template , Object ... args ) { } }
outputMode . set ( OutputMode . writer ) ; try { ITemplate t = getTemplate ( template , args ) ; t . render ( w ) ; } finally { renderCleanUp ( ) ; }
public class BsonReader { /** * 判断对象是否存在下一个属性或者数组是否存在下一个元素 * @ param startPosition 起始位置 * @ param contentLength 内容大小 , 不确定的传 - 1 * @ return 是否存在 */ @ Override public boolean hasNext ( int startPosition , int contentLength ) { } }
byte b = readByte ( ) ; if ( b == SIGN_HASNEXT ) return true ; if ( b != SIGN_NONEXT ) throw new ConvertException ( "hasNext option must be (" + ( SIGN_HASNEXT ) + " or " + ( SIGN_NONEXT ) + ") but '" + b + "' at position(" + this . position + ")" ) ; return false ;
public class RingBufferSeqnoLockless {
    /**
     * Adds a new element to the buffer.
     *
     * @param seqno   The seqno of the element
     * @param element The element
     * @param block   If true, add() will block when the buffer is full until there is
     *                space. Else, add() will return immediately, either successfully
     *                or unsuccessfully (if the buffer is full)
     * @return True if the element was added, false otherwise.
     */
    public boolean add(long seqno, T element, boolean block) {
        validate(seqno);
        if (seqno <= hd) // seqno already delivered, includes check seqno <= low
            return false;
        if (seqno - low > capacity() && (!block || !block(seqno))) // seqno too big
            return false;
        // now we can set any seqno > hd and yet not overwrite low (check #1 above)
        int index = index(seqno);
        // Fix for correctness check #1 (see doc/design/RingBuffer.txt):
        // re-check hd after computing the slot, as a concurrent remover may have advanced it.
        if (buf.get(index) != null || seqno <= hd)
            return false;
        // CAS into the slot; failure means another producer already claimed buf[index].
        if (!buf.compareAndSet(index, null, element)) // the element at buf[index] was already present
            return false;
        // now see if hr needs to be moved forward; this can be concurrent as we may have multiple producers
        for (;;) {
            long current_hr = hr.get();
            long new_hr = Math.max(seqno, current_hr);
            // Either hr is already past us, or we successfully advance it; retry on CAS loss.
            if (new_hr <= current_hr || hr.compareAndSet(current_hr, new_hr))
                break;
        }
        return true;
    }
}
public class SmartsheetImpl { /** * Returns the { @ link SearchResources } instance that provides access to searching resources . * @ return the search resources */ public SearchResources searchResources ( ) { } }
if ( search . get ( ) == null ) { search . compareAndSet ( null , new SearchResourcesImpl ( this ) ) ; } return search . get ( ) ;
public class HttpRequestMessageImpl { /** * Set the scheme value to the given input . * @ param scheme * @ throws UnsupportedSchemeException */ @ Override public void setScheme ( byte [ ] scheme ) throws UnsupportedSchemeException { } }
SchemeValues val = SchemeValues . find ( scheme , 0 , scheme . length ) ; if ( null == val ) { throw new UnsupportedSchemeException ( "Illegal scheme " + GenericUtils . getEnglishString ( scheme ) ) ; } setScheme ( val ) ;
public class Logging {
    /**
     * <pre>
     * Logging configurations for sending logs to the consumer project.
     * There can be multiple consumer destinations, each one must have a
     * different monitored resource type. A log can be used in at most
     * one consumer destination.
     * </pre>
     *
     * <code>repeated .google.api.Logging.LoggingDestination consumer_destinations = 2;</code>
     */
    public java.util.List<com.google.api.Logging.LoggingDestination> getConsumerDestinationsList() {
        // Direct view of the backing repeated field; protobuf messages are immutable.
        return consumerDestinations_;
    }
}
public class RecoveryDirectorImpl {
    /**
     * Invoked by a client service to indicate that initial recovery processing for the
     * unit of recovery, identified by FailureScope, has been attempted but failed. The
     * client service supplies its RecoveryAgent reference to identify itself.
     * Invoking this method on the local failure scope will result in the server being
     * terminated (by the HA framework).
     *
     * @param recoveryAgent The client service's RecoveryAgent instance.
     * @param failureScope  The unit of recovery that has failed.
     * @exception InvalidFailureScopeException The supplied FailureScope was not recognized
     *            as an outstanding unit of recovery for the client service.
     */
    @Override
    public void initialRecoveryFailed(RecoveryAgent recoveryAgent, FailureScope failureScope) throws InvalidFailureScopeException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "initialRecoveryFailed", new Object[] { recoveryAgent, failureScope, this });

        // De-register the (agent, scope) pairing; failure means it was never outstanding.
        final boolean removed = removeRecoveryRecord(recoveryAgent, failureScope);

        if (!removed) {
            if (tc.isEventEnabled())
                Tr.event(tc, "The supplied FailureScope was not recognized as outstaning work for this RecoveryAgent");
            if (tc.isEntryEnabled())
                Tr.exit(tc, "initialRecoveryFailed", "InvalidFailureScopeException");
            throw new InvalidFailureScopeException(null);
        }

        // Once recovery processing has been completed, we need to examine the results and take
        // appropriate action.
        if (!recoveryOutstanding(failureScope)) {
            // This was the last outstanding agent for the scope. Drive the failure callback.
            if (_registeredCallbacks != null) {
                driveCallBacks(CALLBACK_RECOVERYFAILED, failureScope);
            }

            // Ensure the failure map is clean for this failure scope.
            synchronized (_initFailedFailureScopes) {
                _initFailedFailureScopes.remove(failureScope);
            }

            if (Configuration.localFailureScope().equals(failureScope)) {
                // This is the local failure scope. Cause server termination.
                Configuration.getRecoveryLogComponent().localRecoveryFailed();
            } else {
                // This is a peer failure scope. Terminate and de-activate to try and allow
                // another member of the cluster to recover.
                try {
                    directTermination(failureScope);
                } catch (Exception exc) {
                    // best-effort termination; failure here must not block deactivation
                }
                Configuration.getRecoveryLogComponent().deactivateGroup(failureScope, 60);
            }
        } else {
            // Record this failure so as to ensure correct processing later.
            synchronized (_initFailedFailureScopes) {
                _initFailedFailureScopes.add(failureScope);
            }

            // Tell other services about this failure so they can take any required action.
            final int failedClientId = recoveryAgent.clientIdentifier();

            // Extract the 'values' collection from the _registeredRecoveryAgents map and create
            // an iterator from it. This iterator will return ArrayList objects each containing a
            // set of RecoveryAgent objects. Each ArrayList corresponds to a different sequence
            // priority value.
            final Collection registeredRecoveryAgentsValues = _registeredRecoveryAgents.values();
            final Iterator registeredRecoveryAgentsValuesIterator = registeredRecoveryAgentsValues.iterator();

            while (registeredRecoveryAgentsValuesIterator.hasNext()) {
                // Extract the next ArrayList and create an iterator from it. This iterator will
                // return RecoveryAgent objects that are registered at the same sequence priority
                // value.
                final ArrayList registeredRecoveryAgentsArray = (java.util.ArrayList) registeredRecoveryAgentsValuesIterator.next();
                final Iterator registeredRecoveryAgentsArrayIterator = registeredRecoveryAgentsArray.iterator();

                while (registeredRecoveryAgentsArrayIterator.hasNext()) {
                    // Extract the next RecoveryAgent object.
                    final RecoveryAgent informRecoveryAgent = (RecoveryAgent) registeredRecoveryAgentsArrayIterator.next();

                    // Do not notify the agent that reported the failure.
                    if (informRecoveryAgent.clientIdentifier() != failedClientId) {
                        informRecoveryAgent.agentReportedFailure(failedClientId, failureScope);
                    }
                }
            }
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "initialRecoveryFailed");
    }
}
public class Base { /** * Executes a raw query and returns an instance of { @ link RowProcessor } . Use it in the following pattern : * < pre > * Base . find ( " select first _ name , last _ name from really _ large _ table " ) . with ( new RowListenerAdapter ( ) { * public void onNext ( Map row ) { * / write your code here * Object o1 = row . get ( " first _ name " ) ; * Object o2 = row . get ( " last _ name " ) ; * < / pre > * @ param query raw SQL . * @ param params list of parameters if query is parametrized . * @ return instance of < code > RowProcessor < / code > which has with ( ) method for convenience . */ public static RowProcessor find ( String query , Object ... params ) { } }
return new DB ( DB . DEFAULT_NAME ) . find ( query , params ) ;
public class Interpreter { /** * Execute one or more expressions . This is slightly more complex than for * the single expression case because of the potential to encounter * " positional operands " . That is , severals which arise from executing the * same expression . * @ param expressions * @ param frame * @ return */ private RValue [ ] executeExpressions ( Tuple < Expr > expressions , CallStack frame ) { } }
RValue [ ] [ ] results = new RValue [ expressions . size ( ) ] [ ] ; int count = 0 ; for ( int i = 0 ; i != expressions . size ( ) ; ++ i ) { results [ i ] = executeMultiReturnExpression ( expressions . get ( i ) , frame ) ; count += results [ i ] . length ; } RValue [ ] rs = new RValue [ count ] ; int j = 0 ; for ( int i = 0 ; i != expressions . size ( ) ; ++ i ) { Object [ ] r = results [ i ] ; System . arraycopy ( r , 0 , rs , j , r . length ) ; j += r . length ; } return rs ;
public class ObjectUtil { /** * Beetl 本地方法调用即调用此类 , 需要考虑到多态 。 beetl自动调用第一个匹配到的函数 , 而不会像java那样 , 调用最合适的匹配到的函数 * @ param o 对象实例 * @ param methodName 方法名 * @ param paras 方法参数 * @ return * @ throws IllegalAccessException * @ throws IllegalArgumentException * @ throws InvocationTargetException */ private static Object invoke ( Class target , Object o , String methodName , Object [ ] paras ) throws IllegalAccessException , IllegalArgumentException , InvocationTargetException { } }
ObjectInfo info = getObjectInfo ( target ) ; Class [ ] parameterType = new Class [ paras . length ] ; int i = 0 ; for ( Object para : paras ) { parameterType [ i ++ ] = para == null ? null : para . getClass ( ) ; } ObjectMethodMatchConf mf = findMethod ( target , methodName , parameterType ) ; if ( mf == null ) { throw new BeetlParserException ( BeetlParserException . NATIVE_CALL_INVALID , "根据参数未找到匹配的方法" + methodName + BeetlUtil . getParameterDescription ( parameterType ) ) ; } Object result = invoke ( o , mf , paras ) ; return result ;
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcPlaneAngleMeasure, lazily resolving it from the
     * registered Ifc4 package on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcPlaneAngleMeasure() {
        if (ifcPlaneAngleMeasureEClass == null) {
            // Index 851 is fixed by the generated package layout.
            ifcPlaneAngleMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(851);
        }
        return ifcPlaneAngleMeasureEClass;
    }
}
public class AOStream {
    /**
     * Helper method called by the AOStream when a persistent tick representing a
     * persistently locked message should be removed since the message has been
     * accepted. This method will also consume the message.
     *
     * @param t          the transaction
     * @param storedTick the persistent tick
     * @throws Exception if removal of the message or the tick fails
     */
    public final void consumeAcceptedTick(TransactionCommon t, AOValue storedTick) throws Exception {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "consumeAcceptedTick", storedTick);
        try {
            SIMPMessage msg = consumerDispatcher.getMessageByValue(storedTick);
            Transaction msTran = mp.resolveAndEnlistMsgStoreTransaction(t);
            // PK67067: We may not find a message in the store for this tick, because
            // it may have been removed using the SIBQueuePoint MBean.
            if (msg != null) {
                msg.remove(msTran, storedTick.getPLockId());
            }
            // Lock then remove the tick itself under the same transaction.
            storedTick.lockItemIfAvailable(controlItemLockID); // should always be successful
            storedTick.remove(msTran, controlItemLockID);
        } catch (Exception e) {
            // No FFDC code needed
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "consumeAcceptedTick", e);
            throw e;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "consumeAcceptedTick");
    }
}
public class JCRStatisticsManager {
    /**
     * Formats a statistics name into the target format: all spaces are stripped
     * and every comma or semicolon separator is normalized to ", ".
     *
     * @param name the name of the statistics requested; may be null
     * @return the formatted statistics name, or null if {@code name} was null
     */
    static String formatName(String name) {
        if (name == null) {
            return null;
        }
        final String compact = name.replaceAll(" ", "");
        return compact.replaceAll("[,;]", ", ");
    }
}
public class U { /** * Documented , # pluck */ public static < E > List < Object > pluck ( final List < E > list , final String propertyName ) { } }
if ( list . isEmpty ( ) ) { return Collections . emptyList ( ) ; } return map ( list , new Function < E , Object > ( ) { @ Override public Object apply ( E elem ) { try { return elem . getClass ( ) . getField ( propertyName ) . get ( elem ) ; } catch ( Exception e ) { try { return elem . getClass ( ) . getMethod ( propertyName ) . invoke ( elem ) ; } catch ( Exception ex ) { throw new IllegalArgumentException ( ex ) ; } } } } ) ;
public class DependencyInjectionUtility {
    /**
     * Collects every field declared on the delegate's class and on all of its
     * superclasses.
     *
     * @param delegate an instance of the batch artifact
     * @return an array containing all declared fields plus inherited parent fields
     */
    private static Field[] getAllFields(Object delegate) {
        ArrayList<Field> fields = new ArrayList<Field>(Arrays.asList(delegate.getClass().getDeclaredFields()));
        // Walk the superclass chain. The original re-checked superClass != null inside
        // the loop (always true there) and called getDeclaredFields() twice per class;
        // addAll on an empty list is already a no-op, so no length guard is needed.
        for (Class<?> superClass = delegate.getClass().getSuperclass(); superClass != null; superClass = superClass.getSuperclass()) {
            fields.addAll(Arrays.asList(superClass.getDeclaredFields()));
        }
        return fields.toArray(new Field[fields.size()]);
    }
}
public class BrainRegionsHelper {
    /**
     * Builds the Ruta analysis engine description for brain-region rules.
     * Much pre- and post-processing is needed; see the tests.
     *
     * @return the configured analysis engine description
     * @throws ResourceInitializationException if the engine cannot be configured
     */
    @Deprecated(/* Use script directly with appropriate paths */)
    public static AnalysisEngineDescription getBrainregionRules() throws ResourceInitializationException {
        // "Main" is the entry script; script and lexicon locations come from project constants.
        return createEngineDescription(RutaEngine.class,
                PARAM_MAIN_SCRIPT, "Main",
                PARAM_SCRIPT_PATHS, BRAIN_REGIONS_HOME + RESOURCES_PATH + "ruta",
                PARAM_RESOURCE_PATHS, LEXICON_HOME);
    }
}
public class StyleUtilities { /** * Creates a default { @ link Style } for a featurecollection . * @ return the default style . */ public static Style createDefaultStyle ( SimpleFeatureCollection featureCollection ) { } }
GeometryDescriptor geometryDescriptor = featureCollection . getSchema ( ) . getGeometryDescriptor ( ) ; Style style = null ; if ( EGeometryType . isPoint ( geometryDescriptor ) ) { style = createDefaultPointStyle ( ) ; } else if ( EGeometryType . isLine ( geometryDescriptor ) ) { style = createDefaultLineStyle ( ) ; } else if ( EGeometryType . isPolygon ( geometryDescriptor ) ) { style = createDefaultPolygonStyle ( ) ; } if ( style != null ) { style . setName ( featureCollection . getSchema ( ) . getTypeName ( ) ) ; } return style ;
public class XmlPrintStream { /** * Open an XML element with the given name , and attributes . A call to closeElement ( ) will output * the appropriate XML closing tag . This class remembers the tag names . * @ param name Name of the XML element to open . * @ param attributes A map of name value pairs which will be used to add attributes to * the element . */ public void openElement ( String name , Map < String , String > attributes ) { } }
elementStack . push ( name ) ; print ( "<" + name ) ; for ( Entry < String , String > entry : attributes . entrySet ( ) ) { print ( " " + entry . getKey ( ) + "=\"" + escape ( entry . getValue ( ) ) + "\"" ) ; } if ( this . isNoNl ( ) ) { print ( ">" ) ; } else { println ( ">" ) ; } indent ( ) ;
public class Logging {
    /**
     * <pre>
     * Logging configurations for sending logs to the consumer project.
     * There can be multiple consumer destinations, each one must have a
     * different monitored resource type. A log can be used in at most
     * one consumer destination.
     * </pre>
     *
     * <code>repeated .google.api.Logging.LoggingDestination consumer_destinations = 2;</code>
     */
    public com.google.api.Logging.LoggingDestination getConsumerDestinations(int index) {
        // Indexed access into the backing repeated field.
        return consumerDestinations_.get(index);
    }
}
public class Utility { /** * Parse a single non - whitespace character ' ch ' , optionally * preceded by whitespace . * @ param id the string to be parsed * @ param pos INPUT - OUTPUT parameter . On input , pos [ 0 ] is the * offset of the first character to be parsed . On output , pos [ 0] * is the index after the last parsed character . If the parse * fails , pos [ 0 ] will be unchanged . * @ param ch the non - whitespace character to be parsed . * @ return true if ' ch ' is seen preceded by zero or more * whitespace characters . */ public static boolean parseChar ( String id , int [ ] pos , char ch ) { } }
int start = pos [ 0 ] ; pos [ 0 ] = PatternProps . skipWhiteSpace ( id , pos [ 0 ] ) ; if ( pos [ 0 ] == id . length ( ) || id . charAt ( pos [ 0 ] ) != ch ) { pos [ 0 ] = start ; return false ; } ++ pos [ 0 ] ; return true ;
public class Matrix4d {
    /**
     * (non-Javadoc)
     * @see org.joml.Matrix4dc#frustumRayDir(double, double, org.joml.Vector3d)
     */
    public Vector3d frustumRayDir(double x, double y, Vector3d dest) {
        /*
         * This method works by first obtaining the frustum plane normals,
         * then building the cross product to obtain the corner rays,
         * and finally bilinearly interpolating to obtain the desired direction.
         * The code below uses a condensed form of doing all this, making use
         * of some mathematical identities to simplify the overall expression.
         */
        // Pairwise products of matrix entries reused across the plane/corner expressions.
        double a = m10 * m23, b = m13 * m21, c = m10 * m21, d = m11 * m23, e = m13 * m20, f = m11 * m20;
        double g = m03 * m20, h = m01 * m23, i = m01 * m20, j = m03 * m21, k = m00 * m23, l = m00 * m21;
        double m = m00 * m13, n = m03 * m11, o = m00 * m11, p = m01 * m13, q = m03 * m10, r = m01 * m10;
        // m1* / m2* are the two corner rays interpolated vertically by y.
        double m1x, m1y, m1z;
        m1x = (d + e + f - a - b - c) * (1.0 - y) + (a - b - c + d - e + f) * y;
        m1y = (j + k + l - g - h - i) * (1.0 - y) + (g - h - i + j - k + l) * y;
        m1z = (p + q + r - m - n - o) * (1.0 - y) + (m - n - o + p - q + r) * y;
        double m2x, m2y, m2z;
        m2x = (b - c - d + e + f - a) * (1.0 - y) + (a + b - c - d - e + f) * y;
        m2y = (h - i - j + k + l - g) * (1.0 - y) + (g + h - i - j - k + l) * y;
        m2z = (n - o - p + q + r - m) * (1.0 - y) + (m + n - o - p - q + r) * y;
        // Horizontal interpolation by x, then normalize into dest.
        dest.x = m1x * (1.0 - x) + m2x * x;
        dest.y = m1y * (1.0 - x) + m2y * x;
        dest.z = m1z * (1.0 - x) + m2z * x;
        return dest.normalize(dest);
    }
}
public class GumbelDistribution { /** * PDF of Gumbel distribution * @ param x Value * @ param mu Mode * @ param beta Shape * @ return PDF at position x . */ public static double pdf ( double x , double mu , double beta ) { } }
final double z = ( x - mu ) / beta ; if ( x == Double . NEGATIVE_INFINITY ) { return 0. ; } return FastMath . exp ( - z - FastMath . exp ( - z ) ) / beta ;
public class MethodInvocation { /** * Invokes the { @ link Method } on the given target { @ link Object } . * @ param < T > { @ link Class } type of the { @ link Method } return value . * @ param target { @ link Object } on which the { @ link Method } will be invoked . * @ return the result of the { @ link Method } invocation on the given target { @ link Object } * wrapped in a { @ link Optional } to guard against { @ literal null } . * @ throws MethodInvocationException if an error occurs during the invocation of the { @ link Method } * on the target { @ link Object } . * @ see java . lang . reflect . Method # invoke ( Object , Object . . . ) * @ see java . util . Optional * @ see # resolveTarget ( Object ) * @ see # getArguments ( ) * @ see # getMethod ( ) */ @ SuppressWarnings ( "unchecked" ) public < T > Optional < T > invoke ( Object target ) { } }
Object resolvedTarget = resolveTarget ( target ) ; Method method = getMethod ( ) ; try { return Optional . ofNullable ( ( T ) method . invoke ( resolvedTarget , getArguments ( ) ) ) ; } catch ( IllegalAccessException | InvocationTargetException e ) { throw new MethodInvocationException ( String . format ( "Failed to invoke method [%1$s] on target object [%2$s]" , method . getName ( ) , resolvedTarget ) , e ) ; }
public class WebSockets {
    /**
     * Sends a complete text message, blocking until the send finishes.
     * Automatically frees the pooled byte buffer when done.
     *
     * @param pooledData the data to send; it will be freed when done
     * @param wsChannel  the web socket channel
     * @throws IOException if the blocking send fails
     */
    public static void sendTextBlocking(final PooledByteBuffer pooledData, final WebSocketChannel wsChannel) throws IOException {
        // Shared blocking-send path, parameterized by the TEXT frame type.
        sendBlockingInternal(pooledData, WebSocketFrameType.TEXT, wsChannel);
    }
}
public class DataSet { protected static void checkSameExecutionContext ( DataSet < ? > set1 , DataSet < ? > set2 ) { } }
if ( set1 . getExecutionEnvironment ( ) != set2 . getExecutionEnvironment ( ) ) { throw new IllegalArgumentException ( "The two inputs have different execution contexts." ) ; }
public class VsmMain { /** * { @ inheritDoc } */ protected Properties setupProperties ( ) { } }
// use the System properties in case the user specified them as // - Dprop = < val > to the JVM directly . Properties props = System . getProperties ( ) ; if ( argOptions . hasOption ( "transform" ) ) { props . setProperty ( VectorSpaceModel . MATRIX_TRANSFORM_PROPERTY , argOptions . getStringOption ( "transform" ) ) ; } return props ;
public class PcsUtils {
    /**
     * Generates a random String drawn from the given alphabet.
     *
     * @param values the characters to choose from
     * @param len    the number of characters in the output String
     * @return the randomized String
     */
    public static String randomString(char[] values, int len) {
        // SecureRandom so the output is suitable for unpredictable identifiers.
        final Random random = new SecureRandom();
        final StringBuilder result = new StringBuilder(len);
        while (result.length() < len) {
            result.append(values[random.nextInt(values.length)]);
        }
        return result.toString();
    }
}
public class SpaceCharacters { /** * Write a number of whitespaces to a StringBuffer * @ param num * @ param out */ public static void indent ( int num , StringBuilder out ) { } }
if ( num <= SIXTY_FOUR ) { out . append ( SIXTY_FOUR_SPACES , 0 , num ) ; return ; } else if ( num <= 128 ) { // avoid initializing loop counters if only one iteration out . append ( SIXTY_FOUR_SPACES , 0 , SIXTY_FOUR ) ; out . append ( SIXTY_FOUR_SPACES , 0 , num - SIXTY_FOUR ) ; } else { int times = num / SIXTY_FOUR ; int rem = num % SIXTY_FOUR ; for ( int i = 0 ; i < times ; i ++ ) { out . append ( SIXTY_FOUR_SPACES , 0 , SIXTY_FOUR ) ; } out . append ( SIXTY_FOUR_SPACES , 0 , rem ) ; return ; }
public class IntuitResponseDeserializer { /** * Method to deserialize the BatchItemResponse object * @ param jsonNode the json node * @ return BatchItemResponse the batch item response */ private BatchItemResponse getBatchItemResponse ( JsonNode jsonNode ) throws IOException { } }
ObjectMapper mapper = new ObjectMapper ( ) ; SimpleModule simpleModule = new SimpleModule ( "BatchItemResponseDeserializer" , new Version ( 1 , 0 , 0 , null ) ) ; simpleModule . addDeserializer ( BatchItemResponse . class , new BatchItemResponseDeserializer ( ) ) ; mapper . registerModule ( simpleModule ) ; mapper . configure ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES , false ) ; return mapper . treeToValue ( jsonNode , BatchItemResponse . class ) ;
public class LombokNode {
    /**
     * Reparses the AST node represented by this node. Any existing nodes that occupy a
     * different space in the AST are rehomed, any nodes that no longer exist are removed,
     * and new nodes are created.
     *
     * Careful - the node you call this on must not itself have been removed or rehomed -
     * it rebuilds <i>all children</i>.
     */
    public void rebuild() {
        // Snapshot current children by identity so they can be matched against the new tree.
        Map<N, L> oldNodes = new IdentityHashMap<N, L>();
        gatherAndRemoveChildren(oldNodes);
        // Reparse this node from scratch, then reconcile old wrappers with the new tree.
        L newNode = getAst().buildTree(get(), kind);
        getAst().setChanged();
        getAst().replaceNewWithExistingOld(oldNodes, newNode);
    }
}
public class SpotifyApi { /** * Get the top tracks of an artist in a specific country . * @ param id The Spotify ID of the artist . * @ param country The ISO 3166-1 alpha - 2 country code of the specific country . * @ return A { @ link GetArtistsTopTracksRequest . Builder } . * @ see < a href = " https : / / developer . spotify . com / web - api / user - guide / # spotify - uris - and - ids " > Spotify : URLs & amp ; IDs < / a > * @ see < a href = " https : / / en . wikipedia . org / wiki / ISO _ 3166-1 _ alpha - 2 " > Wikipedia : ISO 3166-1 alpha - 2 country codes < / a > */ public GetArtistsTopTracksRequest . Builder getArtistsTopTracks ( String id , CountryCode country ) { } }
return new GetArtistsTopTracksRequest . Builder ( accessToken ) . setDefaults ( httpManager , scheme , host , port ) . id ( id ) . country ( country ) ;
public class XmlTransformer { /** * Transforms the source XML to the result . * If no stylesheet was specified , the result is * identical with the source . * @ param is * the stream which the source XML is read from . * @ param os * the stream which the transformed result is write to . * @ throws XmlException * if the transformation fails . */ public void transform ( final InputStream is , final OutputStream os ) { } }
transform ( new StreamSource ( is ) , new StreamResult ( os ) ) ;
public class TaskIOMetricGroup { /** * Initialize Buffer Metrics for a task . */ public void initializeBufferMetrics ( Task task ) { } }
final MetricGroup buffers = addGroup ( "buffers" ) ; buffers . gauge ( "inputQueueLength" , new InputBuffersGauge ( task ) ) ; buffers . gauge ( "outputQueueLength" , new OutputBuffersGauge ( task ) ) ; buffers . gauge ( "inPoolUsage" , new InputBufferPoolUsageGauge ( task ) ) ; buffers . gauge ( "outPoolUsage" , new OutputBufferPoolUsageGauge ( task ) ) ;
public class HostServerGroupTracker { /** * Only call with monitor for ' this ' held */ private void map ( Resource root ) { } }
for ( Resource . ResourceEntry serverGroup : root . getChildren ( SERVER_GROUP ) ) { String serverGroupName = serverGroup . getName ( ) ; ModelNode serverGroupModel = serverGroup . getModel ( ) ; String profile = serverGroupModel . require ( PROFILE ) . asString ( ) ; store ( serverGroupName , profile , profilesToGroups ) ; String socketBindingGroup = serverGroupModel . require ( SOCKET_BINDING_GROUP ) . asString ( ) ; store ( serverGroupName , socketBindingGroup , socketsToGroups ) ; for ( Resource . ResourceEntry deployment : serverGroup . getChildren ( DEPLOYMENT ) ) { store ( serverGroupName , deployment . getName ( ) , deploymentsToGroups ) ; } for ( Resource . ResourceEntry overlay : serverGroup . getChildren ( DEPLOYMENT_OVERLAY ) ) { store ( serverGroupName , overlay . getName ( ) , overlaysToGroups ) ; } } for ( Resource . ResourceEntry host : root . getChildren ( HOST ) ) { String hostName = host . getPathElement ( ) . getValue ( ) ; for ( Resource . ResourceEntry serverConfig : host . getChildren ( SERVER_CONFIG ) ) { ModelNode serverConfigModel = serverConfig . getModel ( ) ; String serverGroupName = serverConfigModel . require ( GROUP ) . asString ( ) ; store ( serverGroupName , hostName , hostsToGroups ) ; } }
public class OracleNamedParameterJdbcTemplate {
    /**
     * {@inheritDoc}
     *
     * Returns an Oracle-aware creator that binds the named parameters from the
     * given source into the SQL.
     */
    @Override
    protected PreparedStatementCreator getPreparedStatementCreator(String sql, SqlParameterSource parameterSource) {
        return new NamedPreparedStatementCreator(sql, parameterSource);
    }
}
public class ProviderConfig { /** * add server . * @ param server ServerConfig * @ return the ProviderConfig */ public ProviderConfig < T > setServer ( ServerConfig server ) { } }
if ( this . server == null ) { this . server = new ArrayList < ServerConfig > ( ) ; } this . server . add ( server ) ; return this ;
public class MisplacedClassProcessorFactory { /** * Creates a MisplacedClassProcessor according for the given strategy name . * @ param name The case - insensitive user - level strategy name ( see the STRATEGY _ * constants ) . * @ return The MisplacedClassProcessor corresponding to the strategy name , or the result of * getDefaultProcessor ( ) if name is null . * @ throws IllegalArgumentException if an unrecognized non - null strategy name is specified . */ public MisplacedClassProcessor getProcessorForName ( String name ) { } }
if ( name == null ) { return getDefaultProcessor ( ) ; } switch ( Strategy . valueOf ( name . toUpperCase ( ) ) ) { case FATAL : return new FatalMisplacedClassProcessor ( ) ; case MOVE : return new MoveMisplacedClassProcessor ( ) ; case OMIT : return new OmitMisplacedClassProcessor ( ) ; case SKIP : return new SkipMisplacedClassProcessor ( ) ; } throw new IllegalArgumentException ( "Unrecognized strategy name \"" + name + "\"." ) ;
public class IpcLogEntry {
    /**
     * Set the client zone for the request. In the case of the server side this will be
     * automatically filled in if the {@link NetflixHeader#Zone} is specified on the
     * client request.
     */
    public IpcLogEntry withClientZone(String zone) {
        this.clientZone = zone;
        // Derive the region from the zone only if it was not set explicitly.
        if (clientRegion == null) {
            clientRegion = extractRegionFromZone(zone);
        }
        return this;
    }
}
public class JsonPath { /** * Creates a new JsonPath and applies it to the provided Json object * @ param jsonInputStream json input stream * @ param jsonPath the json path * @ param filters filters to be applied to the filter place holders [ ? ] in the path * @ param < T > expected return type * @ return list of objects matched by the given path */ @ SuppressWarnings ( { } }
"unchecked" } ) public static < T > T read ( InputStream jsonInputStream , String jsonPath , Predicate ... filters ) throws IOException { return new ParseContextImpl ( ) . parse ( jsonInputStream ) . read ( jsonPath , filters ) ;
public class IRDImpl {
    /**
     * <!-- begin-user-doc -->
     * Reflective setter: routes the feature value to the matching typed setter,
     * delegating unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.IRD__IMDATA:
                setIMdata((byte[]) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class ConfiguredInstantiator { /** * This instantiates a KryoInstantiator by : * 1 ) checking if it has a constructor that takes Config * 2 ) checking for a no - arg constructor */ static KryoInstantiator reflect ( Class < ? extends KryoInstantiator > instClass , Config optConf ) throws ConfigurationException { } }
try { try { return instClass . getConstructor ( Config . class ) . newInstance ( optConf ) ; } catch ( NoSuchMethodException ex3 ) { return instClass . newInstance ( ) ; } } catch ( InstantiationException x ) { throw new ConfigurationException ( x ) ; } catch ( IllegalAccessException x ) { throw new ConfigurationException ( x ) ; } catch ( InvocationTargetException x ) { throw new ConfigurationException ( x ) ; }
public class EnhancerHelper {
    /**
     * Creates a new dynamoDBUpdateObject enhancer for the given class. If the class
     * already implements {@link UpdateObject}, then a special "no-op" enhancer will be
     * returned that doesn't do any special enhancement. Otherwise, a byte-code
     * enhancer is returned.
     *
     * @param baseClass class for which to create an enhancer
     * @return new enhancer
     */
    @SuppressWarnings({ "unchecked" })
    public static <T> Enhancer<T> getUpdateObjectEnhancer(Class<T> baseClass) {
        // Fast path: unsynchronized read of the cache.
        if (updateObjectEnhancers.containsKey(baseClass)) {
            return (Enhancer<T>) updateObjectEnhancers.get(baseClass);
        }
        synchronized (updateObjectEnhancers) {
            // Re-check under the lock in case another thread populated the entry.
            Enhancer<T> enhancer = updateObjectEnhancers.get(baseClass);
            if (enhancer != null) {
                return enhancer;
            }
            if (UpdateObject.class.isAssignableFrom(baseClass)) {
                // Already implements UpdateObject; no enhancement needed.
                enhancer = new NoOpEnhancer<T>(baseClass);
            } else {
                enhancer = new UpdateObjectVelocityEnhancer<T>(baseClass) {
                    // Implementation - Enhancer
                    @Override
                    public boolean needsEnhancement(Object object) {
                        return object != null && !(object instanceof UpdateObject);
                    }

                    // Implementation - VelocityEnhancer
                    @Override
                    protected String getTemplateLocation() {
                        return "org/iternine/jeppetto/dao/enhance/updateObject.vm";
                    }

                    @Override
                    protected Map<String, Object> getAdditionalContextItems() {
                        Map<String, Object> contextItems = super.getAdditionalContextItems();
                        contextItems.put("updateObjectHelper", new DynamoDBUpdateObjectHelper());
                        return contextItems;
                    }
                };
            }
            updateObjectEnhancers.put(baseClass, enhancer);
            return enhancer;
        }
    }
}
public class ByteArrayUtil { /** * Read an unsigned integer . * @ param buffer Buffer to read from * @ return Integer value */ public static int readUnsignedVarint ( ByteBuffer buffer ) throws IOException { } }
int val = 0 ; int bits = 0 ; while ( true ) { final int data = buffer . get ( ) ; val |= ( data & 0x7F ) << bits ; if ( ( data & 0x80 ) == 0 ) { return val ; } bits += 7 ; if ( bits > 35 ) { throw new IOException ( "Variable length quantity is too long for expected integer." ) ; } }
public class ServerBaseImpl {

    /**
     * {@inheritDoc}
     *
     * Registers this server's prefix (if not already registered) and then serves
     * packets forever by pumping the face's event loop. Per-iteration failures are
     * logged and the loop continues; only a registration failure is fatal.
     */
    @Override
    public void run() {
        if (!isRegistered()) {
            try {
                register();
            } catch (IOException ex) {
                // Without a registered prefix the server can't receive anything; abort.
                throw new RuntimeException("Failed to register prefix, aborting.", ex);
            }
        }

        // continuously serve packets
        while (true) {
            try {
                // NOTE(review): presumably serializes access to the shared face with
                // other users of the same lock -- confirm against the other callers.
                synchronized (face) {
                    face.processEvents();
                }
            } catch (IOException ex) {
                // Log and keep serving; one failed poll shouldn't kill the server.
                logger.log(Level.SEVERE, "Failed to process events.", ex);
            } catch (EncodingException ex) {
                logger.log(Level.SEVERE, "Failed to parse bytes.", ex);
            }
        }
    }
}
public class PmiRegistry {

    /**
     * Initialize: module configs, module aggregates. Idempotent -- subsequent calls
     * return immediately once initialization has happened.
     */
    public static synchronized void init() {
        if (initialized)
            return;

        if (tc.isEntryEnabled()) {
            Tr.entry(tc, "init");
        }
        // Mark early so a failure below doesn't cause endless re-initialization attempts.
        initialized = true;

        // defaultLevel = StatConstants.STATISTIC_SET_EXTENDED;
        defaultLevel = StatConstants.STATISTIC_SET_BASIC;
        moduleRoot = new ModuleItem();
        setInstrumentationLevel(defaultLevel);
        // disabled = false;

        try {
            // Expose the PMI collaborator as an MBean on the platform MBean server.
            MBeanServer mServer = ManagementFactory.getPlatformMBeanServer();
            ObjectName pmiMBean = new ObjectName(PmiConstants.MBEAN_NAME);
            mServer.registerMBean(PmiCollaboratorFactory.getPmiCollaborator(), pmiMBean);
            printAllMBeans();
        } catch (Exception e) {
            // Registration failure is logged (with FFDC) but is not fatal to PMI itself.
            Tr.error(tc, "Unable to create Perf MBean.");
            FFDCFilter.processException(e, "com.ibm.ws.pmi.server.PmiRegistry", "Init");
        }

        if (tc.isEntryEnabled()) {
            Tr.exit(tc, "init");
        }
    }
}
public class KernelResolverRepository {

    /**
     * Get a feature by name, but without going and checking the remote repository if we
     * don't know about it.
     * <p>
     * The name may be either a symbolic name or a public (short) name; public names are
     * resolved case-insensitively to their symbolic name first.
     *
     * @param featureName symbolic or public feature name
     * @return the preferred version of the feature, or {@code null} if unknown
     * @see #getFeature(String)
     */
    private ProvisioningFeatureDefinition getCachedFeature(String featureName) {
        List<ProvisioningFeatureDefinition> featureList = symbolicNameToFeature.get(featureName);

        if (featureList == null) {
            // Not a symbolic name; try treating it as a (case-insensitive) public name.
            featureName = publicNameToSymbolicName.get(featureName.toLowerCase());
            if (featureName != null) {
                featureList = symbolicNameToFeature.get(featureName);
            }
        }

        if (featureList == null || featureList.isEmpty()) {
            return null;
        }

        return getPreferredVersion(featureName, featureList);
    }
}
public class OXInstantMessagingManager {

    /**
     * Add an OX-IM message element to a message.
     *
     * @param message message
     * @param contact recipient of the message
     * @param payload payload which will be encrypted and signed
     * @return {@link OpenPgpMetadata} about the message's encryption + metadata
     * @throws SmackException.NotLoggedInException in case we are not logged in
     * @throws PGPException in case something goes wrong during encryption
     * @throws IOException IO is dangerous (we need to read keys)
     */
    public OpenPgpMetadata addOxMessage(Message message, OpenPgpContact contact, List<ExtensionElement> payload)
            throws SmackException.NotLoggedInException, PGPException, IOException {
        // Delegate to the multi-recipient overload with a single-element set.
        return addOxMessage(message, Collections.singleton(contact), payload);
    }
}
public class AmazonDynamoDBClient { /** * Returns the current provisioned - capacity limits for your AWS account in a region , both for the region as a whole * and for any one DynamoDB table that you create there . * When you establish an AWS account , the account has initial limits on the maximum read capacity units and write * capacity units that you can provision across all of your DynamoDB tables in a given region . Also , there are * per - table limits that apply when you create a table there . For more information , see < a * href = " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Limits . html " > Limits < / a > page in the * < i > Amazon DynamoDB Developer Guide < / i > . * Although you can increase these limits by filing a case at < a * href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support Center < / a > , obtaining the increase is not * instantaneous . The < code > DescribeLimits < / code > action lets you write code to compare the capacity you are * currently using to those limits imposed by your account so that you have enough time to apply for an increase * before you hit a limit . * For example , you could use one of the AWS SDKs to do the following : * < ol > * < li > * Call < code > DescribeLimits < / code > for a particular region to obtain your current account limits on provisioned * capacity there . * < / li > * < li > * Create a variable to hold the aggregate read capacity units provisioned for all your tables in that region , and * one to hold the aggregate write capacity units . Zero them both . * < / li > * < li > * Call < code > ListTables < / code > to obtain a list of all your DynamoDB tables . * < / li > * < li > * For each table name listed by < code > ListTables < / code > , do the following : * < ul > * < li > * Call < code > DescribeTable < / code > with the table name . 
* < / li > * < li > * Use the data returned by < code > DescribeTable < / code > to add the read capacity units and write capacity units * provisioned for the table itself to your variables . * < / li > * < li > * If the table has one or more global secondary indexes ( GSIs ) , loop over these GSIs and add their provisioned * capacity values to your variables as well . * < / li > * < / ul > * < / li > * < li > * Report the account limits for that region returned by < code > DescribeLimits < / code > , along with the total current * provisioned capacity levels you have calculated . * < / li > * < / ol > * This will let you see whether you are getting close to your account - level limits . * The per - table limits apply only when you are creating a new table . They restrict the sum of the provisioned * capacity of the new table itself and all its global secondary indexes . * For existing tables and their GSIs , DynamoDB will not let you increase provisioned capacity extremely rapidly , * but the only upper limit that applies is that the aggregate provisioned capacity over all your tables and GSIs * cannot exceed either of the per - account limits . * < note > * < code > DescribeLimits < / code > should only be called periodically . You can expect throttling errors if you call it * more than once in a minute . * < / note > * The < code > DescribeLimits < / code > Request element has no content . * @ param describeLimitsRequest * Represents the input of a < code > DescribeLimits < / code > operation . Has no content . * @ return Result of the DescribeLimits operation returned by the service . * @ throws InternalServerErrorException * An error occurred on the server side . * @ sample AmazonDynamoDB . DescribeLimits * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / dynamodb - 2012-08-10 / DescribeLimits " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeLimitsResult describeLimits ( DescribeLimitsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeLimits ( request ) ;
public class Matrix3d {

    /**
     * Set the values of this matrix by reading 9 double values from the given
     * {@link DoubleBuffer} in column-major order, starting at its current position.
     * <p>
     * The DoubleBuffer is expected to contain the values in column-major order.
     * The position of the DoubleBuffer will not be changed by this method.
     *
     * @param buffer the DoubleBuffer to read the matrix values from in column-major order
     * @return this
     */
    public Matrix3d set(DoubleBuffer buffer) {
        // Read at the buffer's current position; absolute reads leave the position untouched.
        MemUtil.INSTANCE.get(this, buffer.position(), buffer);
        return this;
    }
}
public class CLI {

    /**
     * Main parsing evaluation entry point: reads the language, model and test set
     * from the parsed command-line arguments and runs the evaluator over them.
     *
     * @throws IOException throws exception if test set not available
     */
    private void parseval() throws IOException {
        final String lang = this.parsedArguments.getString("language");
        final String model = this.parsedArguments.getString("model");
        final String testset = this.parsedArguments.getString("testset");
        // Bundle the CLI options into the Properties format the evaluator expects.
        final Properties props = setParsevalProperties(lang, model, testset);
        final ParserEvaluate parserEvaluator = new ParserEvaluate(props);
        parserEvaluator.evaluate();
    }
}
public class RandomVideoCollectionGenerator { /** * Generate a VideoCollection with random data obtained form VIDEO _ INFO map . You don ' t need o * create your own AdapteeCollections . Review ListAdapteeCollection if needed . * @ param videoCount size of the collection . * @ return VideoCollection generated . */ public VideoCollection generate ( final int videoCount ) { } }
List < Video > videos = new LinkedList < Video > ( ) ; for ( int i = 0 ; i < videoCount ; i ++ ) { Video video = generateRandomVideo ( ) ; videos . add ( video ) ; } return new VideoCollection ( videos ) ;
public class NikeFS2BlockProvider {

    /**
     * Reads from the specified block.
     *
     * @param blockNumber Internal number of the block in question.
     * @param blockOffset Offset, in bytes, within this block.
     * @param buffer Buffer to store read data in; filled from index 0 up to its length.
     * @return The number of read bytes.
     * @throws IOException if the underlying read fails
     */
    public synchronized int read(int blockNumber, int blockOffset, byte[] buffer) throws IOException {
        // Delegate to the ranged overload, targeting the whole buffer.
        return read(blockNumber, blockOffset, buffer, 0, buffer.length);
    }
}
public class SSLWriteServiceContext {

    /**
     * Write-failure callback: clears the pending async byte count and propagates
     * the error up the channel chain.
     *
     * @see com.ibm.wsspi.tcpchannel.TCPWriteCompletedCallback#error(com.ibm.wsspi.channelfw.VirtualConnection,
     *      com.ibm.wsspi.tcpchannel.TCPWriteRequestContext, java.io.IOException)
     */
    @Override
    public void error(VirtualConnection vc, TCPWriteRequestContext wsc, IOException ioe) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "error, vc=" + getVCHash());
        }
        // Nothing else needs to be done here. Report error up the chain.
        this.asyncBytesToWrite = 0L;
        this.callback.error(vc, this, ioe);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "error");
        }
    }
}
public class SeleniumActionBuilder {

    /**
     * Set input action: registers a {@link SetInputAction} carrying the given value
     * and returns a builder scoped to that action.
     *
     * @param value the text value the action will set on the target element
     * @return builder for further configuration of the new action
     */
    public ElementActionBuilder setInput(String value) {
        SetInputAction action = new SetInputAction();
        action.setValue(value);
        // Register the action with this builder before handing out the element builder.
        action(action);
        return new ElementActionBuilder(action);
    }
}
public class AutoComplete {

    /**
     * Computes the suffix to append to the command line for auto-completion:
     * <ul>
     * <li>No possibilities: returns an absent value.</li>
     * <li>Multiple possibilities: completes up to the longest common prefix.</li>
     * <li>Exactly one possibility: completes it fully, plus the suffix mandated by
     * its {@link CliValueType}.</li>
     * </ul>
     * TODO: Return a simple String, and have the CliShell convert empty strings to empty values?
     *
     * @return the auto-complete suffix, or absent if there is nothing to complete
     */
    public Opt<String> getAutoCompleteSuffix() {
        if (possibilities.isEmpty()) {
            // There are no auto-complete possibilities.
            return Opt.absent();
        }

        if (possibilities.size() > 1) {
            // Multiple auto complete results are possible.
            // AutoComplete as much as is possible - until the longest common prefix.
            final String longestPrefix = possibilities.getLongestPrefix();
            return StringUtils.getNonEmptyString(calcAutoCompleteSuffix(longestPrefix));
        }

        // TODO: Only 1 possibility, boundParams should be updated to show it...
        // Only a single auto complete result is possible, append it to the command line.
        // Let's be helpful - depending on the autoCompleteType,
        // add the suffix that each valueType must have.
        final Entry<String, CliValueType> entry = possibilities.entrySet().iterator().next();
        final String singlePossibility = entry.getKey();
        final CliValueType type = entry.getValue();
        final String suffix = calcAutoCompleteSuffix(singlePossibility);
        return Opt.of(suffix + type.getSuffix());
    }
}
public class Parser {

    /**
     * 12.2 Variable Statement: parses a variable declaration list followed by an
     * (explicit or implicit) semicolon.
     *
     * @return the parsed variable statement tree spanning from the list's start
     */
    private VariableStatementTree parseVariableStatement() {
        SourcePosition start = getTreeStartLocation();
        VariableDeclarationListTree declarations = parseVariableDeclarationList();
        // Accepts a real semicolon or one implied by automatic semicolon insertion.
        eatPossibleImplicitSemiColon();
        return new VariableStatementTree(getTreeLocation(start), declarations);
    }
}
public class WhileyFileParser { /** * Skip over any whitespace characters , starting from a given index and * returning the first index passed any whitespace encountered . */ private int skipWhiteSpace ( int index ) { } }
while ( index < tokens . size ( ) && isWhiteSpace ( tokens . get ( index ) ) ) { index ++ ; } return index ;
public class CRC32CDigest { /** * This method calculates the CRC with Slicing - by - 4 algorithm . * @ param data An integer array , which elements all are added to the CRC . */ protected final void slicingBy4 ( final int [ ] data ) { } }
final int n = data . length ; for ( int i = 0 ; i < n ; i ++ ) { slicingBy4 ( data [ i ] ) ; } crc ^= INT_FLAG_MASK_LONG ;
public class StringUtil { /** * Replace a string with another * @ param s string to replace into * @ param find string to be replaced * @ param replace new string * @ return the string with replacements */ public static ReplacedString replaceString ( String s , String find , String replace ) { } }
if ( replace == null ) replace = "-" ; int index = - 1 ; int l = find . length ( ) ; boolean replaced = false ; do { index = s . indexOf ( find , index ) ; if ( index >= 0 ) { replaced = true ; s = s . substring ( 0 , index ) + replace + s . substring ( index + l ) ; } } while ( index >= 0 ) ; return new ReplacedString ( s , replaced ) ;
public class RTMP {

    /**
     * Setter for last read packet: swaps the new packet into the channel's info
     * and releases the one it replaces.
     *
     * @param channelId Channel id
     * @param packet Packet
     */
    public void setLastReadPacket(int channelId, Packet packet) {
        final ChannelInfo info = getChannelInfo(channelId);
        // grab last packet
        Packet prevPacket = info.getReadPacket();
        // set new one
        info.setReadPacket(packet);
        // free the previous packet
        freePacket(prevPacket);
    }
}
public class CreateVpcEndpointConnectionNotificationRequest {

    /**
     * This method is intended for internal use only. Returns the marshaled request
     * configured with additional parameters to enable operation dry-run.
     */
    @Override
    public Request<CreateVpcEndpointConnectionNotificationRequest> getDryRunRequest() {
        Request<CreateVpcEndpointConnectionNotificationRequest> request =
                new CreateVpcEndpointConnectionNotificationRequestMarshaller().marshall(this);
        // Flag the marshalled request so the service validates it without executing it.
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
}
public class Flow { /** * Create a new Flow . * @ return Flow . */ public static Flow create ( ) { } }
return new Flow ( ID_GENERATOR . updateAndGet ( i -> i == Integer . MAX_VALUE ? 0 : i + 1 ) , 0 ) ;
public class IOUtilities { /** * Transfers a byte [ ] to the output stream of a URLConnection * @ param c Connection to transfer output * @ param bytes the bytes to send * @ throws IOException */ public static void transfer ( URLConnection c , byte [ ] bytes ) throws IOException { } }
try ( OutputStream out = new BufferedOutputStream ( c . getOutputStream ( ) ) ) { out . write ( bytes ) ; }
public class Shell {

    /**
     * Return true if the parsed input ends up being empty (<em>e.g.</em> hitting ENTER on an
     * empty line or blank space).
     * Also returns true (<em>i.e.</em> ask to ignore) when input starts with {@literal //},
     * which is used for comments.
     */
    private boolean noInput(Input input) {
        return input.words().isEmpty()
                // A single all-whitespace word still counts as "no input".
                || (input.words().size() == 1 && input.words().get(0).trim().isEmpty())
                // Comment line: optional leading whitespace followed by "//".
                || (input.words().iterator().next().matches("\\s*//.*"));
    }
}
public class VisualizationTree { /** * Add a listener . * @ param listener Listener to add */ public void addVisualizationListener ( VisualizationListener listener ) { } }
for ( int i = 0 ; i < vlistenerList . size ( ) ; i ++ ) { if ( vlistenerList . get ( i ) == listener ) { return ; } } vlistenerList . add ( listener ) ;
public class RmiJournalReceiver {

    /**
     * Request to write text to the current journal file. Check that:
     * <ul>
     * <li>a file is open,</li>
     * <li>the supplied indexedHash matches the one we calculate,</li>
     * <li>the write is successful.</li>
     * </ul>
     * Increment the itemIndex after a successful write.
     *
     * @param indexedHash hash the sender computed for this item; must match ours
     * @param text journal content to append
     * @throws JournalException if no file is open, the hash disagrees, or the write fails
     */
    public void writeText(String indexedHash, String text) throws JournalException {
        if (journalFile == null) {
            throw logAndGetException("Attempting to write when no file " + "is open.");
        }

        // Recompute the expected hash from our own state to detect divergence.
        String calculatedHash = RmiJournalReceiverHelper.figureIndexedHash(currentRepositoryHash, itemIndex);
        if (!calculatedHash.equals(indexedHash)) {
            logger.debug("calculatedHash='" + calculatedHash + "', providedHash='" + indexedHash + "'");
            throw logAndGetException("indexed hash is incorrect.");
        }

        try {
            writer.append(text);
            // Flush immediately so the journal on disk is always current.
            writer.flush();
        } catch (IOException e) {
            throw logAndGetException("Failed to write to '" + journalFile.getName() + "'", e);
        }

        logger.debug("Wrote item #" + itemIndex + " to file '" + journalFile.getName() + "'");
        // Only advance the index once the item is safely written.
        itemIndex++;
    }
}
public class DeploymentOperationsInner {

    /**
     * Gets a deployment's operation.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param deploymentName The name of the deployment.
     * @param operationId The ID of the operation to get.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DeploymentOperationInner object
     */
    public Observable<DeploymentOperationInner> getAsync(String resourceGroupName, String deploymentName, String operationId) {
        // Unwrap the ServiceResponse envelope so subscribers see only the body.
        return getWithServiceResponseAsync(resourceGroupName, deploymentName, operationId)
                .map(new Func1<ServiceResponse<DeploymentOperationInner>, DeploymentOperationInner>() {
                    @Override
                    public DeploymentOperationInner call(ServiceResponse<DeploymentOperationInner> response) {
                        return response.body();
                    }
                });
    }
}
public class KuraCloudConsumer {

    /**
     * CloudClientListener callback for control messages: forwards to the shared
     * handler with the control flag set to {@code true}.
     */
    @Override
    public void onControlMessageArrived(String deviceId, String appTopic, KuraPayload msg, int qos, boolean retain) {
        onInternalMessageArrived(deviceId, appTopic, msg, qos, retain, true);
    }
}
public class SGraphSegment {

    /**
     * Set the user data at the given index.
     *
     * @param index is the index of the data.
     * @param data is the data
     * @throws IndexOutOfBoundsException if no user-data storage was ever allocated,
     *         or if the index is outside the backing list's bounds
     */
    public void setUserDataAt(int index, Object data) {
        if (this.userData == null) {
            // No storage allocated: any index is out of bounds by definition.
            throw new IndexOutOfBoundsException();
        }
        this.userData.set(index, data);
    }
}
public class task_device_log {

    /**
     * Use this operation to get the task log for each device.
     *
     * @param client session handle used to issue the request
     * @param resource carries the identifying fields for the lookup
     * @return the first matching task_device_log entry
     * @throws Exception if validation or the fetch fails; note that an empty result
     *         array would surface here as an ArrayIndexOutOfBoundsException
     */
    public static task_device_log get(nitro_service client, task_device_log resource) throws Exception {
        resource.validate("get");
        return ((task_device_log[]) resource.get_resources(client))[0];
    }
}
public class Instructions {

    /**
     * Utility method to merge instructions from the given file into the given set of
     * instructions. The instructions from the given file override the existing
     * instructions (when both contain the same instruction).
     *
     * @param properties the current instructions
     * @param extra the file
     * @return the new set of instructions
     * @throws IOException if the file cannot be read
     */
    public static Properties merge(Properties properties, File extra) throws IOException {
        // Load the extra file first; its entries win over the existing ones.
        Properties props = load(extra);
        return mergeAndOverrideExisting(properties, props);
    }
}
public class DiscordApiImpl {

    /**
     * Gets a map with all registered listeners that implement one or more
     * {@code ObjectAttachableListener}s and their assigned listener classes they listen to.
     *
     * @param objectClass The class of the object.
     * @param objectId The id of the object.
     * @param <T> The type of the listeners.
     * @return A map with all registered listeners that implement one or more
     *         {@code ObjectAttachableListener}s and their assigned listener classes they
     *         listen to. Empty (never {@code null}) when nothing is registered.
     */
    @SuppressWarnings("unchecked")
    public <T extends ObjectAttachableListener> Map<T, List<Class<T>>> getObjectListeners(
            Class<?> objectClass, long objectId) {
        // Walk objectClass -> objectId -> (listenerClass -> listeners) and invert it into
        // listener -> [listener classes]. Any missing level short-circuits through the
        // Optional chain to an empty map; the result is wrapped unmodifiable.
        return Collections.unmodifiableMap(Optional.ofNullable(objectClass)
                .map(objectListeners::get)
                .map(objectListener -> objectListener.get(objectId))
                .map(Map::entrySet)
                .map(Set::stream)
                .map(entryStream -> entryStream.flatMap(entry -> entry.getValue().keySet().stream()
                        .map(listener -> new SimpleEntry<>((T) listener, (Class<T>) entry.getKey()))))
                .map(entryStream -> entryStream.collect(Collectors.groupingBy(
                        Entry::getKey, Collectors.mapping(Entry::getValue, Collectors.toList()))))
                .orElseGet(HashMap::new));
    }
}
public class WSubordinateControlRenderer {

    /**
     * Paint a standard action - where a single item or single group is targeted.
     *
     * @param action the action to paint
     * @param elementName the enclosing element name ("ui:onFalse" or "ui:onTrue").
     * @param xml the output response
     */
    private void paintStandardAction(final Action action, final String elementName, final XmlStringBuilder xml) {
        xml.appendTagOpen(elementName);
        xml.appendAttribute("action", getActionTypeName(action.getActionType()));
        xml.appendClose();

        xml.appendTagOpen("ui:target");
        SubordinateTarget target = action.getTarget();
        // Groups are addressed via groupId; individual components via their plain id.
        if (target instanceof WComponentGroup<?>) {
            xml.appendAttribute("groupId", target.getId());
        } else {
            xml.appendAttribute("id", target.getId());
        }
        xml.appendEnd();

        xml.appendEndTag(elementName);
    }
}
public class CommerceAccountUserRelPersistenceImpl {

    /**
     * Removes all the commerce account user rels where commerceAccountId = &#63; from the database.
     *
     * @param commerceAccountId the commerce account ID
     */
    @Override
    public void removeByCommerceAccountId(long commerceAccountId) {
        // Fetch every matching row (ALL_POS = no pagination) and delete them one by one
        // so each removal goes through the normal persistence lifecycle.
        for (CommerceAccountUserRel commerceAccountUserRel :
                findByCommerceAccountId(commerceAccountId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
            remove(commerceAccountUserRel);
        }
    }
}
public class PathUtils {

    /**
     * Returns the absolute local path of the clone of the git project, built from the
     * root location plus the repository name extracted from the project's URI.
     *
     * @param project project whose URI identifies the git repository
     * @param rootLocation base directory under which local clones live
     * @return the absolute path to the project's local clone, ending with a slash
     */
    public static String getAbsoluteProjectPath(Project project, String rootLocation) {
        StringBuilder retval = new StringBuilder(rootLocation);
        PathUtils.addEndingSlashIfNeeded(retval);
        retval.append(GitHelper.extractRepositoryNameFromUrl(String.valueOf(project.getUri())));
        PathUtils.addEndingSlashIfNeeded(retval);
        // build the local path to the clone
        return retval.toString();
    }
}
public class Base64 {

    /**
     * Encodes a byte array into Base64 notation.
     * Example options:
     * <pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DO_BREAK_LINES: break lines at 76 characters
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * Example: <code>encodeBytes(myData, Base64.GZIP)</code> or
     * <code>encodeBytes(myData, Base64.GZIP | Base64.DO_BREAK_LINES)</code>
     * <p>
     * As of v2.3, if there is an error with the GZIP stream, the method will throw a
     * java.io.IOException. <b>This is new to v2.3!</b> In earlier versions, it just
     * returned a null value, but in retrospect that's a pretty poor way to handle it.
     *
     * @param source The data to convert
     * @param options Specified options
     * @return The Base64-encoded data as a String
     * @see Base64#GZIP
     * @see Base64#DO_BREAK_LINES
     * @throws java.io.IOException if there is an error
     * @throws NullPointerException if source array is null
     * @since 2.0
     */
    public static String encodeBytes(byte[] source, int options) throws java.io.IOException {
        // Delegate to the range-based overload covering the entire array;
        // dereferencing source.length here raises the documented NPE for null input.
        return encodeBytes(source, 0, source.length, options);
    }
}
public class StreamPersistedValueData {

    /**
     * {@inheritDoc}
     *
     * Determines the content length from whichever backing source is populated, in
     * priority order: persistent file, temp file, FileInputStream channel, URL
     * connection, then the raw stream. Returns -1 when the length cannot be
     * determined due to an I/O error.
     */
    @Override
    public long getLength() {
        if (file != null) {
            return PrivilegedFileHelper.length(file);
        } else if (tempFile != null) {
            return PrivilegedFileHelper.length(tempFile);
        } else if (stream instanceof FileInputStream) {
            try {
                // A file-backed stream exposes its exact size via its channel.
                return ((FileInputStream) stream).getChannel().size();
            } catch (IOException e) {
                return -1;
            }
        } else if (url != null) {
            try {
                URLConnection connection = url.openConnection();
                // NOTE(review): getContentLength() returns an int, so content larger
                // than 2GB is misreported -- confirm whether that matters here.
                return connection.getContentLength();
            } catch (IOException e) {
                return -1;
            }
        } else {
            try {
                // Fallback: available() is only an estimate of bytes readable without
                // blocking, not the true stream length.
                return stream.available();
            } catch (IOException e) {
                return -1;
            }
        }
    }
}
public class AccountsInner {

    /**
     * Create Cognitive Services Account. Accounts is a resource group wide resource type.
     * It holds the keys for developer to access intelligent APIs. It's also the resource
     * type for billing.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription.
     * @param accountName The name of Cognitive Services account.
     * @param parameters The parameters to provide for the created account.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CognitiveServicesAccountInner> createAsync(String resourceGroupName, String accountName,
            CognitiveServicesAccountCreateParameters parameters,
            final ServiceCallback<CognitiveServicesAccountInner> serviceCallback) {
        // Bridge the observable pipeline into a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(
                createWithServiceResponseAsync(resourceGroupName, accountName, parameters), serviceCallback);
    }
}
public class StopStreamEncryptionRequestMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param stopStreamEncryptionRequest request to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(StopStreamEncryptionRequest stopStreamEncryptionRequest, ProtocolMarshaller protocolMarshaller) {
        if (stopStreamEncryptionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(stopStreamEncryptionRequest.getStreamName(), STREAMNAME_BINDING);
            protocolMarshaller.marshall(stopStreamEncryptionRequest.getEncryptionType(), ENCRYPTIONTYPE_BINDING);
            protocolMarshaller.marshall(stopStreamEncryptionRequest.getKeyId(), KEYID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PasswordUtil { /** * Return the algorithm tag of the provided string . * For example , if the password is { xor } CDo9Hgw = , " { xor } " will be returned . * @ param password the encoded string with encoding algorithm . * @ return The encoding algorithm with algorithm tags . Null if not present . */ public static String getCryptoAlgorithmTag ( String password ) { } }
if ( null == password ) { return null ; } String tag = null ; String data = password . trim ( ) ; if ( data . length ( ) >= 2 ) { if ( '{' == data . charAt ( 0 ) ) { int end = data . indexOf ( '}' , 1 ) ; if ( end > 0 ) { end ++ ; // we want to include the end marker if ( end == data . length ( ) ) { tag = data ; } else { tag = data . substring ( 0 , end ) . trim ( ) ; } } } } return tag ;
public class PostgresDdlParser {

    /**
     * {@inheritDoc}
     *
     * Parses the privilege list of a GRANT statement, consuming comma-separated
     * privilege keywords and appending one "privilege" node per keyword to the
     * supplied list. Stops at the first token that is not a recognized privilege.
     *
     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantPrivileges(org.modeshape.sequencer.ddl.DdlTokenStream,
     *      java.util.List)
     */
    @Override
    protected void parseGrantPrivileges(DdlTokenStream tokens, List<AstNode> privileges) throws ParsingException {
        // privilege-types
        //
        // ALL PRIVILEGES | privilege-list
        //
        // privilege-list
        //
        // table-privilege {, table-privilege}*
        //
        // table-privilege
        // SELECT [ <left paren> <privilege column list> <right paren> ]
        // | DELETE
        // | INSERT [ <left paren> <privilege column list> <right paren> ]
        // | UPDATE [ <left paren> <privilege column list> <right paren> ]
        // | REFERENCES [ <left paren> <privilege column list> <right paren> ]
        // | USAGE
        // | TRIGGER
        // | TRUNCATE
        // | CREATE
        // | CONNECT
        // | TEMPORARY
        // | TEMP
        // | EXECUTE

        // POSTGRES has the following Privileges:
        // GRANT { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }

        do {
            AstNode node = null;

            if (tokens.canConsume(DELETE)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, DELETE);
            } else if (tokens.canConsume(INSERT)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, INSERT);
                // Optional column list, e.g. INSERT (col1, col2)
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            } else if (tokens.canConsume("REFERENCES")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "REFERENCES");
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            } else if (tokens.canConsume(SELECT)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, SELECT);
                // Could have columns here
                // GRANT SELECT (col1), UPDATE (col1) ON mytable TO miriam_rw;
                // Let's just swallow the column data.
                consumeParenBoundedTokens(tokens, true);
            } else if (tokens.canConsume("USAGE")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "USAGE");
            } else if (tokens.canConsume(UPDATE)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, UPDATE);
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            } else if (tokens.canConsume("TRIGGER")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "TRIGGER");
            } else if (tokens.canConsume("TRUNCATE")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "TRUNCATE");
            } else if (tokens.canConsume("CREATE")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "CREATE");
            } else if (tokens.canConsume("CONNECT")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "CONNECT");
            } else if (tokens.canConsume("TEMPORARY")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "TEMPORARY");
            } else if (tokens.canConsume("TEMP")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "TEMP");
            } else if (tokens.canConsume("EXECUTE")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "EXECUTE");
            }

            if (node == null) {
                // Token was not a privilege keyword; end of the privilege list.
                break;
            }

            nodeFactory().setType(node, GRANT_PRIVILEGE);
            privileges.add(node);
        } while (tokens.canConsume(COMMA));
    }
}
public class DefaultRoleManager { /** * hasLink determines whether role : name1 inherits role : name2. * domain is a prefix to the roles . */ @ Override public boolean hasLink ( String name1 , String name2 , String ... domain ) { } }
if ( domain . length == 1 ) { name1 = domain [ 0 ] + "::" + name1 ; name2 = domain [ 0 ] + "::" + name2 ; } else if ( domain . length > 1 ) { throw new Error ( "error: domain should be 1 parameter" ) ; } if ( name1 . equals ( name2 ) ) { return true ; } if ( ! hasRole ( name1 ) || ! hasRole ( name2 ) ) { return false ; } Role role1 = createRole ( name1 ) ; return role1 . hasRole ( name2 , maxHierarchyLevel ) ;
public class XPathAPI { /** * Use an XPath string to select a nodelist . * XPath namespace prefixes are resolved from the contextNode . * @ param contextNode The node to start searching from . * @ param str A valid XPath string . * @ return A NodeIterator , should never be null . * @ throws TransformerException */ public static NodeList selectNodeList ( Node contextNode , String str ) throws TransformerException { } }
return selectNodeList ( contextNode , str , contextNode ) ;
public class DataModelFactory { /** * Generates an artifact regarding the parameters . * < P > < b > WARNING : < / b > The parameters grId / arId / version should be filled ! ! ! Only classifier and type are not mandatory . * @ param groupId String * @ param artifactId String * @ param version String * @ param classifier String * @ param type String * @ param extension String * @ return Artifact */ public static Artifact createArtifact ( final String groupId , final String artifactId , final String version , final String classifier , final String type , final String extension , final String origin ) { } }
final Artifact artifact = new Artifact ( ) ; artifact . setGroupId ( groupId ) ; artifact . setArtifactId ( artifactId ) ; artifact . setVersion ( version ) ; if ( classifier != null ) { artifact . setClassifier ( classifier ) ; } if ( type != null ) { artifact . setType ( type ) ; } if ( extension != null ) { artifact . setExtension ( extension ) ; } artifact . setOrigin ( origin == null ? "maven" : origin ) ; return artifact ;
public class MtasSolrCollectionCache { /** * Delete by id . * @ param id the id */ public void deleteById ( String id ) { } }
if ( idToVersion . containsKey ( id ) ) { String version = idToVersion . remove ( id ) ; expirationVersion . remove ( version ) ; versionToItem . remove ( version ) ; if ( collectionCachePath != null && ! collectionCachePath . resolve ( version ) . toFile ( ) . delete ( ) ) { log . debug ( "couldn't delete " + version ) ; } }
public class TermsHistogramResult { /** * Extract from and to fields from the built query to determine * histogram boundaries . */ @ Nullable public AbsoluteRange getHistogramBoundaries ( ) { } }
if ( boundaries == null ) { boundaries = Tools . extractHistogramBoundaries ( getBuiltQuery ( ) ) . orElse ( null ) ; } return boundaries ;
public class Models { /** * Saves the json object to the given file */ public static < T > List < T > loadJsonValues ( File json , Class < T > clazz ) throws IOException { } }
List < T > answer = new ArrayList < > ( ) ; if ( json . exists ( ) && json . isFile ( ) ) { MappingIterator < T > iter = objectMapper . readerFor ( clazz ) . readValues ( json ) ; while ( iter . hasNext ( ) ) { answer . add ( iter . next ( ) ) ; } } return answer ;
public class GetReservationCoverageResult { /** * The amount of time that your reservations covered . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCoveragesByTime ( java . util . Collection ) } or { @ link # withCoveragesByTime ( java . util . Collection ) } if you * want to override the existing values . * @ param coveragesByTime * The amount of time that your reservations covered . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetReservationCoverageResult withCoveragesByTime ( CoverageByTime ... coveragesByTime ) { } }
if ( this . coveragesByTime == null ) { setCoveragesByTime ( new java . util . ArrayList < CoverageByTime > ( coveragesByTime . length ) ) ; } for ( CoverageByTime ele : coveragesByTime ) { this . coveragesByTime . add ( ele ) ; } return this ;
public class CertificatesInner { /** * Gets a list of integration account certificates . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; IntegrationAccountCertificateInner & gt ; object */ public Observable < Page < IntegrationAccountCertificateInner > > listByIntegrationAccountsNextAsync ( final String nextPageLink ) { } }
return listByIntegrationAccountsNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < IntegrationAccountCertificateInner > > , Page < IntegrationAccountCertificateInner > > ( ) { @ Override public Page < IntegrationAccountCertificateInner > call ( ServiceResponse < Page < IntegrationAccountCertificateInner > > response ) { return response . body ( ) ; } } ) ;
public class PackageManagerUtils { /** * Checks if the device has a barometer sensor . * @ param context the context . * @ return { @ code true } if the device has a barometer sensor . */ @ TargetApi ( Build . VERSION_CODES . GINGERBREAD ) public static boolean hasBarometerSensorFeature ( Context context ) { } }
return hasBarometerSensorFeature ( context . getPackageManager ( ) ) ;
public class AppServiceEnvironmentsInner { /** * Create or update a worker pool . * Create or update a worker pool . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ param workerPoolName Name of the worker pool . * @ param workerPoolEnvelope Properties of the worker pool . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < WorkerPoolResourceInner > createOrUpdateWorkerPoolAsync ( String resourceGroupName , String name , String workerPoolName , WorkerPoolResourceInner workerPoolEnvelope , final ServiceCallback < WorkerPoolResourceInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWorkerPoolWithServiceResponseAsync ( resourceGroupName , name , workerPoolName , workerPoolEnvelope ) , serviceCallback ) ;
public class AbstractHttpFileBuilder {
    /**
     * Sets the function which generates the entity tag that's used for setting the {@code "etag"} header
     * automatically.
     *
     * <p>NOTE(review): the previous doc claimed passing {@code null} disables the {@code "etag"}
     * header, but this overload rejects {@code null}; disabling is presumably done via another
     * {@code entityTag} overload — confirm against the rest of the builder API.
     *
     * @param entityTagFunction the entity tag function that generates the entity tag;
     *        must not be {@code null}
     * @return {@code this} builder, for method chaining
     * @throws NullPointerException if {@code entityTagFunction} is {@code null}
     */
    public final B entityTag(BiFunction<String, HttpFileAttributes, String> entityTagFunction) {
        this.entityTagFunction = requireNonNull(entityTagFunction, "entityTagFunction");
        return self();
    }
}
public class DoCopy {
    /**
     * copies the specified resource(s) to the specified destination. preconditions must be
     * handled by the caller. Standard status codes must be handled by the caller. a multi
     * status report in case of errors is created here.
     *
     * @param transaction indicates that the method is within the scope of a WebDAV transaction
     * @param sourcePath path from where to read
     * @param destinationPath path where to write
     * @param errorList accumulates per-path error status codes for the multi-status report
     * @param req HttpServletRequest
     * @param resp HttpServletResponse
     * @throws WebdavException if an error in the underlying store occurs
     * @throws IOException
     */
    private void copy(ITransaction transaction, String sourcePath, String destinationPath, Hashtable<String, Integer> errorList, HttpServletRequest req, HttpServletResponse resp) throws WebdavException, IOException {
        StoredObject sourceSo = store.getStoredObject(transaction, sourcePath);
        // Source no longer exists (e.g. removed after the caller's precondition checks).
        if (sourceSo == null) {
            resp.setStatus(WebdavStatus.SC_NOT_FOUND);
            return;
        }
        if (sourceSo.isResource()) {
            // Plain resource: create the destination, then stream the source
            // content into it (content type and charset passed as null).
            store.createResource(transaction, destinationPath);
            long resourceLength = store.setResourceContent(transaction, destinationPath, store.getResourceContent(transaction, sourcePath), null, null);
            // -1 means the store could not report a written length; skip the update then.
            if (resourceLength != -1) {
                StoredObject destinationSo = store.getStoredObject(transaction, destinationPath);
                destinationSo.setResourceLength(resourceLength);
            }
        } else {
            if (sourceSo.isFolder()) {
                // Folder: recursive copy; individual failures accumulate in errorList.
                copyFolder(transaction, sourcePath, destinationPath, errorList, req, resp);
            } else {
                // Neither a resource nor a folder: report as missing.
                resp.sendError(WebdavStatus.SC_NOT_FOUND);
            }
        }
    }
}
public class PairtreeFactory { /** * Creates a Pairtree using the supplied bucket and AWS credentials . * @ param aBucket An S3 bucket * @ param aAccessKey An AWS access key * @ param aSecretKey An AWS secret key * @ return A Pairtree */ public Pairtree getPairtree ( final String aBucket , final String aAccessKey , final String aSecretKey ) { } }
return new S3Pairtree ( myVertx , aBucket , aAccessKey , aSecretKey ) ;
public class SynthesisTaskMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param synthesisTask the task to serialize; must not be null
     * @param protocolMarshaller the target that receives each field with its binding
     * @throws SdkClientException if the task is null or any field fails to marshall
     */
    public void marshall(SynthesisTask synthesisTask, ProtocolMarshaller protocolMarshaller) {
        if (synthesisTask == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit every field with its protocol binding, one call per field.
            protocolMarshaller.marshall(synthesisTask.getTaskId(), TASKID_BINDING);
            protocolMarshaller.marshall(synthesisTask.getTaskStatus(), TASKSTATUS_BINDING);
            protocolMarshaller.marshall(synthesisTask.getTaskStatusReason(), TASKSTATUSREASON_BINDING);
            protocolMarshaller.marshall(synthesisTask.getOutputUri(), OUTPUTURI_BINDING);
            protocolMarshaller.marshall(synthesisTask.getCreationTime(), CREATIONTIME_BINDING);
            protocolMarshaller.marshall(synthesisTask.getRequestCharacters(), REQUESTCHARACTERS_BINDING);
            protocolMarshaller.marshall(synthesisTask.getSnsTopicArn(), SNSTOPICARN_BINDING);
            protocolMarshaller.marshall(synthesisTask.getLexiconNames(), LEXICONNAMES_BINDING);
            protocolMarshaller.marshall(synthesisTask.getOutputFormat(), OUTPUTFORMAT_BINDING);
            protocolMarshaller.marshall(synthesisTask.getSampleRate(), SAMPLERATE_BINDING);
            protocolMarshaller.marshall(synthesisTask.getSpeechMarkTypes(), SPEECHMARKTYPES_BINDING);
            protocolMarshaller.marshall(synthesisTask.getTextType(), TEXTTYPE_BINDING);
            protocolMarshaller.marshall(synthesisTask.getVoiceId(), VOICEID_BINDING);
            protocolMarshaller.marshall(synthesisTask.getLanguageCode(), LANGUAGECODE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class JvmFormalParameterImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF reflective setter: dispatches on the feature id and delegates
     * unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case TypesPackage.JVM_FORMAL_PARAMETER__NAME:
                setName((String) newValue);
                return;
            case TypesPackage.JVM_FORMAL_PARAMETER__PARAMETER_TYPE:
                setParameterType((JvmTypeReference) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class JobEnvironmentCache { /** * 创建缓存JobBean实例的工厂 * @ param jarFilePath Jar包本地路径 * @ param isSpring 是否spring环境 * @ return 创建好的JobBean工厂 * @ throws Exception 当出现未检查的异常时抛出 */ protected JobBeanFactory createJobBeanFactory ( String jarFilePath , boolean isSpring ) throws Exception { } }
String jobBeanFactoryClassName ; if ( isSpring ) { jobBeanFactoryClassName = "com.zuoxiaolong.niubi.job.spring.bean.SpringJobBeanFactory" ; } else { jobBeanFactoryClassName = "com.zuoxiaolong.niubi.job.scheduler.bean.DefaultJobBeanFactory" ; } ClassLoader jarApplicationClassLoader = ApplicationClassLoaderFactory . getJarApplicationClassLoader ( jarFilePath ) ; Class < ? extends JobBeanFactory > jobBeanFactoryClass = ( Class < ? extends JobBeanFactory > ) jarApplicationClassLoader . loadClass ( jobBeanFactoryClassName ) ; Class < ? > [ ] parameterTypes = new Class [ ] { ClassLoader . class } ; Constructor < ? extends JobBeanFactory > jobBeanFactoryConstructor = jobBeanFactoryClass . getConstructor ( parameterTypes ) ; return jobBeanFactoryConstructor . newInstance ( jarApplicationClassLoader ) ;
public class BatchDetectDominantLanguageResult { /** * A list containing one object for each document that contained an error . The results are sorted in ascending order * by the < code > Index < / code > field and match the order of the documents in the input list . If there are no errors in * the batch , the < code > ErrorList < / code > is empty . * @ param errorList * A list containing one object for each document that contained an error . The results are sorted in * ascending order by the < code > Index < / code > field and match the order of the documents in the input list . If * there are no errors in the batch , the < code > ErrorList < / code > is empty . */ public void setErrorList ( java . util . Collection < BatchItemError > errorList ) { } }
if ( errorList == null ) { this . errorList = null ; return ; } this . errorList = new java . util . ArrayList < BatchItemError > ( errorList ) ;
public class AtlasClient { /** * Register the given type ( meta model ) * @ param typeDef type definition * @ return result json object * @ throws AtlasServiceException */ public List < String > updateType ( TypesDef typeDef ) throws AtlasServiceException { } }
return updateType ( TypesSerialization . toJson ( typeDef ) ) ;
public class DescribeDhcpOptionsResult { /** * Information about one or more DHCP options sets . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDhcpOptions ( java . util . Collection ) } or { @ link # withDhcpOptions ( java . util . Collection ) } if you want to * override the existing values . * @ param dhcpOptions * Information about one or more DHCP options sets . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeDhcpOptionsResult withDhcpOptions ( DhcpOptions ... dhcpOptions ) { } }
if ( this . dhcpOptions == null ) { setDhcpOptions ( new com . amazonaws . internal . SdkInternalList < DhcpOptions > ( dhcpOptions . length ) ) ; } for ( DhcpOptions ele : dhcpOptions ) { this . dhcpOptions . add ( ele ) ; } return this ;