signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ExtraLanguageFeatureNameConverter { /** * Convert a full call to a feature . * < p > This function is supposed to change the two list parameters for reflecting the conversion . * @ param simpleName the simple name of the feature to be called . * @ param calledFeature the called feature . * @ param leftOperand the description of the elements into the left operand ( usually , before assignment sign ) . * @ param receiver the description of the receiver , i . e . the object on which the feature is called . * @ param arguments the list of the arguments . * @ return a description of the conversion ; or { @ code null } for ignoring the call . */ public ConversionResult convertFeatureCall ( String simpleName , JvmIdentifiableElement calledFeature , List < Object > leftOperand , List < Object > receiver , List < XExpression > arguments ) { } }
if ( this . conversions == null ) { this . conversions = initMapping ( ) ; } final List < Pair < FeaturePattern , FeatureReplacement > > struct = this . conversions . get ( getKey ( simpleName ) ) ; if ( struct != null ) { final String replacementId = calledFeature . getIdentifier ( ) ; final FeatureReplacement replacement = matchFirstPattern ( struct , replacementId , simpleName , receiver ) ; if ( replacement != null ) { if ( replacement . hasReplacement ( ) ) { return replacement . replace ( calledFeature , leftOperand , receiver , arguments ) ; } return null ; } } return new ConversionResult ( simpleName ) ;
public class CloseableReference { /** * Close ( or free ) the reference * @ throws IOException if any exception occurs */ @ Override public void close ( ) throws IOException { } }
try { if ( reference != null ) { closer . close ( reference ) ; } } catch ( Exception e ) { throw new IOException ( e ) ; } finally { this . wasClosed = true ; }
public class Es6RewriteDestructuring {

    /**
     * Convert the "rest" of an object destructuring lhs by making a clone and deleting any
     * properties that were stated in the original object pattern.
     *
     * <p>Nodes in statedProperties that are a stringKey will be used in a getprop when deleting.
     * All other types will be used in a getelem, as is done for computed properties.
     *
     * <pre>
     *   {a, [foo()]: b, ...x} = rhs;
     * becomes
     *   var temp = rhs;
     *   var temp1 = Object.assign({}, temp);
     *   var temp2 = foo()
     *   a = temp.a
     *   b = temp[foo()]
     *   x = (delete temp1.a, delete temp1[temp2], temp1);
     * </pre>
     *
     * @param rest node representing the "...rest" of objectPattern
     * @param restTempVarName name of the var containing the clone of the rhs evaluation result
     * @param statedProperties list of properties to delete from the clone
     */
    private Node objectPatternRestRHS(Node objectPattern, Node rest, String restTempVarName,
            ArrayList<Node> statedProperties) {
        // The rest element must be the last child of the pattern by ES spec.
        checkArgument(objectPattern.getLastChild() == rest);
        // Template node for references to the temp var; cloned for each use below.
        Node restTempVarModel = astFactory.createName(restTempVarName, objectPattern.getJSType());
        Node result = restTempVarModel.cloneNode();
        if (!statedProperties.isEmpty()) {
            // Build a left-nested comma chain of "delete temp.prop" expressions,
            // then append the temp var itself so the whole expression evaluates to it.
            Iterator<Node> propItr = statedProperties.iterator();
            Node comma = deletionNodeForRestProperty(restTempVarModel.cloneNode(), propItr.next());
            while (propItr.hasNext()) {
                comma = astFactory.createComma(comma, deletionNodeForRestProperty(restTempVarModel.cloneNode(), propItr.next()));
            }
            result = astFactory.createComma(comma, result);
        }
        // Attach source positions from the original rest node for error reporting/source maps.
        result.useSourceInfoIfMissingFromForTree(rest);
        return result;
    }
}
public class ContentKeyPoliciesInner { /** * List Content Key Policies . * Lists the Content Key Policies in the account . * @ param resourceGroupName The name of the resource group within the Azure subscription . * @ param accountName The Media Services account name . * @ param filter Restricts the set of items returned . * @ param top Specifies a non - negative integer n that limits the number of items returned from a collection . The service returns the number of available items up to but not greater than the specified value n . * @ param orderby Specifies the the key by which the result collection should be ordered . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; ContentKeyPolicyInner & gt ; object */ public Observable < Page < ContentKeyPolicyInner > > listAsync ( final String resourceGroupName , final String accountName , final String filter , final Integer top , final String orderby ) { } }
return listWithServiceResponseAsync ( resourceGroupName , accountName , filter , top , orderby ) . map ( new Func1 < ServiceResponse < Page < ContentKeyPolicyInner > > , Page < ContentKeyPolicyInner > > ( ) { @ Override public Page < ContentKeyPolicyInner > call ( ServiceResponse < Page < ContentKeyPolicyInner > > response ) { return response . body ( ) ; } } ) ;
public class ValidationObjUtil {

    /**
     * Copies the named fields from one object to another, one field at a time.
     *
     * @param from the source object
     * @param to the target object
     * @param fieldNames the names of the fields to copy
     */
    public static void copyFromTo(Object from, Object to, String... fieldNames) {
        // NOTE(review): this delegates each field to copyFromTo(from, to, field).
        // That presumably resolves to a single-field overload copyFromTo(Object, Object, String)
        // defined elsewhere in this class; if no such overload exists, this call binds back to
        // this varargs method and recurses forever — TODO confirm the overload is present.
        for (String field : fieldNames) {
            copyFromTo(from, to, field);
        }
    }
}
public class YearMonthDay { /** * Converts this YearMonthDay to a full datetime at midnight using the * specified time zone . * This method uses the chronology from this instance plus the time zone * specified . * @ param zone the zone to use , null means default * @ return this date as a datetime at midnight */ public DateTime toDateTimeAtMidnight ( DateTimeZone zone ) { } }
Chronology chrono = getChronology ( ) . withZone ( zone ) ; return new DateTime ( getYear ( ) , getMonthOfYear ( ) , getDayOfMonth ( ) , 0 , 0 , 0 , 0 , chrono ) ;
public class UseEnumCollections { /** * returns whether the collection has already been reported on * @ param stackPos * the position on the opstack to check * @ return whether the collection has already been reported . */ private boolean alreadyReported ( int stackPos ) { } }
if ( stack . getStackDepth ( ) <= stackPos ) { return false ; } OpcodeStack . Item item = stack . getStackItem ( stackPos ) ; XField field = item . getXField ( ) ; if ( field == null ) { return false ; } String fieldName = field . getName ( ) ; return ! checkedFields . add ( fieldName ) ;
public class Serializer { /** * Reads a writable object . * @ param buffer The buffer from which to read the object . * @ param < T > The object type . * @ return The read object . */ @ SuppressWarnings ( "unchecked" ) private < T > T readByClass ( BufferInput < ? > buffer ) { } }
String name = buffer . readUTF8 ( ) ; if ( whitelistRequired . get ( ) ) throw new SerializationException ( "cannot deserialize unregistered type: " + name ) ; Class < T > type = ( Class < T > ) types . get ( name ) ; if ( type == null ) { try { type = ( Class < T > ) Class . forName ( name ) ; if ( type == null ) throw new SerializationException ( "cannot deserialize: unknown type" ) ; types . put ( name , type ) ; } catch ( ClassNotFoundException e ) { throw new SerializationException ( "object class not found: " + name , e ) ; } } TypeSerializer < T > serializer = getSerializer ( type ) ; if ( serializer == null ) throw new SerializationException ( "cannot deserialize unregistered type: " + name ) ; return serializer . read ( type , buffer , this ) ;
public class InternalRoute { /** * Matches / index to / index or / person / 1 to / person / { id } * @ return True if the actual route matches a raw route . False if not . */ public boolean matches ( String requestUri ) { } }
Matcher matcher = regex . matcher ( requestUri ) ; return matcher . matches ( ) ;
public class Matrix3f { /** * Set the column at the given < code > column < / code > index , starting with < code > 0 < / code > . * @ param column * the column index in < code > [ 0 . . 2 ] < / code > * @ param src * the column components to set * @ return this * @ throws IndexOutOfBoundsException if < code > column < / code > is not in < code > [ 0 . . 2 ] < / code > */ public Matrix3f setColumn ( int column , Vector3fc src ) throws IndexOutOfBoundsException { } }
return setColumn ( column , src . x ( ) , src . y ( ) , src . z ( ) ) ;
public class InstructionView { /** * Sets up the { @ link RecyclerView } that is used to display the turn lanes . */ private void initializeTurnLaneRecyclerView ( ) { } }
turnLaneAdapter = new TurnLaneAdapter ( ) ; rvTurnLanes . setAdapter ( turnLaneAdapter ) ; rvTurnLanes . setHasFixedSize ( true ) ; rvTurnLanes . setLayoutManager ( new LinearLayoutManager ( getContext ( ) , LinearLayoutManager . HORIZONTAL , false ) ) ;
public class ELParser {

    /**
     * BracketSuffix
     * Sub Expression Suffix
     *
     * <p>JavaCC/JJTree-generated production: parses "[ Expression ]" and wraps the
     * parsed child in an {@code AstBracketSuffix} tree node. Do not hand-edit the
     * control flow — the `if (true) throw` shape is how JJTree emits rethrows.
     */
    final public void BracketSuffix() throws ParseException {
        /* @bgen(jjtree) BracketSuffix */
        AstBracketSuffix jjtn000 = new AstBracketSuffix(JJTBRACKETSUFFIX);
        boolean jjtc000 = true;
        jjtree.openNodeScope(jjtn000);
        try {
            jj_consume_token(LBRACK);
            Expression();
            jj_consume_token(RBRACK);
        } catch (Throwable jjte000) {
            // On failure, discard the partially-built node scope (or pop a completed child).
            if (jjtc000) {
                jjtree.clearNodeScope(jjtn000);
                jjtc000 = false;
            } else {
                jjtree.popNode();
            }
            // Rethrow preserving the original exception category.
            if (jjte000 instanceof RuntimeException) {
                { if (true) throw (RuntimeException) jjte000; }
            }
            if (jjte000 instanceof ParseException) {
                { if (true) throw (ParseException) jjte000; }
            }
            { if (true) throw (Error) jjte000; }
        } finally {
            // Close the node scope on the success path.
            if (jjtc000) {
                jjtree.closeNodeScope(jjtn000, true);
            }
        }
    }
}
public class ChronoFormatter {

    /**
     * Creates a copy of this formatter with merged attributes and outer defaults.
     *
     * <p>Used by CustomizedProcessor. The outer attributes take precedence over
     * this formatter's global attributes during the merge.
     *
     * @param outerDefaults element defaults supplied by the enclosing formatter
     * @param outerAttrs attributes supplied by the enclosing formatter
     * @return a new formatter combining this one with the outer context
     */
    ChronoFormatter<T> with(Map<ChronoElement<?>, Object> outerDefaults, AttributeSet outerAttrs) {
        AttributeSet merged = AttributeSet.merge(outerAttrs, this.globalAttributes);
        // First wrap this formatter with the outer defaults, then apply the merged
        // attributes together with the calendar history carried by those attributes.
        return new ChronoFormatter<>(new ChronoFormatter<>(this, outerDefaults), merged,
            merged.get(HistoricAttribute.CALENDAR_HISTORY, null));
    }
}
public class Decoder { /** * Reads a code of given length and at given index in an array of bits */ private static int readCode ( boolean [ ] rawbits , int startIndex , int length ) { } }
int res = 0 ; for ( int i = startIndex ; i < startIndex + length ; i ++ ) { res <<= 1 ; if ( rawbits [ i ] ) { res |= 0x01 ; } } return res ;
public class HttpDispatcherChannel { /** * @ see com . ibm . wsspi . channelfw . Channel # destroy ( ) */ @ Override public void destroy ( ) throws ChannelException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Destroy channel: " + this ) ; } if ( null != this . myFactory ) { this . myFactory . removeChannel ( getName ( ) ) ; this . myFactory = null ; }
public class Graph { /** * Removes the given vertex and its edges from the graph . * @ param vertex the vertex to remove * @ return the new graph containing the existing vertices and edges without * the removed vertex and its edges */ public Graph < K , VV , EV > removeVertex ( Vertex < K , VV > vertex ) { } }
List < Vertex < K , VV > > vertexToBeRemoved = new ArrayList < > ( ) ; vertexToBeRemoved . add ( vertex ) ; return removeVertices ( vertexToBeRemoved ) ;
public class Consortium { /** * Returns for given parameter < i > _ id < / i > the instance of class * { @ link Consortium } . * @ param _ id id to search in the cache * @ return instance of class { @ link Consortium } * @ throws CacheReloadException on error * @ see # getCache */ public static Consortium get ( final long _id ) throws CacheReloadException { } }
final Cache < Long , Consortium > cache = InfinispanCache . get ( ) . < Long , Consortium > getCache ( Consortium . IDCACHE ) ; if ( ! cache . containsKey ( _id ) && ! Consortium . getConsortiumFromDB ( Consortium . SQL_ID , _id ) ) { cache . put ( _id , Consortium . NULL , 100 , TimeUnit . SECONDS ) ; } final Consortium ret = cache . get ( _id ) ; return ret . equals ( Consortium . NULL ) ? null : ret ;
public class Journal {

    /**
     * Complete the upgrade for local image storage with the given namespace.
     *
     * @param nsInfo the namespace the journal is being upgraded for; its
     *               namespace ID must already be initialized (non-zero)
     * @throws IOException if completing the storage upgrade fails
     */
    private void completeUpgradeImage(NamespaceInfo nsInfo) throws IOException {
        // Guard: refuse to upgrade against an uninitialized namespace.
        Preconditions.checkState(nsInfo.getNamespaceID() != 0,
            "can't upgrade with uninitialized namespace info: %s",
            nsInfo.toColonSeparatedString());
        LOG.info("Completing Upgrading image " + this.getJournalId()
            + " with namespace info: (" + nsInfo.toColonSeparatedString() + ")");
        // Do something about checkpoint image digests.
        imageStorage.completeUpgrade(nsInfo);
    }
}
public class CmsXmlDisplayFormatterValue { /** * Returns the formatter config id . < p > * @ return the formatter config id */ public CmsUUID getFormatterId ( ) { } }
String value = getStringValue ( null ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( value ) ) { String [ ] parts = value . split ( SEPARATOR ) ; if ( parts . length == 2 ) { return new CmsUUID ( parts [ 1 ] ) ; } } return null ;
public class PercentileBuckets { /** * Returns a copy of the bucket values array . */ public static long [ ] asArray ( ) { } }
long [ ] values = new long [ BUCKET_VALUES . length ] ; System . arraycopy ( BUCKET_VALUES , 0 , values , 0 , BUCKET_VALUES . length ) ; return values ;
public class DesignDocumentManager { /** * Removes a design document from the database . * @ param id the document id ( optionally prefixed with " _ design / " ) * @ return { @ link DesignDocument } */ public Response remove ( String id ) { } }
assertNotEmpty ( id , "id" ) ; id = ensureDesignPrefix ( id ) ; String revision = null ; // Get the revision ID from ETag , removing leading and trailing " revision = client . executeRequest ( Http . HEAD ( new DatabaseURIHelper ( db . getDBUri ( ) ) . documentUri ( id ) ) ) . getConnection ( ) . getHeaderField ( "ETag" ) ; if ( revision != null ) { revision = revision . substring ( 1 , revision . length ( ) - 1 ) ; return db . remove ( id , revision ) ; } else { throw new CouchDbException ( "No ETag header found for design document with id " + id ) ; }
public class Assistant { /** * List workspaces . * List the workspaces associated with a Watson Assistant service instance . * This operation is limited to 500 requests per 30 minutes . For more information , see * * Rate limiting * * . * @ param listWorkspacesOptions the { @ link ListWorkspacesOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link WorkspaceCollection } */ public ServiceCall < WorkspaceCollection > listWorkspaces ( ListWorkspacesOptions listWorkspacesOptions ) { } }
String [ ] pathSegments = { "v1/workspaces" } ; RequestBuilder builder = RequestBuilder . get ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments ) ) ; builder . query ( "version" , versionDate ) ; Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "conversation" , "v1" , "listWorkspaces" ) ; for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) { builder . header ( header . getKey ( ) , header . getValue ( ) ) ; } builder . header ( "Accept" , "application/json" ) ; if ( listWorkspacesOptions != null ) { if ( listWorkspacesOptions . pageLimit ( ) != null ) { builder . query ( "page_limit" , String . valueOf ( listWorkspacesOptions . pageLimit ( ) ) ) ; } if ( listWorkspacesOptions . includeCount ( ) != null ) { builder . query ( "include_count" , String . valueOf ( listWorkspacesOptions . includeCount ( ) ) ) ; } if ( listWorkspacesOptions . sort ( ) != null ) { builder . query ( "sort" , listWorkspacesOptions . sort ( ) ) ; } if ( listWorkspacesOptions . cursor ( ) != null ) { builder . query ( "cursor" , listWorkspacesOptions . cursor ( ) ) ; } if ( listWorkspacesOptions . includeAudit ( ) != null ) { builder . query ( "include_audit" , String . valueOf ( listWorkspacesOptions . includeAudit ( ) ) ) ; } } return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( WorkspaceCollection . class ) ) ;
public class Field {

    /**
     * Find the right converter for this field on the given operation.
     *
     * <p>Resolution order: (1) this field's own converter list, (2) external converters
     * configured per operation, (3) class-level converters keyed by the introspected
     * type of the (possibly nested, dot-separated) property.
     *
     * @param operation the operation a converter is needed for
     * @return a converter, or {@code null} if none could be resolved
     */
    public Converter getConverter(String operation) {
        // First we check the "converters" list
        Converter c = getConverters().getConverterForOperation(operation);
        if (c == null) {
            // if not found, we check configs
            for (FieldOperationConfig config : getConfigs()) {
                if (config.includes(operation)) {
                    c = getPm().findExternalConverter(config.getEconverter());
                    break;
                }
            }
        }
        if (c == null) {
            // If not found, we check class level converters
            final String _property = getProperty();
            try {
                // Walk the dot-separated property path reflectively to find the leaf type.
                final String[] _properties = _property.split("[.]");
                Class<?> clazz = Class.forName(getEntity().getClazz());
                for (int i = 0; i < _properties.length - 1; i++) {
                    clazz = FieldUtils.getField(clazz, _properties[i], true).getType();
                }
                final String className = FieldUtils.getField(clazz, _properties[_properties.length - 1], true).getType().getName();
                c = getPm().getClassConverters().getConverter(operation, className);
            } catch (Exception ex) {
                // Best effort: introspection failure just means no class-level converter.
                getPm().info(String.format("Unable to introspect field '%s' on entity '%s'", _property, getEntity().getId()));
            }
        }
        return c;
    }
}
public class TokenStream {

    /**
     * Attempt to consume this current token and the next tokens if and only if they match the
     * expected values, and return whether this method was indeed able to consume all of the
     * supplied tokens.
     *
     * <p>This is <i>not</i> the same as calling {@link #canConsume(String)} for each of the
     * supplied arguments, since this method ensures that <i>all</i> of the supplied values can
     * be consumed.
     *
     * <p>The {@link #ANY_VALUE ANY_VALUE} constant can be used in the expected values as a wildcard.
     *
     * @param nextTokens the expected values of the next tokens
     * @return true if the tokens all matched and were consumed, false otherwise (nothing consumed)
     * @throws IllegalStateException if this method was called before the stream was {@link #start() started}
     */
    public boolean canConsume(String[] nextTokens) throws IllegalStateException {
        if (completed) return false;
        // Work on a scratch iterator positioned at the current token; stream state is
        // only committed once ALL expected tokens have matched.
        ListIterator<Token> iter = tokens.listIterator(tokenIterator.previousIndex());
        Token token = null;
        for (String nextExpected : nextTokens) {
            if (!iter.hasNext()) return false;
            token = iter.next();
            // Intentional reference comparison: ANY_VALUE is a sentinel constant, so
            // only the very same object acts as the wildcard, not an equal string.
            if (nextExpected == ANY_VALUE) continue;
            if (!token.matches(nextExpected)) return false;
        }
        // Commit: adopt the advanced iterator and move to the token after the match.
        this.tokenIterator = iter;
        this.currentToken = tokenIterator.hasNext() ? tokenIterator.next() : null;
        this.completed = this.currentToken == null;
        return true;
    }
}
public class FnLocalDate { /** * It converts a { @ link Calendar } into a { @ link LocalDate } in the given { @ link DateTimeZone } * @ param dateTimeZone the the time zone ( { @ link DateTimeZone } ) to be used * @ return the { @ link LocalDate } created from the input and arguments */ public static final < T extends Calendar > Function < T , LocalDate > calendarToLocalDate ( DateTimeZone dateTimeZone ) { } }
return new CalendarToLocalDate < T > ( dateTimeZone ) ;
public class ChecksumFileSystem {

    /**
     * Report a checksum error to the file system.
     *
     * <p>This base implementation simply ignores the error and requests no retry;
     * subclasses may override to move the bad file aside or trigger a re-read.
     *
     * @param f the file name containing the error
     * @param in the stream open on the file
     * @param inPos the position of the beginning of the bad data in the file
     * @param sums the stream open on the checksum file
     * @param sumsPos the position of the beginning of the bad data in the checksum file
     * @return if retry is necessary (always {@code false} here)
     */
    public boolean reportChecksumFailure(Path f, FSDataInputStream in, long inPos, FSDataInputStream sums, long sumsPos) {
        return false;
    }
}
public class KeyUtil {

    /**
     * Generates a public/private key pair for asymmetric encryption.<br>
     * See the key pair generation algorithms at:
     * https://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#KeyPairGenerator
     *
     * @param algorithm the asymmetric encryption algorithm
     * @param keySize the key modulus length
     * @param seed the random seed
     * @param params {@link AlgorithmParameterSpec}
     * @return {@link KeyPair}
     * @since 4.3.3
     */
    public static KeyPair generateKeyPair(String algorithm, int keySize, byte[] seed, AlgorithmParameterSpec... params) {
        algorithm = getAlgorithmAfterWith(algorithm);
        final KeyPairGenerator keyPairGen = getKeyPairGenerator(algorithm);
        // Initialize the key modulus length, if one was requested.
        if (keySize > 0) {
            // Adjust the key size where the algorithm constrains it.
            if ("EC".equalsIgnoreCase(algorithm) && keySize > 256) {
                // EC keys are length-limited; fall back to the default of 256.
                keySize = 256;
            }
            if (null != seed) {
                keyPairGen.initialize(keySize, new SecureRandom(seed));
            } else {
                keyPairGen.initialize(keySize);
            }
        }
        // Apply any custom initialization parameters.
        if (ArrayUtil.isNotEmpty(params)) {
            for (AlgorithmParameterSpec param : params) {
                if (null == param) {
                    continue;
                }
                try {
                    if (null != seed) {
                        keyPairGen.initialize(param, new SecureRandom(seed));
                    } else {
                        keyPairGen.initialize(param);
                    }
                } catch (InvalidAlgorithmParameterException e) {
                    throw new CryptoException(e);
                }
            }
        }
        return keyPairGen.generateKeyPair();
    }
}
public class JsonParser { /** * 将Object序列化 ( 对于byte [ ] , 会将byte [ ] 用Base64编码一下 , 然后返回 , 相当于对该byte调用Base64的encode方法然后将结果返回 , 对于String * 会直接将String返回 ) * @ param obj 要序列化的对象 * @ param ignoreNull 是否忽略空元素 , 如果为true为忽略 * @ return 序列化失败将返回空字符串 */ public String toJson ( Object obj , boolean ignoreNull ) { } }
if ( obj == null ) { return null ; } if ( obj instanceof String ) { return ( String ) obj ; } try { ObjectMapper mapper ; if ( ignoreNull ) { mapper = MAPPER_IGNORE_NULL ; } else { mapper = MAPPER ; } return mapper . writeValueAsString ( obj ) ; } catch ( Exception e ) { log . error ( "序列化失败,失败原因:" , e ) ; return "" ; }
public class PlayEngine { /** * Send seek status notification * @ param item * Playlist item * @ param position * Seek position */ private void sendSeekStatus ( IPlayItem item , int position ) { } }
Status seek = new Status ( StatusCodes . NS_SEEK_NOTIFY ) ; seek . setClientid ( streamId ) ; seek . setDetails ( item . getName ( ) ) ; seek . setDesciption ( String . format ( "Seeking %d (stream ID: %d)." , position , streamId ) ) ; doPushMessage ( seek ) ;
public class ProjectReactorBuilder { /** * Transforms a comma - separated list String property in to a array of trimmed strings . * This works even if they are separated by whitespace characters ( space char , EOL , . . . ) */ static String [ ] getListFromProperty ( Map < String , String > properties , String key ) { } }
String propValue = properties . get ( key ) ; if ( propValue != null ) { return parseAsCsv ( key , propValue ) ; } return new String [ 0 ] ;
public class LanguageJSONImpl { /** * / * package */ static ResponseList < HelpResources . Language > createLanguageList ( JSONArray list , HttpResponse res , Configuration conf ) throws TwitterException { } }
if ( conf . isJSONStoreEnabled ( ) ) { TwitterObjectFactory . clearThreadLocalMap ( ) ; } try { int size = list . length ( ) ; ResponseList < HelpResources . Language > languages = new ResponseListImpl < HelpResources . Language > ( size , res ) ; for ( int i = 0 ; i < size ; i ++ ) { JSONObject json = list . getJSONObject ( i ) ; HelpResources . Language language = new LanguageJSONImpl ( json ) ; languages . add ( language ) ; if ( conf . isJSONStoreEnabled ( ) ) { TwitterObjectFactory . registerJSONObject ( language , json ) ; } } if ( conf . isJSONStoreEnabled ( ) ) { TwitterObjectFactory . registerJSONObject ( languages , list ) ; } return languages ; } catch ( JSONException jsone ) { throw new TwitterException ( jsone ) ; }
public class AbstractGenericTreeNode { /** * This method adds the given { @ code child } to the { @ link # getChildren ( ) children } of this * { @ link GenericTreeNode } . * @ param child is the { @ link # getChildren ( ) child } to add . It ' s { @ link # getParent ( ) parent } has to be * identical to this { @ link GenericTreeNode } . */ protected void addChild ( CHILD child ) { } }
Objects . requireNonNull ( child , "child" ) ; if ( child . getParent ( ) != this ) { throw new IllegalArgumentException ( child . toString ( ) ) ; } this . mutableChildList . add ( child ) ;
public class NameNode { /** * add new replica blocks to the Inode to target mapping * also add the Inode file to DataNodeDesc */ public void blocksBeingWrittenReport ( DatanodeRegistration nodeReg , BlockReport blocks ) throws IOException { } }
verifyRequest ( nodeReg ) ; long [ ] blocksAsLong = blocks . getBlockReportInLongs ( ) ; BlockListAsLongs blist = new BlockListAsLongs ( blocksAsLong ) ; boolean processed = namesystem . processBlocksBeingWrittenReport ( nodeReg , blist ) ; String message = "*BLOCK* NameNode.blocksBeingWrittenReport: " + "from " + nodeReg . getName ( ) + " " + blist . getNumberOfBlocks ( ) + " blocks" ; if ( ! processed ) { message += " was discarded." ; } stateChangeLog . info ( message ) ;
public class ModelAdapter {

    /**
     * Turns a changeset into a composite write-attribute operation.
     *
     * <p>Each non-null changeset entry becomes one step cloned from {@code prototype}
     * (which must carry ADDRESS and OP). Values are set by their Java type; a
     * {@code FormItem.VALUE_SEMANTICS} marker is skipped so it is not persisted as
     * UNDEFINED. Unsupported value types fail fast.
     *
     * @param prototype template node providing ADDRESS and OP for every step
     * @param changeSet map of java property name to new value
     * @param bindings property bindings mapping java names to detyped names
     * @return composite operation
     */
    @Deprecated
    public static ModelNode detypedFromChangeset(ModelNode prototype, Map<String, Object> changeSet,
            List<PropertyBinding> bindings) {
        // pre requisites
        prototype.require(ADDRESS);
        prototype.require(OP);
        ModelNode operation = new ModelNode();
        operation.get(OP).set(COMPOSITE);
        operation.get(ADDRESS).setEmptyList();
        List<ModelNode> steps = new ArrayList<ModelNode>();
        for (PropertyBinding binding : bindings) {
            Object value = changeSet.get(binding.getJavaName());
            if (value != null) {
                ModelNode step = prototype.clone();
                step.get(NAME).set(binding.getDetypedName());
                // Dispatch on the concrete value type to call the right ModelNode setter.
                Class type = value.getClass();
                if (FormItem.VALUE_SEMANTICS.class == type) {
                    // skip, it would be persisted as UNDEFINED otherwise
                } else if (String.class == type) {
                    step.get(VALUE).set((String) value);
                } else if (Boolean.class == type) {
                    step.get(VALUE).set((Boolean) value);
                } else if (Integer.class == type) {
                    step.get(VALUE).set((Integer) value);
                } else if (Double.class == type) {
                    step.get(VALUE).set((Double) value);
                } else {
                    throw new RuntimeException("Unsupported type: " + type);
                }
                steps.add(step);
            }
        }
        operation.get(STEPS).set(steps);
        return operation;
    }
}
public class ManagedObject {

    /**
     * Convert serialized bytes back into a managed object.
     *
     * <p>The first int of the stream is a signature that selects the concrete type to
     * instantiate; each branch then delegates to that type's {@code readObject}. The
     * generic branch reflectively loads the named class, caching its no-arg constructor.
     *
     * @param serializedBytes the bytes that have been formed by previously serializing
     *        the ManagedObject.
     * @param objectManagerState of the objectManager reconstructing the ManagedObject.
     * @return ManagedObject that is deserialized.
     * @throws ObjectManagerException
     */
    protected static final ManagedObject restoreFromSerializedBytes(byte serializedBytes[],
            ObjectManagerState objectManagerState) throws ObjectManagerException {
        String methodName = "restoreFromSerializedBytes";
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(cclass, methodName, new Object[] { serializedBytes, objectManagerState });
        ManagedObject managedObjectToReturn = null;
        java.io.ByteArrayInputStream byteArrayInputStream = new java.io.ByteArrayInputStream(serializedBytes);
        // Discover what to deserialize and how to deserialize it.
        int objectSignature;
        java.io.DataInputStream dataInputStream = new java.io.DataInputStream(byteArrayInputStream);
        try {
            objectSignature = dataInputStream.readInt();
        } catch (java.io.IOException exception) {
            // No FFDC Code Needed.
            ObjectManager.ffdc.processException(cclass, "restoreFromSerializedBytes", exception, "1:310:1.34");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(cclass, methodName, exception);
            throw new PermanentIOException(cclass, exception);
        } // catch.
        switch (objectSignature) {
            case SimplifiedSerialization.signature_DefaultSerialization:
                // Falls back to standard Java serialization of the remaining bytes.
                managedObjectToReturn = restoreSerializedDefault(byteArrayInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_ConcurrentSublist_Link:
                managedObjectToReturn = new ConcurrentSubList.Link();
                ((ConcurrentSubList.Link) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_ConcurrentSublist:
                managedObjectToReturn = new ConcurrentSubList();
                ((ConcurrentSubList) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_LinkedList_Link:
                managedObjectToReturn = new LinkedList.Link();
                ((LinkedList.Link) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_LinkedList:
                managedObjectToReturn = new LinkedList();
                ((LinkedList) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_TreeMap:
                managedObjectToReturn = new TreeMap();
                ((TreeMap) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_TreeMap_Entry:
                managedObjectToReturn = new TreeMap.Entry();
                ((TreeMap.Entry) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_ConcurrentLinkedList:
                managedObjectToReturn = new ConcurrentLinkedList();
                ((ConcurrentLinkedList) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_ObjectManagerState:
                managedObjectToReturn = new ObjectManagerState();
                ((ObjectManagerState) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                break;
            case SimplifiedSerialization.signature_Generic:
                // Generic objects carry their class name; instantiate reflectively.
                final String className;
                try {
                    className = dataInputStream.readUTF();
                } catch (java.io.IOException exception) {
                    // No FFDC Code Needed.
                    ObjectManager.ffdc.processException(cclass, methodName, exception, "1:390:1.34");
                    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                        trace.exit(cclass, methodName, exception);
                    throw new PermanentIOException(cclass, exception);
                } // catch.
                try {
                    Constructor constructor = (Constructor) _genericConstructors.get(className);
                    if (constructor == null) {
                        // Defect 609434
                        // If we need a constructor then create one in a doPrivileged block
                        // to allow us to set access to the constructor with system level
                        // privileges.
                        constructor = (Constructor) AccessController.doPrivileged(new PrivilegedExceptionAction() {
                            public Object run() throws Exception {
                                Class classToInstantiate = Class.forName(className);
                                Constructor retval = classToInstantiate.getDeclaredConstructor(new Class[0]);
                                retval.setAccessible(true);
                                // Cache the accessible constructor for future restores.
                                _genericConstructors.put(className, retval);
                                return retval;
                            }
                        });
                    }
                    managedObjectToReturn = (ManagedObject) constructor.newInstance(new Object[0]);
                } catch (java.security.PrivilegedActionException exception) {
                    // No FFDC Code Needed.
                    // Unwrap the privileged action failure and rethrow by category.
                    Throwable cause = exception.getCause();
                    if (cause instanceof java.lang.ClassNotFoundException) {
                        ObjectManager.ffdc.processException(cclass, methodName, cause, "1:424:1.34");
                        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                            trace.exit(cclass, methodName, cause);
                        throw new com.ibm.ws.objectManager.ClassNotFoundException(cclass, (java.lang.ClassNotFoundException) cause);
                    } else if (cause instanceof Exception) {
                        ObjectManager.ffdc.processException(cclass, methodName, cause, "1:430:1.34");
                        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                            trace.exit(cclass, methodName, cause);
                        throw new UnexpectedExceptionException(cclass, (Exception) cause);
                    } else {
                        ObjectManager.ffdc.processException(cclass, methodName, cause, "1:436:1.34");
                        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                            trace.exit(cclass, methodName, cause);
                        throw (Error) cause;
                    }
                } catch (Exception exception) {
                    // No FFDC Code Needed.
                    ObjectManager.ffdc.processException(cclass, methodName, exception, "1:444:1.34");
                    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                        trace.exit(cclass, methodName, "via UnexpectedExceptionException");
                    throw new UnexpectedExceptionException(cclass, exception);
                } // catch.
                // readObject is allowed to Throw java.io.IOException because it is a public interface
                // and may not want to throw ObjectManagerException.
                try {
                    ((SimplifiedSerialization) managedObjectToReturn).readObject(dataInputStream, objectManagerState);
                } catch (java.io.IOException exception) {
                    // No FFDC Code Needed.
                    ObjectManager.ffdc.processException(cclass, methodName, exception, "1:459:1.34");
                    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                        trace.exit(cclass, methodName, "via PermanentIOException");
                    throw new PermanentIOException(cclass, exception);
                } // try.
                break;
            default:
                // Unknown signature: trace a prefix of the bytes to aid diagnosis, then fail.
                ObjectSignatureNotFoundException objectSignatureNotFoundException = new ObjectSignatureNotFoundException(cclass, objectSignature);
                ObjectManager.ffdc.processException(cclass, methodName, objectSignatureNotFoundException, "1:471:1.34");
                if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()) {
                    trace.bytes(cclass, serializedBytes, 0, Math.min(serializedBytes.length, 1000));
                    trace.exit(cclass, methodName, objectSignatureNotFoundException);
                } // if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()).
                throw objectSignatureNotFoundException;
        } // Switch (objectSignature).
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(cclass, methodName, new Object[] { managedObjectToReturn });
        return managedObjectToReturn;
    }
}
public class JobStatistics { /** * Print the Job Execution Statistics * TODO : split to pring job , map / reduce task list and individual map / reduce task stats */ public void printJobExecutionStatistics ( ) { } }
/* * Print Job Counters */ System . out . println ( "JOB COUNTERS *********************************************" ) ; int size = this . _job . size ( ) ; java . util . Iterator < Map . Entry < Enum , String > > kv = this . _job . entrySet ( ) . iterator ( ) ; for ( int i = 0 ; i < size ; i ++ ) { Map . Entry < Enum , String > entry = ( Map . Entry < Enum , String > ) kv . next ( ) ; Enum key = entry . getKey ( ) ; String value = entry . getValue ( ) ; System . out . println ( "Key:<" + key . name ( ) + ">, value:<" + value + ">" ) ; } System . out . println ( "MAP COUNTERS *********************************************" ) ; int size1 = this . _mapTaskList . size ( ) ; for ( int i = 0 ; i < size1 ; i ++ ) { System . out . println ( "MAP TASK *********************************************" ) ; this . _mapTaskList . get ( i ) . printKeys ( ) ; } System . out . println ( "REDUCE COUNTERS *********************************************" ) ; int size2 = this . _mapTaskList . size ( ) ; for ( int i = 0 ; i < size2 ; i ++ ) { System . out . println ( "REDUCE TASK *********************************************" ) ; this . _reduceTaskList . get ( i ) . printKeys ( ) ; }
public class ProcessController { /** * Start an instance * @ param home The home directory * @ param options The options * @ return True if started successfully ; otherwise false */ public boolean start ( String home , File options ) { } }
File homeDirectory = new File ( home ) ; if ( ! homeDirectory . exists ( ) ) return false ; stop ( home ) ; try { List < String > command = new ArrayList < String > ( ) ; command . add ( java ) ; command . add ( "-Dironjacamar.home=" + home ) ; if ( options != null && options . exists ( ) ) command . add ( "-Dironjacamar.options=" + options . getAbsolutePath ( ) ) ; command . add ( "-Djava.net.preferIPv4Stack=true" ) ; command . add ( "-Djgroups.bind_addr=127.0.0.1" ) ; command . add ( "-Dorg.jboss.logging.Logger.pluginClass=org.jboss.logging.logmanager.LoggerPluginImpl" ) ; command . add ( "-Dlog4j.defaultInitOverride=true" ) ; command . add ( "-jar" ) ; command . add ( home + "/bin/ironjacamar-sjc.jar" ) ; ProcessBuilder pb = new ProcessBuilder ( command ) ; pb . redirectErrorStream ( true ) ; Map < String , String > environment = pb . environment ( ) ; environment . put ( "ironjacamar.home" , home ) ; Process p = pb . start ( ) ; instances . put ( home , p ) ; return true ; } catch ( Throwable t ) { // Ignore } return false ;
public class AWSWAFRegionalClient { /**
 * Attaches an IAM policy to the specified resource. The only supported use for this action is to
 * share a RuleGroup across accounts.
 *
 * The <code>PutPermissionPolicy</code> call is subject to these restrictions: one policy per
 * request; the policy must include an <code>Effect</code>, <code>Action</code> and
 * <code>Principal</code>; <code>Effect</code> must be <code>Allow</code>; the only permitted
 * actions are <code>waf:UpdateWebACL</code>, <code>waf-regional:UpdateWebACL</code>,
 * <code>waf:GetRuleGroup</code> and <code>waf-regional:GetRuleGroup</code> (extra or wildcard
 * actions are rejected); no <code>Resource</code> parameter; the ARN must be a valid WAF
 * RuleGroup ARN in the same region; the caller must own the RuleGroup; IAM policy version
 * 2012-10-17 is required. See
 * <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html">IAM Policies</a>.
 *
 * @param request the PutPermissionPolicy request
 * @return result of the PutPermissionPolicy operation returned by the service
 * @throws WAFInternalErrorException the operation failed because of a system problem; retry
 * @throws WAFStaleDataException the change token supplied has already been used
 * @throws WAFNonexistentItemException the referenced object doesn't exist
 * @throws WAFInvalidPermissionPolicyException the policy violates one of the restrictions above
 * @sample AWSWAFRegional.PutPermissionPolicy
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/PutPermissionPolicy"
 *      target="_top">AWS API Documentation</a>
 */ @ Override public PutPermissionPolicyResult putPermissionPolicy ( PutPermissionPolicyRequest request ) { } }
// Run the standard pre-execution hooks (request handlers/metrics), then delegate
// to the generated executor that performs the actual service call.
request = beforeClientExecution ( request ) ; return executePutPermissionPolicy ( request ) ;
public class HintManager { /**
 * Get the table sharding values registered for the given logic table via hints.
 *
 * @ param logicTable logic table name
 * @ return the registered table sharding values; an immutable empty list when no
 * HintManager is active on the current thread.
 * NOTE(review): when a HintManager IS active but no values were registered for this
 * table, the underlying map lookup may return null - callers should handle that.
 */ public static Collection < Comparable < ? > > getTableShardingValues ( final String logicTable ) { } }
// No thread-local holder -> no hints in effect: return an empty list;
// otherwise look up whatever values were registered for this logic table.
return null == HINT_MANAGER_HOLDER . get ( ) ? Collections . < Comparable < ? > > emptyList ( ) : HINT_MANAGER_HOLDER . get ( ) . tableShardingValues . get ( logicTable ) ;
public class ContentTypeNormaliserImpl { /** * EG : Content - Type : text / html ; charset = utf - 8 */ private String getEncoding ( final String contentType ) { } }
if ( contentType == null || ! contentType . contains ( CHARSET ) ) { return DEFAULT_ENCODING ; } String encoding = null ; try { encoding = contentType . substring ( contentType . indexOf ( CHARSET ) + CHARSET . length ( ) ) ; encoding = encoding . toLowerCase ( ) . replaceAll ( "\"" , "" ) ; } catch ( Exception e ) { // Extraction from the string failed . } if ( ! validEncodings . contains ( encoding ) ) { LOGGER . warn ( "Invalid Encoding '{}' - using default - '{}'" , encoding , DEFAULT_ENCODING ) ; encoding = DEFAULT_ENCODING ; } return encoding ;
public class Response { /**
 * Writes the response (or the first chunk of a chunked response) to the channel.
 *
 * @ return the write future of the flushed response
 * @ throws Exception if the configured after-filter throws
 */ public ChannelFuture respond ( ) throws Exception { } }
// For chunked response , this method is only called to respond the 1st chunk ,
// next chunks are responded directly by respondXXX
if ( nonChunkedResponseOrFirstChunkSent ) throwDoubleResponseError ( ) ;
// Run after filter
if ( server . after ( ) != null ) { server . after ( ) . run ( request , this ) ; }
ChannelFuture future = channel . writeAndFlush ( response ) ;
// Do not handle keep alive :
// * If XSendFile or XSendResource is used , because they will handle keep alive in their own way
// * If the response is chunked , because respondLastChunk will be handle keep alive
if (
// ! XSendFile . isHeaderSet ( response ) & &
// ! XSendResource . isHeaderSet ( response ) & &
! HttpUtil . isTransferEncodingChunked ( response ) ) {
NoRealPipelining . if_keepAliveRequest_then_resumeReading_else_closeOnComplete ( request , channel , future ) ;
}
// Mark the first write as done so a second respond() call fails fast; a
// non-chunked response is fully responded at this point.
nonChunkedResponseOrFirstChunkSent = true ;
if ( ! HttpUtil . isTransferEncodingChunked ( response ) ) { doneResponding = true ; }
return future ;
public class InjectionProviders { /** * Creates new supplier containing all providers in a new set . * @ param suppliers * vararg array of existing suppliers * @ return new instance containing all providers */ private static InjectionProviderInstancesSupplier mergeSuppliers ( final InjectionProviderInstancesSupplier ... suppliers ) { } }
final Set < InjectionProvider < ? > > result = new LinkedHashSet < InjectionProvider < ? > > ( ) ; if ( suppliers != null && suppliers . length > 0 ) { for ( final InjectionProviderInstancesSupplier supplier : suppliers ) { result . addAll ( supplier . get ( ) ) ; } } return new InjectionProviderInstancesSupplier ( ) { @ Override public Set < InjectionProvider < ? > > get ( ) { return result ; } } ;
public class ScriptBuilderFragment { /** * Generate the script builder default implementation . */ protected void generateScriptBuilderImpl ( ) { } }
final List < StringConcatenationClient > topElements = generateTopElements ( false , false ) ; final TypeReference script = getScriptBuilderImpl ( ) ; final TypeReference scriptInterface = getScriptBuilderInterface ( ) ; final StringConcatenationClient content = new StringConcatenationClient ( ) { @ Override protected void appendTo ( TargetStringConcatenation it ) { it . append ( "@SuppressWarnings(\"all\")" ) ; // $ NON - NLS - 1 $ it . newLine ( ) ; it . append ( "public class " ) ; // $ NON - NLS - 1 $ it . append ( script . getSimpleName ( ) ) ; it . append ( " extends " ) ; // $ NON - NLS - 1 $ it . append ( getAbstractBuilderImpl ( ) ) ; it . append ( " implements " ) ; // $ NON - NLS - 1 $ it . append ( scriptInterface ) ; it . append ( " {" ) ; // $ NON - NLS - 1 $ it . newLineIfNotEmpty ( ) ; it . newLine ( ) ; it . append ( generateFieldsAndMethods ( false , false ) ) ; for ( final StringConcatenationClient element : topElements ) { it . append ( element ) ; } it . append ( "}" ) ; // $ NON - NLS - 1 $ it . newLineIfNotEmpty ( ) ; } } ; final JavaFileAccess javaFile = getFileAccessFactory ( ) . createJavaFile ( script , content ) ; javaFile . writeTo ( getSrcGen ( ) ) ;
public class CacheInstance { /** * produces nice ascii text */ public String fancyFormat ( ) { } }
StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; pw . println ( "[" + name + "]" ) ; for ( int i = 0 ; configEntries != null && i < configEntries . length ; i ++ ) { pw . println ( "[CacheEntry " + i + "]" ) ; pw . println ( configEntries [ i ] . fancyFormat ( ) ) ; } return sw . toString ( ) ;
public class Quicksortables { /** * Returns the index of the median of the three indexed integers . */ private static int med3 ( Quicksortable q , int a , int b , int c ) { } }
return ( q . compare ( a , b ) < 0 ? ( q . compare ( b , c ) < 0 ? b : q . compare ( a , c ) < 0 ? c : a ) : ( q . compare ( b , c ) > 0 ? b : q . compare ( a , c ) > 0 ? c : a ) ) ;
public class Client { /**
 * Make a PUT API call.
 * @ param path
 * The PUT path . Include any query parameters after ?
 * @ param params
 * The parameters to be passed in the body of the call as JSON
 * @ return A JSON object .
 * @ throws APIError
 * If an error occurs .
 */ public JSONObject put ( String path , Map < String , Object > params ) throws APIError { } }
// Thin wrapper: delegate to the generic request dispatcher with the PUT verb.
return request ( Verb . PUT , path , params ) ;
public class KeyEvent { /** * Get the special key representation , { @ link Keys } , of the supplied character if there is one . If * there is no special key tied to this character , null will be returned . * @ param key unicode character code * @ return special key linked to the character code , or null if character is not a special key */ private static Keys getKeyFromUnicode ( char key ) { } }
for ( Keys unicodeKey : Keys . values ( ) ) { if ( unicodeKey . charAt ( 0 ) == key ) { return unicodeKey ; } } return null ;
public class ParserTrainer { /** * 保存模型 * 以序列化的方式保存模型 * @ param models * 模型参数 * @ param factory * @ throws IOException */ public static void saveModels ( String modelfile , Linear [ ] models , AlphabetFactory factory ) throws IOException { } }
ObjectOutputStream outstream = new ObjectOutputStream ( new GZIPOutputStream ( new FileOutputStream ( modelfile ) ) ) ; outstream . writeObject ( factory ) ; outstream . writeObject ( models ) ; outstream . close ( ) ;
public class PreferenceFragment { /**
 * Initializes the preference , which allows to display the applications , which are suited for
 * handling an intent . Clicking the preference ( re ) builds and shows the intent bottom sheet .
 */ private void initializeShowIntentBottmSheetPreference ( ) { } }
// Look up the preference by its key resource and attach a click listener that
// builds and shows the intent bottom sheet.
Preference showIntentBottomSheetPreference = findPreference ( getString ( R . string . show_intent_bottom_sheet_preference_key ) ) ;
showIntentBottomSheetPreference . setOnPreferenceClickListener ( new OnPreferenceClickListener ( ) {
@ Override public boolean onPreferenceClick ( Preference preference ) {
initializeIntentBottomSheet ( ) ;
intentBottomSheet . show ( ) ;
// true: the click has been consumed.
return true ;
} } ) ;
public class MigrationManager { /** * Sets the active migration if none is set and returns { @ code null } , otherwise returns the currently set active migration . * Acquires the partition service lock . */ public MigrationInfo setActiveMigration ( MigrationInfo migrationInfo ) { } }
partitionServiceLock . lock ( ) ; try { if ( activeMigrationInfo == null ) { activeMigrationInfo = migrationInfo ; return null ; } if ( ! activeMigrationInfo . equals ( migrationInfo ) ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( "Active migration is not set: " + migrationInfo + ". Existing active migration: " + activeMigrationInfo ) ; } } return activeMigrationInfo ; } finally { partitionServiceLock . unlock ( ) ; }
public class WikibaseDataEditor { /** * Updates statements of the given document . The document should be the * current revision of the data that is to be updated . The updates are * computed with respect to the data found in the document , making sure that * no redundant deletions or duplicate insertions happen . The references of * duplicate statements will be merged . * The generic type T of this method must be a general interface such as * { @ link ItemDocument } , { @ link PropertyDocument } , or * { @ link StatementDocument } . Specific implementations of these interfaces * are not permitted . * @ param currentDocument * the document that is to be updated ; needs to have a correct * revision id and entity id * @ param addStatements * the list of statements to be added or updated ; statements with * empty statement id will be added ; statements with non - empty * statement id will be updated ( if such a statement exists ) * @ param deleteStatements * the list of statements to be deleted ; statements will only be * deleted if they are present in the current document ( in * exactly the same form , with the same id ) * @ param summary * short edit summary * @ return the updated document * @ throws MediaWikiApiErrorException * if the API returns errors * @ throws IOException * if there are IO problems , such as missing network connection */ @ SuppressWarnings ( "unchecked" ) public < T extends StatementDocument > T updateStatements ( T currentDocument , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { } }
StatementUpdate statementUpdate = new StatementUpdate ( currentDocument , addStatements , deleteStatements ) ; statementUpdate . setGuidGenerator ( guidGenerator ) ; if ( statementUpdate . isEmptyEdit ( ) ) { return currentDocument ; } else { return ( T ) this . wbEditingAction . wbEditEntity ( currentDocument . getEntityId ( ) . getId ( ) , null , null , null , statementUpdate . getJsonUpdateString ( ) , false , this . editAsBot , currentDocument . getRevisionId ( ) , summary ) ; }
public class MaskConverter { /** * Convert and move string to this field . * @ param strString the state to set the data to . * @ param bDisplayOption Display the data on the screen if true . * @ param iMoveMode INIT , SCREEN , or READ move mode . * @ return The error code ( or NORMAL _ RETURN ) . */ public int setString ( String strValue , boolean bDisplayOption , int iMoveMode ) { } }
if ( ( strValue == null ) || ( strValue . length ( ) == 0 ) ) return super . setString ( strValue , bDisplayOption , iMoveMode ) ; // Don ' t trip change or display if ( strValue . charAt ( 0 ) == FILLER ) return DBConstants . NORMAL_RETURN ; return super . setString ( strValue , bDisplayOption , iMoveMode ) ;
public class StatementFusion {
    /**
     * Given a block, fuses a run of statements into one using comma expressions.
     *
     * @param parent the parent that contains the statements
     * @param first  the first statement to fuse (inclusive)
     * @param last   the last statement to fuse (exclusive)
     * @return a single statement containing all the fused statements
     */
    private static Node fuseIntoOneStatement(Node parent, Node first, Node last) {
        // Only one statement in the run: nothing to fuse.
        if (first.getNext() == last) {
            return first;
        }
        // Fold each subsequent statement's expression into a growing comma tree,
        // detaching the consumed statements from the parent as we go.
        Node accumulated = first.removeFirstChild();
        Node current = first.getNext();
        while (current != last) {
            accumulated = fuseExpressionIntoExpression(accumulated, current.removeFirstChild());
            Node following = current.getNext();
            parent.removeChild(current);
            current = following;
        }
        // The first EXPR_RESULT now carries the comma tree of all fused statements.
        first.addChildToBack(accumulated);
        return first;
    }
}
public class Selector { /** * Find elements matching selector . * @ param query CSS selector * @ param roots root elements to descend into * @ return matching elements , empty if none */ public static Elements select ( String query , Iterable < Element > roots ) { } }
Validate . notEmpty ( query ) ; Validate . notNull ( roots ) ; Evaluator evaluator = QueryParser . parse ( query ) ; ArrayList < Element > elements = new ArrayList < > ( ) ; IdentityHashMap < Element , Boolean > seenElements = new IdentityHashMap < > ( ) ; // dedupe elements by identity , not equality for ( Element root : roots ) { final Elements found = select ( evaluator , root ) ; for ( Element el : found ) { if ( ! seenElements . containsKey ( el ) ) { elements . add ( el ) ; seenElements . put ( el , Boolean . TRUE ) ; } } } return new Elements ( elements ) ;
public class GetUrlTaskRunner { /** * Build URL */ public String performTask ( String taskParameters ) { } }
GetUrlTaskParameters taskParams = GetUrlTaskParameters . deserialize ( taskParameters ) ; String spaceId = taskParams . getSpaceId ( ) ; String contentId = taskParams . getContentId ( ) ; String resourcePrefix = taskParams . getResourcePrefix ( ) ; log . info ( "Performing " + TASK_NAME + " task with parameters: spaceId=" + spaceId + ", contentId=" + contentId + ", resourcePrefix=" + resourcePrefix ) ; // Will throw if bucket does not exist String bucketName = unwrappedS3Provider . getBucketName ( spaceId ) ; GetUrlTaskResult taskResult = new GetUrlTaskResult ( ) ; // Ensure that streaming service is on checkThatStreamingServiceIsEnabled ( spaceId , TASK_NAME ) ; // Retrieve the existing distribution for the given space StreamingDistributionSummary existingDist = getExistingDistribution ( bucketName ) ; if ( null == existingDist ) { throw new UnsupportedTaskException ( TASK_NAME , "The " + TASK_NAME + " task can only be used after a space has " + "been configured to enable open streaming. Use " + StorageTaskConstants . ENABLE_STREAMING_TASK_NAME + " to enable open streaming on this space." ) ; } String domainName = existingDist . getDomainName ( ) ; // Verify that this is an open distribution if ( ! existingDist . getTrustedSigners ( ) . getItems ( ) . isEmpty ( ) ) { throw new UnsupportedTaskException ( TASK_NAME , "The " + TASK_NAME + " task cannot be used to request a stream " + "from a secure distribution. Use " + StorageTaskConstants . GET_SIGNED_URL_TASK_NAME + " instead." ) ; } // Create the resource Id , which may or may not require a prefix // ( such as " mp4 : " for an mp4 file ) depending on the intended player String resourceId = contentId ; if ( null != resourcePrefix && ! resourcePrefix . equals ( "" ) ) { resourceId = resourcePrefix + contentId ; } taskResult . setStreamUrl ( "rtmp://" + domainName + "/cfx/st/" + resourceId ) ; String toReturn = taskResult . serialize ( ) ; log . 
info ( "Result of " + TASK_NAME + " task: " + toReturn ) ; return toReturn ;
public class RqAuth { /** * Authenticated user . * @ return User identity * @ throws IOException If fails */ public Identity identity ( ) throws IOException { } }
final Iterator < String > headers = new RqHeaders . Base ( this ) . header ( this . header ) . iterator ( ) ; final Identity user ; if ( headers . hasNext ( ) ) { user = new CcPlain ( ) . decode ( new Utf8String ( headers . next ( ) ) . asBytes ( ) ) ; } else { user = Identity . ANONYMOUS ; } return user ;
public class PrincipalNameTransformerUtils { /** * New principal name transformer . * @ param p the p * @ return the principal name transformer */ public static PrincipalNameTransformer newPrincipalNameTransformer ( final PrincipalTransformationProperties p ) { } }
val chain = new ChainingPrincipalNameTransformer ( ) ; if ( p . getGroovy ( ) . getLocation ( ) != null ) { val t = new GroovyPrincipalNameTransformer ( p . getGroovy ( ) . getLocation ( ) ) ; chain . addTransformer ( t ) ; } if ( StringUtils . isNotBlank ( p . getPattern ( ) ) ) { val t = new RegexPrincipalNameTransformer ( p . getPattern ( ) ) ; chain . addTransformer ( t ) ; } if ( StringUtils . isNotBlank ( p . getPrefix ( ) ) || StringUtils . isNotBlank ( p . getSuffix ( ) ) ) { val t = new PrefixSuffixPrincipalNameTransformer ( ) ; t . setPrefix ( p . getPrefix ( ) ) ; t . setSuffix ( p . getSuffix ( ) ) ; chain . addTransformer ( t ) ; } else { chain . addTransformer ( new NoOpPrincipalNameTransformer ( ) ) ; } if ( p . getCaseConversion ( ) == PrincipalTransformationProperties . CaseConversion . UPPERCASE ) { val t = new ConvertCasePrincipalNameTransformer ( ) ; t . setToUpperCase ( true ) ; chain . addTransformer ( t ) ; } if ( p . getCaseConversion ( ) == PrincipalTransformationProperties . CaseConversion . LOWERCASE ) { val t = new ConvertCasePrincipalNameTransformer ( ) ; t . setToUpperCase ( false ) ; chain . addTransformer ( t ) ; } return chain ;
public class IdGenerator { /**
 * Generates a 64 - bit id .
 * Format : { @ code < 41 - bit : timestamp > < 10 - bit : node - id > < 13 - bit : sequence - number > } . Where
 * { @ code timestamp } is in milliseconds , minus the epoch .
 * Synchronized so that only one thread at a time advances the
 * timestamp / sequence pair ; ids are strictly increasing per node .
 * @ return the generated id
 */ synchronized public long generateId64 ( ) { } }
long timestamp = System . currentTimeMillis ( ) ;
long sequence = 0 ;
boolean done = false ;
while ( ! done ) {
done = true ;
// Guard against the clock moving backwards: spin until we are at or past
// the last timestamp handed out.
while ( timestamp < lastTimestampMillisec . get ( ) ) { timestamp = waitTillNextMillisec ( timestamp ) ; }
if ( timestamp == lastTimestampMillisec . get ( ) ) {
// increase sequence
sequence = sequenceMillisec . incrementAndGet ( ) ;
if ( sequence > MAX_SEQUENCE_64 ) {
// reset sequence
// Sequence exhausted for this millisecond: wait for the next one and retry.
sequenceMillisec . set ( sequence = 0 ) ;
timestamp = waitTillNextMillisec ( timestamp ) ;
done = false ;
} } }
sequenceMillisec . set ( sequence ) ;
lastTimestampMillisec . set ( timestamp ) ;
// Pack: (timestamp - epoch) | node-id template | sequence.
timestamp = ( timestamp - TIMESTAMP_EPOCH ) & MASK_TIMESTAMP_64 ;
return timestamp << SHIFT_TIMESTAMP_64 | template64 | ( sequence & MASK_SEQUENCE_64 ) ;
public class Offer { /**
 * Accepts the offer by sending an accept stanza to the workgroup .
 * Note : the accepted flag is set optimistically without waiting for a reply .
 * @ throws NotConnectedException
 * @ throws InterruptedException
 */ public void accept ( ) throws NotConnectedException , InterruptedException { } }
Stanza acceptPacket = new AcceptPacket ( this . session . getWorkgroupJID ( ) ) ;
connection . sendStanza ( acceptPacket ) ;
// TODO : listen for a reply .
accepted = true ;
public class CommerceUserSegmentCriterionLocalServiceBaseImpl { /**
 * Returns the number of rows matching the dynamic query .
 * @ param dynamicQuery the dynamic query
 * @ param projection the projection to apply to the query
 * @ return the number of rows matching the dynamic query
 */ @ Override public long dynamicQueryCount ( DynamicQuery dynamicQuery , Projection projection ) { } }
// Delegate straight to the persistence layer's count query.
return commerceUserSegmentCriterionPersistence . countWithDynamicQuery ( dynamicQuery , projection ) ;
public class NCBIQBlastService { /**
 * Converts the given GenBank GID to a String and delegates to
 * { @ link # sendAlignmentRequest ( String , RemotePairwiseAlignmentProperties ) } .
 * @ param gid the GenBank GID
 * @ param rpa the alignment properties
 * @ return the request id returned by the String - based overload
 */ public String sendAlignmentRequest ( int gid , RemotePairwiseAlignmentProperties rpa ) throws Exception { } }
// Convenience overload: stringify the numeric GID and reuse the main entry point.
return sendAlignmentRequest ( Integer . toString ( gid ) , rpa ) ;
public class HttpHeaders { /**
 * Sets a header on the message , replacing any existing values for that name .
 * @ deprecated Use { @ link # set ( CharSequence , Object ) } on the message ' s headers instead .
 * @ see # setHeader ( HttpMessage , CharSequence , Object )
 */ @ Deprecated public static void setHeader ( HttpMessage message , String name , Object value ) { } }
// Deprecated static helper: forward to the instance-based headers API.
message . headers ( ) . set ( name , value ) ;
public class Envelope { /** * / * When you set a point , all subsequent points are reset . */ public void set_point ( int point , int tick , int ampl ) { } }
if ( point >= 0 && point < ticks . length ) { if ( point == 0 ) { tick = 0 ; } if ( point > 0 ) { if ( tick < ticks [ point - 1 ] ) { /* Simple guess at where the point is supposed to be . */ tick += 256 ; } if ( tick <= ticks [ point - 1 ] ) { System . out . println ( "Envelope: Point not valid (" + tick + " <= " + ticks [ point - 1 ] + ")" ) ; tick = ticks [ point - 1 ] + 1 ; } } ticks [ point ] = tick ; ampls [ point ] = ampl ; point += 1 ; while ( point < ticks . length ) { ticks [ point ] = ticks [ point - 1 ] + 1 ; ampls [ point ] = 0 ; point += 1 ; } }
public class CsvFileExtensions { /** * Read an csv - file and puts them in a String - array . * @ param csvData * The csv - file with the data . * @ param encoding * The encoding to read . * @ return The data from the csv - file as a String - array . * @ throws FileNotFoundException * the file not found exception * @ throws IOException * When an io - error occurs . */ public static String [ ] sortData ( final File csvData , final String encoding ) throws FileNotFoundException , IOException { } }
final List < String > fn = new ArrayList < > ( ) ; try ( BufferedReader reader = ( BufferedReader ) StreamExtensions . getReader ( csvData , encoding , false ) ) { // the line . String line = null ; int index , last ; // read all lines from the file do { line = reader . readLine ( ) ; // if null break the loop if ( line == null ) { break ; } // initialize the last last = 0 ; // get the index from the comma index = line . indexOf ( ',' ) ; while ( index != - 1 ) { // get the next firstname and remove the whitespaces . final String firstname = line . substring ( last , index ) . trim ( ) ; // added to the list fn . add ( firstname ) ; // set last to the next position last = index + 1 ; // get the next index from the comma in the line index = line . indexOf ( ',' , last ) ; } } while ( true ) ; } catch ( final IOException e ) { throw e ; } // convert the list to a String array . final String data [ ] = fn . toArray ( new String [ fn . size ( ) ] ) ; // and sort the array . Arrays . sort ( data ) ; return data ;
public class AfplibFactoryImpl { /**
 * Converts an IOBXoaOrent data - type value to its String form ( null - safe ) .
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @ generated
 */ public String convertIOBXoaOrentToString ( EDataType eDataType , Object instanceValue ) { } }
// EMF-generated converter: null maps to null, everything else to toString().
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class DateFormat { /** * Format a date using { @ link JsonSerializerParameters } or default values : { @ link # DATE _ FORMAT _ STR _ ISO8601 } and { @ link # UTC _ TIMEZONE } * @ param date date to format * @ return the formatted date * @ param params a { @ link com . github . nmorel . gwtjackson . client . JsonSerializerParameters } object . */ public static String format ( JsonSerializerParameters params , Date date ) { } }
DateTimeFormat format ; if ( null == params . getPattern ( ) ) { format = DateFormat . DATE_FORMAT_STR_ISO8601 ; } else { format = DateTimeFormat . getFormat ( params . getPattern ( ) ) ; } TimeZone timeZone ; if ( null == params . getTimezone ( ) ) { timeZone = DateFormat . UTC_TIMEZONE ; } else { timeZone = params . getTimezone ( ) ; } return format ( format , timeZone , date ) ;
public class ProgramChromosome { /**
 * Create a new program chromosome with the defined depth . This method will
 * create a <em> full </em> program tree .
 * @ param depth the depth of the created ( full ) program tree
 * @ param operations the allowed non - terminal operations
 * @ param terminals the allowed terminal operations
 * @ param < A > the operation type
 * @ return a new program chromosome from the given ( flattened ) program tree
 * @ throws NullPointerException if one of the parameters is { @ code null }
 * @ throws IllegalArgumentException if the { @ code depth } is smaller than zero
 */ public static < A > ProgramChromosome < A > of ( final int depth , final ISeq < ? extends Op < A > > operations , final ISeq < ? extends Op < A > > terminals ) { } }
// Delegate with the default validator. The intersection cast
// (Predicate & Serializable) keeps the method reference serializable so the
// chromosome itself remains serializable.
return of ( depth , ( Predicate < ? super ProgramChromosome < A > > & Serializable ) ProgramChromosome :: isSuperValid , operations , terminals ) ;
public class IntTupleStreams { /**
 * Returns a stream that returns { @ link MutableIntTuple } s in the given
 * range , in colexicographical iteration order . <br>
 * <br>
 * Copies of the given tuples will be stored internally . <br>
 * <br>
 * Also see <a href="../../package-summary.html#IterationOrder">
 * Iteration Order </a>
 * @ param min The minimum values , inclusive
 * @ param max The maximum values , exclusive
 * @ return The stream
 * @ throws IllegalArgumentException If the given tuples do not
 * have the same { @ link Tuple # getSize ( ) size }
 */ public static Stream < MutableIntTuple > colexicographicalStream ( IntTuple min , IntTuple max ) { } }
// Delegate to the generic range-stream factory with the colexicographical order.
return stream ( Order . COLEXICOGRAPHICAL , min , max ) ;
public class ExtForwFeatureImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . primitives . MAPAsnPrimitive # encodeData ( org . mobicents . protocols . asn . AsnOutputStream ) */ public void encodeData ( AsnOutputStream asnOs ) throws MAPException { } }
if ( this . ssStatus == null ) throw new MAPException ( "Error while encoding " + _PrimitiveName + ": ssStatus required." ) ; try { if ( this . basicService != null ) ( ( ExtBasicServiceCodeImpl ) this . basicService ) . encodeAll ( asnOs ) ; ( ( ExtSSStatusImpl ) this . ssStatus ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_ss_Status ) ; if ( this . forwardedToNumber != null ) ( ( ISDNAddressStringImpl ) this . forwardedToNumber ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_forwardedToNumber ) ; if ( this . forwardedToSubaddress != null ) ( ( ISDNSubaddressStringImpl ) this . forwardedToSubaddress ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_forwardedToSubaddress ) ; if ( this . forwardingOptions != null ) ( ( ExtForwOptionsImpl ) this . forwardingOptions ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_forwardingOptions ) ; if ( this . noReplyConditionTime != null ) asnOs . writeInteger ( Tag . CLASS_CONTEXT_SPECIFIC , _TAG_noReplyConditionTime , this . noReplyConditionTime ) ; if ( this . extensionContainer != null ) ( ( MAPExtensionContainerImpl ) this . extensionContainer ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_extensionContainer ) ; if ( this . longForwardedToNumber != null ) ( ( FTNAddressStringImpl ) this . longForwardedToNumber ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_longForwardedToNumber ) ; } catch ( IOException e ) { throw new MAPException ( "IOException when encoding " + _PrimitiveName + ": " + e . getMessage ( ) , e ) ; } catch ( AsnException e ) { throw new MAPException ( "AsnException when encoding " + _PrimitiveName + ": " + e . getMessage ( ) , e ) ; }
public class LittleEndianDataInputStream { /** * / * ( non - Javadoc ) * @ see java . io . DataInput # readFully ( byte [ ] ) */ @ Override public void readFully ( byte [ ] b ) throws IOException { } }
if ( inner . read ( b ) < b . length ) { throw new EOFException ( ) ; }
public class SystemPropertiesEnvHolder { /** * 设置配置信息 */ public String set ( String key , String value ) { } }
return System . setProperty ( key , value ) ;
public class PatternStream {

    /**
     * Applies a process function to each detected pattern sequence. For each pattern
     * sequence the provided {@link PatternProcessFunction} is called. To also process
     * timed-out partial matches, the function may additionally implement
     * {@link TimedOutPartialMatchHandler}.
     *
     * @param patternProcessFunction the pattern process function, called for each detected
     *        pattern sequence
     * @param <R> type of the resulting elements
     * @return {@link DataStream} containing the resulting elements produced by the
     *         pattern process function
     */
    public <R> SingleOutputStreamOperator<R> process(final PatternProcessFunction<T, R> patternProcessFunction) {
        // Derive the function's output type via Flink's reflective type extraction;
        // the input type (type argument index 0, output index 1) comes from the builder.
        final TypeInformation<R> returnType = TypeExtractor.getUnaryOperatorReturnType(
                patternProcessFunction,
                PatternProcessFunction.class,
                0,
                1,
                TypeExtractor.NO_INDEX,
                builder.getInputType(),
                null,
                false);
        // Delegate to the overload that takes an explicit return type.
        return process(patternProcessFunction, returnType);
    }
}
public class GAEUtils {

    /**
     * Looks inside all jars under /WEB-INF/lib/ for entries whose name starts with
     * the given prefix and ends with the given suffix. This simplification works on
     * GAE because no JSF libraries live outside /WEB-INF/lib.
     *
     * @param context     the external (servlet) context used to enumerate resource paths
     * @param classloader classloader used to resolve matching entries into URLs
     * @param filter      comma-separated list of jar-name wildcards to scan; the literal
     *                    value "none" disables scanning entirely (returns null)
     * @param prefix      required entry-name prefix
     * @param suffix      required entry-name suffix
     * @return the matching URLs (sorted, duplicate-free), or {@code null} if nothing
     *         matched or scanning was disabled
     * @throws IOException if a jar cannot be opened or read
     */
    public static Collection<URL> searchInWebLib(ExternalContext context, ClassLoader classloader, String filter, String prefix, String suffix) throws IOException {
        if (!filter.equals("none")) {
            // Each element is a wildcard pattern for jar file names to scan.
            String[] jarFilesToScan = StringUtils.trim(StringUtils.splitLongString(filter, ','));
            // Lazily created so we can return null when nothing matches.
            Set<URL> urlSet = null;
            Set<String> paths = context.getResourcePaths(WEB_LIB_PREFIX);
            if (paths != null) {
                for (Object pathObject : paths) {
                    String path = (String) pathObject;
                    if (path.endsWith(".jar") && wildcardMatch(path, jarFilesToScan, WEB_LIB_PREFIX)) {
                        // GAE does not use WAR format, so the app is just uncompressed in a directory.
                        // What we need here is just take the path of the file, and open the file as a
                        // jar file. Then, if the jar should be scanned, try to find the required file.
                        URL jarUrl = new URL("jar:" + context.getResource(path).toExternalForm() + "!/");
                        // NOTE(review): the JarFile obtained here is never closed; presumably
                        // JarUtils.getJarFile caches/owns it — confirm before adding a close().
                        JarFile jarFile = JarUtils.getJarFile(jarUrl);
                        Enumeration<JarEntry> entries = jarFile.entries();
                        while (entries.hasMoreElements()) {
                            JarEntry entry = entries.nextElement();
                            if (entry.isDirectory()) {
                                continue; // This is a directory
                            }
                            String name = entry.getName();
                            if (!name.startsWith(prefix)) {
                                continue; // Attribute files
                            }
                            if (name.endsWith(suffix)) {
                                // Get it from classloader, because no URL can be
                                // derived from JarEntry
                                Enumeration<URL> alternateFacesConfigs = classloader.getResources(name);
                                while (alternateFacesConfigs.hasMoreElements()) {
                                    if (urlSet == null) {
                                        // TreeSet keeps the result deterministic and deduplicated.
                                        urlSet = new TreeSet<URL>();
                                    }
                                    urlSet.add(alternateFacesConfigs.nextElement());
                                }
                            }
                        }
                    }
                }
            }
            return urlSet;
        }
        return null;
    }
}
public class CmsSecurityManager {

    /**
     * Moves a resource.<p>
     *
     * The destination path must be an absolute, valid and existing VFS path; relative
     * paths are not supported. The moved resource is always locked to the current user
     * after the move. If the target already exists it is overwritten.<p>
     *
     * @param context the current request context
     * @param source the resource to move
     * @param destination the destination name with complete path
     * @throws CmsException if something goes wrong
     * @throws CmsSecurityException if the resource could not be moved
     * @see CmsObject#moveResource(String, String)
     * @see org.opencms.file.types.I_CmsResourceType#moveResource(CmsObject, CmsSecurityManager, CmsResource, String)
     */
    public void moveResource(CmsRequestContext context, CmsResource source, String destination) throws CmsException, CmsSecurityException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            checkOfflineProject(dbc);
            // checking if the destination folder exists and is not marked as deleted
            readResource(context, CmsResource.getParentFolder(destination), CmsResourceFilter.IGNORE_EXPIRATION);
            // source needs both read and write permission, and must be free of system locks
            checkPermissions(dbc, source, CmsPermissionSet.ACCESS_READ, true, CmsResourceFilter.ALL);
            checkPermissions(dbc, source, CmsPermissionSet.ACCESS_WRITE, true, CmsResourceFilter.ALL);
            checkSystemLocks(dbc, source);
            // check write permissions for subresources in case of moving a folder
            if (source.isFolder()) {
                // the driver performs the per-subresource permission check when this
                // request attribute is set
                dbc.getRequestContext().setAttribute(I_CmsVfsDriver.REQ_ATTR_CHECK_PERMISSIONS, Boolean.TRUE);
                try {
                    m_driverManager.getVfsDriver(dbc).moveResource(dbc, dbc.currentProject().getUuid(), source, destination);
                } catch (CmsDataAccessException e) {
                    // unwrap the permission violation exception
                    if (e.getCause() instanceof CmsPermissionViolationException) {
                        throw (CmsPermissionViolationException) e.getCause();
                    } else {
                        throw e;
                    }
                }
                dbc.getRequestContext().removeAttribute(I_CmsVfsDriver.REQ_ATTR_CHECK_PERMISSIONS);
            }
            // delegate the actual move to the internal implementation
            moveResource(dbc, source, destination);
        } catch (Exception e) {
            // report errors with site-root-relative paths for readability
            dbc.report(null, Messages.get().container(Messages.ERR_MOVE_RESOURCE_2, dbc.removeSiteRoot(source.getRootPath()), dbc.removeSiteRoot(destination)), e);
        } finally {
            dbc.clear();
        }
    }
}
public class AbstractAzkabanServlet {

    /**
     * Creates a new Velocity page for the given request/response and template, and
     * pre-populates it with the common Azkaban context values (version, branding,
     * notes, timezone, current time, page size, and installed viewer/trigger plugins).
     *
     * @param req      the current servlet request
     * @param resp     the current servlet response
     * @param template the Velocity template to render
     * @return the populated page, ready for further attributes and rendering
     */
    protected Page newPage(final HttpServletRequest req, final HttpServletResponse resp, final String template) {
        final Page page = new Page(req, resp, getApplication().getVelocityEngine(), template);
        page.add("version", jarVersion);
        page.add("azkaban_name", this.name);
        page.add("azkaban_label", this.label);
        page.add("azkaban_color", this.color);
        page.add("note_type", NoteServlet.type);
        page.add("note_message", NoteServlet.message);
        page.add("note_url", NoteServlet.url);
        page.add("timezone", TimeZone.getDefault().getID());
        page.add("currentTime", (new DateTime()).getMillis());
        page.add("size", getDisplayExecutionPageSize());
        // @TODO, allow more than one type of viewer. For time sake, I only install
        // the first one
        if (this.viewerPlugins != null && !this.viewerPlugins.isEmpty()) {
            page.add("viewers", this.viewerPlugins);
            final ViewerPlugin plugin = this.viewerPlugins.get(0);
            page.add("viewerName", plugin.getPluginName());
            page.add("viewerPath", plugin.getPluginPath());
        }
        if (this.triggerPlugins != null && !this.triggerPlugins.isEmpty()) {
            page.add("triggers", this.triggerPlugins);
        }
        return page;
    }
}
public class RedundentExprEliminator {

    /**
     * Creates a new WalkingIterator by cloning the first {@code numSteps} walkers
     * from another WalkingIterator and linking the clones into a fresh chain.
     *
     * @param wi       the iterator from which the steps are taken
     * @param numSteps the number of steps, starting from the first, to copy
     * @return the new iterator owning the cloned walker chain
     */
    protected WalkingIterator createIteratorFromSteps(final WalkingIterator wi, int numSteps) {
        WalkingIterator newIter = new WalkingIterator(wi.getPrefixResolver());
        try {
            // Clone the head walker and re-parent it onto the new iterator.
            AxesWalker walker = (AxesWalker) wi.getFirstWalker().clone();
            newIter.setFirstWalker(walker);
            walker.setLocPathIterator(newIter);
            // Clone and relink each subsequent walker, keeping the chain order.
            for (int i = 1; i < numSteps; i++) {
                AxesWalker next = (AxesWalker) walker.getNextWalker().clone();
                walker.setNextWalker(next);
                next.setLocPathIterator(newIter);
                walker = next;
            }
            // Terminate the copied chain after numSteps walkers; the clone may still
            // point at the source iterator's remaining walkers otherwise.
            walker.setNextWalker(null);
        } catch (CloneNotSupportedException cnse) {
            // Walkers are expected to be cloneable; treat failure as a programming error.
            throw new WrappedRuntimeException(cnse);
        }
        return newIter;
    }
}
public class AnnotationVisitor {

    /**
     * Visit an annotation on a method parameter.
     *
     * <p>Default implementation is an intentional no-op; subclasses interested in
     * parameter annotations override this method.</p>
     *
     * @param p               parameter number, starting at zero (the "this" parameter
     *                        is not counted)
     * @param annotationClass class of the annotation
     * @param map             map from element names to values
     * @param runtimeVisible  true if the annotation is runtime visible
     */
    public void visitParameterAnnotation(int p, @DottedClassName String annotationClass, Map<String, ElementValue> map, boolean runtimeVisible) {
        // intentionally empty
    }
}
public class Item { /** * Returns the value of the specified attribute in the current item as a * string ; or null if the attribute either doesn ' t exist or the attribute * value is null . * @ see # isNull ( String ) # isNull ( String ) to check if the attribute value is * null . * @ see # isPresent ( String ) # isPresent ( String ) to check if the attribute * value is present . */ public String getString ( String attrName ) { } }
Object val = attributes . get ( attrName ) ; return valToString ( val ) ;
public class VaadinForHeroku { /** * Add an application listener to the configuration of the server . * @ param listeners the application listener ( s ) to add to the server configuration . * @ since 0.3 */ public VaadinForHeroku withApplicationListener ( final String ... listeners ) { } }
checkVarArgsArguments ( listeners ) ; this . applicationListeners . addAll ( Arrays . asList ( listeners ) ) ; return self ( ) ;
public class ChainWriter {

    /**
     * Encodes a JavaScript string for use in an XML attribute context. Quotes are
     * added around the string. If the string is translated, translation-lookup
     * markup is added to aid server-side translation of JavaScript values.
     *
     * @param value the value to encode; {@link Writable} values that are not fast
     *              to stringify are streamed instead of converted to a String
     * @return this writer, for call chaining
     * @throws IOException if writing fails
     * @see Coercion#toString(java.lang.Object, com.aoindustries.util.i18n.BundleLookup.MarkupType)
     */
    public ChainWriter encodeJavaScriptStringInXmlAttribute(Object value) throws IOException {
        // Two stage encoding:
        //   1) Text -> JavaScript (with quotes added)
        //   2) JavaScript -> XML Attribute
        if (value instanceof Writable && !((Writable) value).isFastToString()) {
            // Avoid unnecessary toString calls: stream the writable through both encoders.
            textInJavaScriptEncoder.writePrefixTo(javaScriptInXhtmlAttributeWriter);
            Coercion.write(value, textInJavaScriptEncoder, javaScriptInXhtmlAttributeWriter);
            textInJavaScriptEncoder.writeSuffixTo(javaScriptInXhtmlAttributeWriter);
        } else {
            String str = Coercion.toString(value);
            BundleLookupMarkup lookupMarkup;
            BundleLookupThreadContext threadContext = BundleLookupThreadContext.getThreadContext(false);
            if (threadContext != null) {
                // Only translated strings registered with the thread context get markup.
                lookupMarkup = threadContext.getLookupMarkup(str);
            } else {
                lookupMarkup = null;
            }
            // Markup prefix/suffix wrap the quoted value so translators can locate it.
            if (lookupMarkup != null) lookupMarkup.appendPrefixTo(MarkupType.JAVASCRIPT, javaScriptInXhtmlAttributeWriter);
            textInJavaScriptEncoder.writePrefixTo(javaScriptInXhtmlAttributeWriter);
            textInJavaScriptEncoder.write(str, javaScriptInXhtmlAttributeWriter);
            textInJavaScriptEncoder.writeSuffixTo(javaScriptInXhtmlAttributeWriter);
            if (lookupMarkup != null) lookupMarkup.appendSuffixTo(MarkupType.JAVASCRIPT, javaScriptInXhtmlAttributeWriter);
        }
        return this;
    }
}
public class ButtonRenderer {

    /**
     * Renders the JavaScript dealing with the click event. A developer-provided
     * onclick handler, if any, precedes the generated navigation code.
     *
     * @param context the current FacesContext
     * @param button  the button component being rendered
     * @return JavaScript such as {@code window.open('/targetView.jsf', '_self');},
     *         possibly prefixed by the user's onclick code
     */
    private String encodeClick(FacesContext context, Button button) {
        String js;
        String userClick = button.getOnclick();
        if (userClick != null) {
            js = userClick; // + COLON;
        } else {
            js = "";
        }
        String fragment = button.getFragment();
        String outcome = button.getOutcome();
        // A '#' inside the outcome doubles as an inline fragment, but may not be
        // combined with an explicit fragment attribute.
        if (null != outcome && outcome.contains("#")) {
            if (null != fragment && fragment.length() > 0) {
                throw new FacesException("Please define the URL fragment either in the fragment attribute or in the outcome attribute, but not both");
            }
            int pos = outcome.indexOf("#");
            fragment = outcome.substring(pos);
            outcome = outcome.substring(0, pos);
        }
        // No outcome but a fragment: navigate within the current page.
        if (outcome == null || outcome.equals("")) {
            if (null != fragment && fragment.length() > 0) {
                if (!fragment.startsWith("#")) {
                    fragment = "#" + fragment;
                }
                js += "window.open('" + fragment + "', '";
                if (button.getTarget() != null)
                    js += button.getTarget();
                else
                    js += "_self";
                js += "');";
                return js;
            }
        }
        // No navigation requested: return only the user's onclick code (if any).
        if (outcome == null || outcome.equals("") || outcome.equals("@none"))
            return js;
        if (canOutcomeBeRendered(button, fragment, outcome)) {
            String url = determineTargetURL(context, button, outcome);
            if (url != null) {
                if (url.startsWith("alert(")) {
                    // determineTargetURL signals an error by returning an alert() call;
                    // that replaces (not appends to) the user's onclick code.
                    js = url;
                } else {
                    if (fragment != null) {
                        if (fragment.startsWith("#")) {
                            url += fragment;
                        } else {
                            url += "#" + fragment;
                        }
                    }
                    js += "window.open('" + url + "', '";
                    if (button.getTarget() != null)
                        js += button.getTarget();
                    else
                        js += "_self";
                    js += "');";
                }
            }
        }
        return js;
    }
}
public class ArrowButtonPainter { /** * Paint the arrow in disabled state . * @ param g the Graphics2D context to paint with . * @ param width the width . * @ param height the height . */ private void paintForegroundDisabled ( Graphics2D g , int width , int height ) { } }
Shape s = decodeArrowPath ( width , height ) ; g . setPaint ( disabledColor ) ; g . fill ( s ) ;
public class GrailsClassUtils { /** * Retrieves a boolean value from a Map for the given key * @ param key The key that references the boolean value * @ param map The map to look in * @ return A boolean value which will be false if the map is null , the map doesn ' t contain the key or the value is false */ public static boolean getBooleanFromMap ( String key , Map < ? , ? > map , boolean defaultValue ) { } }
if ( map == null ) return defaultValue ; if ( map . containsKey ( key ) ) { Object o = map . get ( key ) ; if ( o == null ) { return defaultValue ; } if ( o instanceof Boolean ) { return ( Boolean ) o ; } return Boolean . valueOf ( o . toString ( ) ) ; } return defaultValue ;
public class GetRepresentatives { /** * Returns a representative set of PDB protein chains at the specified sequence * identity cutoff . See http : / / www . pdb . org / pdb / statistics / clusterStatistics . do * for more information . * @ param sequenceIdentity sequence identity threshold * @ return PdbChainKey set of representatives */ public static SortedSet < StructureName > getRepresentatives ( int sequenceIdentity ) { } }
SortedSet < StructureName > representatives = new TreeSet < StructureName > ( ) ; if ( ! seqIdentities . contains ( sequenceIdentity ) ) { System . err . println ( "Error: representative chains are not available for %sequence identity: " + sequenceIdentity ) ; return representatives ; } try { URL u = new URL ( clusterUrl + sequenceIdentity ) ; InputStream stream = URLConnectionTools . getInputStream ( u , 60000 ) ; String xml = null ; if ( stream != null ) { xml = JFatCatClient . convertStreamToString ( stream ) ; SortedSet < String > reps = RepresentativeXMLConverter . fromXML ( xml ) ; for ( String s : reps ) { StructureName k = new StructureName ( s ) ; representatives . add ( k ) ; } } } catch ( Exception e ) { e . printStackTrace ( ) ; } return representatives ;
public class Cob2AvroGenerator {

    /**
     * Given an Avro schema, produce java specific classes.
     *
     * @param avroSchemaSource the Avro schema source text to compile
     * @param avroSchemaFile   the Avro schema file (used by avro for timestamp checking)
     * @param javaTargetFolder the target folder for the generated java classes
     * @throws IOException if compilation fails
     */
    private void avroCompile(String avroSchemaSource, File avroSchemaFile, File javaTargetFolder) throws IOException {
        log.debug("Avro compiler started for: {}", avroSchemaFile);
        // Parse the schema from source text, not from the file.
        Schema.Parser parser = new Schema.Parser();
        Schema schema = parser.parse(avroSchemaSource);
        SpecificCompiler compiler = new CustomSpecificCompiler(schema);
        // Generated string fields use CharSequence rather than avro's Utf8.
        compiler.setStringType(StringType.CharSequence);
        // The file is passed only so avro can skip regeneration via timestamp checks.
        compiler.compileToDestination(avroSchemaFile, javaTargetFolder);
        log.debug("Avro compiler ended for: {}", avroSchemaFile);
    }
}
public class JsonDiff { /** * Returns the comparator for the give field , and nodes . This method can be * overriden to customize comparison logic . */ public JsonComparator getComparator ( List < String > context , JsonNode node1 , JsonNode node2 ) { } }
if ( node1 == null ) { if ( node2 == null ) { return NODIFF_CMP ; } else { return null ; } } else if ( node2 == null ) { return null ; } else { if ( node1 instanceof NullNode ) { if ( node2 instanceof NullNode ) { return NODIFF_CMP ; } else { return null ; } } else if ( node2 instanceof NullNode ) { return null ; } // Nodes are not null , and they are not null node if ( node1 . isContainerNode ( ) && node2 . isContainerNode ( ) ) { if ( node1 instanceof ObjectNode ) { return objectComparator ; } else if ( node1 instanceof ArrayNode ) { return arrayComparator ; } } else if ( node1 . isValueNode ( ) && node2 . isValueNode ( ) ) { return valueComparator ; } } return null ;
public class NetUtils { /** * Returns a valid address for Akka . It returns a String of format ' host : port ' . * When an IPv6 address is specified , it normalizes the IPv6 address to avoid * complications with the exact URL match policy of Akka . * @ param host The hostname , IPv4 or IPv6 address * @ param port The port * @ return host : port where host will be normalized if it is an IPv6 address */ public static String unresolvedHostAndPortToNormalizedString ( String host , int port ) { } }
Preconditions . checkArgument ( port >= 0 && port < 65536 , "Port is not within the valid range," ) ; return unresolvedHostToNormalizedString ( host ) + ":" + port ;
public class PersonaAuthorizer {

    /**
     * Parses a BrowserID/Persona assertion (a dot-separated JWT-like token) into a
     * map containing the asserted email, audience (origin) and expiration date.
     *
     * @param assertion the raw assertion string; must contain at least four
     *                  dot-separated components
     * @return map with {@code ASSERTION_FIELD_EMAIL}, {@code ASSERTION_FIELD_ORIGIN}
     *         and {@code ASSERTION_FIELD_EXPIRATION} populated
     * @throws IllegalArgumentException if the assertion has fewer than four components
     */
    /* package */ static Map<String, Object> parseAssertion(String assertion) {
        // https://github.com/mozilla/id-specs/blob/prod/browserid/index.md
        // http://self-issued.info/docs/draft-jones-json-web-token-04.html
        Map<String, Object> result = new HashMap<String, Object>();
        String[] components = assertion.split("\\."); // split on "."
        if (components.length < 4)
            throw new IllegalArgumentException(String.format(Locale.ENGLISH, "Invalid assertion given, only %d found. Expected 4+", components.length));
        // Component 1 is the identity certificate payload carrying the principal.
        Map<?, ?> body = decodeComponent(components[1]);
        Map<?, ?> principal = (Map<?, ?>) body.get("principal");
        result.put(ASSERTION_FIELD_EMAIL, principal.get("email"));
        // Component 3 is the assertion payload carrying audience and expiry.
        body = decodeComponent(components[3]);
        result.put(ASSERTION_FIELD_ORIGIN, body.get("aud"));
        // "exp" is milliseconds since epoch, per the JWT draft referenced above.
        Long expObject = (Long) body.get("exp");
        Date expDate = new Date(expObject.longValue());
        result.put(ASSERTION_FIELD_EXPIRATION, expDate);
        return result;
    }
}
public class DynamicJasperHelper { /** * Creates a jrxml file * @ param dr * @ param layoutManager * @ param _ parameters * @ param xmlEncoding ( default is UTF - 8 ) * @ param outputStream * @ throws JRException */ public static void generateJRXML ( DynamicReport dr , LayoutManager layoutManager , Map _parameters , String xmlEncoding , OutputStream outputStream ) throws JRException { } }
JasperReport jr = generateJasperReport ( dr , layoutManager , _parameters ) ; if ( xmlEncoding == null ) xmlEncoding = DEFAULT_XML_ENCODING ; JRXmlWriter . writeReport ( jr , outputStream , xmlEncoding ) ;
public class Viewport {

    /**
     * Adds a {@link BlurEvent} handler.
     *
     * @param handler the handler to register
     * @return the handler registration, usable to remove the handler later
     */
    @Override
    public HandlerRegistration addBlurHandler(BlurHandler handler) {
        // ensureHandlers() lazily creates the handler manager on first use.
        return ensureHandlers().addHandler(BlurEvent.getType(), handler);
    }
}
public class CSVInputMarshaller {

    /**
     * Marshalls the given {@code CSVInput} object field-by-field through the
     * protocol marshaller.
     *
     * @param cSVInput           the object to marshall; must not be null
     * @param protocolMarshaller the marshaller that serializes each binding
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(CSVInput cSVInput, ProtocolMarshaller protocolMarshaller) {
        if (cSVInput == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each getter is paired with its precomputed marshalling binding.
            protocolMarshaller.marshall(cSVInput.getFileHeaderInfo(), FILEHEADERINFO_BINDING);
            protocolMarshaller.marshall(cSVInput.getComments(), COMMENTS_BINDING);
            protocolMarshaller.marshall(cSVInput.getQuoteEscapeCharacter(), QUOTEESCAPECHARACTER_BINDING);
            protocolMarshaller.marshall(cSVInput.getRecordDelimiter(), RECORDDELIMITER_BINDING);
            protocolMarshaller.marshall(cSVInput.getFieldDelimiter(), FIELDDELIMITER_BINDING);
            protocolMarshaller.marshall(cSVInput.getQuoteCharacter(), QUOTECHARACTER_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure uniformly as an SdkClientException.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SingleDbJDBCConnection {

    /**
     * Adds a reference record for each value of the given reference property.
     *
     * @param data the reference-typed property whose values identify target nodes
     * @return the number of reference rows added
     * @throws SQLException               on database errors
     * @throws IOException                if a value cannot be read as a string
     * @throws InvalidItemStateException  if the item state is invalid
     * @throws RepositoryException        on repository-level errors
     */
    @Override
    protected int addReference(PropertyData data) throws SQLException, IOException, InvalidItemStateException, RepositoryException {
        List<ValueData> values = data.getValues();
        int added = 0;
        // Each value holds the identifier of a referenced node; the value index (i)
        // is stored alongside so multi-valued references keep their order.
        for (int i = 0; i < values.size(); i++) {
            ValueData vdata = values.get(i);
            String refNodeIdentifier;
            try {
                refNodeIdentifier = ValueDataUtil.getString(vdata);
            } catch (RepositoryException e) {
                // Re-wrap as IOException to match this method's read-failure contract.
                throw new IOException(e.getMessage(), e);
            }
            added += addReference(getInternalId(data.getIdentifier()), i, getInternalId(refNodeIdentifier));
        }
        return added;
    }
}
public class Manager { /** * Sets the target resolver . * @ param targetHandlerResolver a resolver for target handlers */ public void setTargetResolver ( ITargetHandlerResolver targetHandlerResolver ) { } }
if ( targetHandlerResolver == null ) { this . targetConfigurator . setTargetHandlerResolver ( this . defaultTargetHandlerResolver ) ; this . instancesMngr . setTargetHandlerResolver ( this . defaultTargetHandlerResolver ) ; } else { this . targetConfigurator . setTargetHandlerResolver ( targetHandlerResolver ) ; this . instancesMngr . setTargetHandlerResolver ( targetHandlerResolver ) ; }
public class CoreTransactionService {

    /**
     * Completes a transaction by atomically transitioning its stored state (via the
     * update function, applied only when the current state matches the expected one)
     * and then, if the transition succeeded, running the completion function against
     * every participant.
     */
    @SuppressWarnings("unchecked")
    private CompletableFuture<Void> completeTransaction(TransactionId transactionId, TransactionState expectState, Function<TransactionInfo, TransactionInfo> updateFunction, Predicate<TransactionInfo> updatedPredicate, BiFunction<TransactionId, Transactional<?>, CompletableFuture<Void>> completionFunction) {
        // Atomic compare-and-update on the transactions map: only apply the update
        // when the stored state matches expectState; otherwise leave it untouched.
        return transactions.compute(transactionId, (id, info) -> {
            if (info == null) {
                return null;
            } else if (info.state == expectState) {
                return updateFunction.apply(info);
            } else {
                return info;
            }
        }).thenCompose(value -> {
            // updatedPredicate confirms the compute above actually performed the
            // expected transition (and not a no-op on a mismatched state).
            if (value != null && updatedPredicate.test(value.value())) {
                // Complete every participant in parallel, then discard the results.
                return Futures.allOf(value.value().participants.stream().map(participantInfo -> completeParticipant(participantInfo, info -> completionFunction.apply(transactionId, info)))).thenApply(v -> null);
            }
            return Futures.exceptionalFuture(new TransactionException("Failed to acquire transaction lock"));
        });
    }
}
public class CmsEncoder {

    /**
     * Encodes all characters in the input that cannot be displayed in the given
     * encoding's charset with HTML entity references like <code>&amp;#8364;</code>.<p>
     *
     * This is required since a Java String is internally always Unicode, while the
     * HTML charset used might not support all such characters.<p>
     *
     * @param input    the input to encode for HTML
     * @param encoding the charset to encode the result with
     * @return the input with unsupported characters replaced by HTML entities
     * @see #decodeHtmlEntities(String, String)
     */
    public static String encodeHtmlEntities(String input, String encoding) {
        StringBuffer result = new StringBuffer(input.length() * 2);
        CharBuffer buffer = CharBuffer.wrap(input.toCharArray());
        Charset charset = Charset.forName(encoding);
        CharsetEncoder encoder = charset.newEncoder();
        // NOTE(review): iteration is per UTF-16 char, so supplementary characters
        // (surrogate pairs) are tested one half at a time — confirm this is acceptable
        // for the charsets used in practice.
        for (int i = 0; i < buffer.length(); i++) {
            int c = buffer.get(i);
            if (c < 128) {
                // first 128 chars are contained in almost every charset
                result.append((char) c);
                // this is intended as performance improvement since
                // the canEncode() operation appears quite CPU heavy
            } else if (encoder.canEncode((char) c)) {
                // encoder can encode this char
                result.append((char) c);
            } else {
                // append HTML entity reference (decimal code point)
                result.append(ENTITY_PREFIX);
                result.append(c);
                result.append(";");
            }
        }
        return result.toString();
    }
}
public class MultiDexHelper {

    /**
     * Gets all class names contained in "classes.dex", "classes2.dex", ... of this app.
     *
     * @return all class names found across the app's dex files
     * @throws PackageManager.NameNotFoundException if the package cannot be resolved
     * @throws IOException if any dex file cannot be loaded
     */
    public static List<String> getAllClasses() throws PackageManager.NameNotFoundException, IOException {
        List<String> classNames = new ArrayList<>();
        for (String path : getSourcePaths()) {
            try {
                DexFile dexfile;
                if (path.endsWith(EXTRACTED_SUFFIX)) {
                    // NOT use new DexFile(path), because it will throw "permission error in /data/dalvik-cache"
                    dexfile = DexFile.loadDex(path, path + ".tmp", 0);
                } else {
                    dexfile = new DexFile(path);
                }
                // NOTE(review): the DexFile is never closed here — confirm whether an
                // explicit close() is safe/desired on the supported Android versions.
                Enumeration<String> dexEntries = dexfile.entries();
                while (dexEntries.hasMoreElements()) {
                    classNames.add(dexEntries.nextElement());
                }
            } catch (IOException e) {
                // Re-throw with the offending path for easier diagnosis.
                throw new IOException("Error at loading dex file '" + path + "'");
            }
        }
        return classNames;
    }
}
public class Communications { /** * Returns received stations as per 1371-4 . pdf . * @ param extractor * @ param slotTimeout * @ param startIndex * @ return */ @ VisibleForTesting static Integer getReceivedStations ( AisExtractor extractor , int slotTimeout , int startIndex ) { } }
if ( slotTimeout == 3 || slotTimeout == 5 || slotTimeout == 7 ) return extractor . getValue ( startIndex + 5 , startIndex + 19 ) ; else return null ;
public class IteratorExtensions { /** * Returns an Iterator of Pairs where the nth pair is created by taking the nth element of the source as the value * and its 0 - based index as the key . E . g . * < code > zipWitIndex ( # [ " a " , " b " , " c " ] ) = = # [ ( 0 , " a " ) , ( 1 , " b " ) , ( 2 , " c " ) ] < / code > * If the index would overflow , { @ link Integer # MAX _ VALUE } is returned for all subsequent elements . * The resulting Iterator is a lazily computed view , so any modifications to the underlying Iterator will be * reflected on iteration . The result does not support { @ link Iterator # remove ( ) } * @ param iterator * the elements . May not be < code > null < / code > . * @ return the zipped result * @ since 2.7 */ public static < A > Iterator < Pair < Integer , A > > indexed ( final Iterator < ? extends A > iterator ) { } }
if ( iterator == null ) throw new NullPointerException ( "iterator" ) ; return new AbstractIterator < Pair < Integer , A > > ( ) { int i = 0 ; @ Override protected Pair < Integer , A > computeNext ( ) { if ( iterator . hasNext ( ) ) { Pair < Integer , A > next = new Pair < Integer , A > ( i , iterator . next ( ) ) ; if ( i != Integer . MAX_VALUE ) i ++ ; return next ; } else { return endOfData ( ) ; } } } ;
public class KickflipApiClient {

    /**
     * Sends stream metadata for a {@link io.kickflip.sdk.api.json.Stream}. The target
     * stream must be owned by the user created with
     * {@link io.kickflip.sdk.api.KickflipApiClient#createNewUser(KickflipCallback)}
     * from this KickflipApiClient.
     *
     * @param stream the {@link io.kickflip.sdk.api.json.Stream} to update metadata for
     * @param cb     callback receiving the updated stream on completion
     */
    public void setStreamInfo(Stream stream, final KickflipCallback cb) {
        // Bail out (cb is notified) if there is no active user to authenticate as.
        if (!assertActiveUserAvailable(cb)) return;
        GenericData data = new GenericData();
        // Identification fields are always sent.
        data.put("stream_id", stream.getStreamId());
        data.put("uuid", getActiveUser().getUUID());
        // Optional fields are only sent when present (or, for lat/lon, non-zero),
        // so absent values are not overwritten server-side with blanks.
        if (stream.getTitle() != null) {
            data.put("title", stream.getTitle());
        }
        if (stream.getDescription() != null) {
            data.put("description", stream.getDescription());
        }
        if (stream.getExtraInfo() != null) {
            // Extra info is an arbitrary object, serialized to JSON.
            data.put("extra_info", new Gson().toJson(stream.getExtraInfo()));
        }
        if (stream.getLatitude() != 0) {
            data.put("lat", stream.getLatitude());
        }
        if (stream.getLongitude() != 0) {
            data.put("lon", stream.getLongitude());
        }
        if (stream.getCity() != null) {
            data.put("city", stream.getCity());
        }
        if (stream.getState() != null) {
            data.put("state", stream.getState());
        }
        if (stream.getCountry() != null) {
            data.put("country", stream.getCountry());
        }
        if (stream.getThumbnailUrl() != null) {
            data.put("thumbnail_url", stream.getThumbnailUrl());
        }
        // Boolean flags are always sent explicitly.
        data.put("private", stream.isPrivate());
        data.put("deleted", stream.isDeleted());
        post(SET_META, new UrlEncodedContent(data), Stream.class, cb);
    }
}
public class Callbacks {

    /**
     * Creates a chained callback that runs the supplied pre-operation on the result
     * before forwarding it to the target callback.
     *
     * @param target the callback ultimately receiving the result (and any failure,
     *               via the chained callback's default behavior)
     * @param preOp  operation applied to the result before it is forwarded
     * @return the chained callback
     */
    public static <T> AsyncCallback<T> before(AsyncCallback<T> target, final Function<T, Void> preOp) {
        return new ChainedCallback<T, T>(target) {
            @Override
            public void onSuccess(T result) {
                // Run the side-effecting pre-operation first, then pass the
                // unmodified result through to the target.
                preOp.apply(result);
                forwardSuccess(result);
            }
        };
    }
}