signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TransitionManager { /** * Sets a specific transition to occur when the given scene is entered . * @ param scene The scene which , when applied , will cause the given * transition to run . * @ param transition The transition that will play when the given scene is * entered . A value of null will result in the default behavior of * using the default transition instead . */ public void setTransition ( @ NonNull Scene scene , @ Nullable Transition transition ) { } }
mSceneTransitions . put ( scene , transition ) ;
public class BondManipulator { /** * Returns the maximum bond order for a List of bonds , given an iterator to the list . * @ param bonds An iterator for the list of bonds * @ return The maximum bond order found * @ see # getMaximumBondOrder ( java . util . List ) */ public static IBond . Order getMaximumBondOrder ( Iterator < IBond > bonds ) { } }
IBond . Order maxOrder = IBond . Order . SINGLE ; while ( bonds . hasNext ( ) ) { IBond bond = bonds . next ( ) ; if ( isHigherOrder ( bond . getOrder ( ) , maxOrder ) ) maxOrder = bond . getOrder ( ) ; } return maxOrder ;
public class NGServer { /** * Returns the current NailStats object for the specified class , creating a new one if necessary * @ param nailClass the class for which we ' re gathering stats * @ return a NailStats object for the specified class */ private NailStats getOrCreateStatsFor ( Class nailClass ) { } }
NailStats result ; synchronized ( allNailStats ) { String nailClassName = nailClass . getName ( ) ; result = allNailStats . get ( nailClassName ) ; if ( result == null ) { result = new NailStats ( nailClassName ) ; allNailStats . put ( nailClassName , result ) ; } } return result ;
public class DispatchResponse { /** * Determine the view dispatcher . * @ param activity the current Activity * @ throws ViewDispatcherException if ViewDispatcher can not be determined */ private ViewDispatcher getViewDispatcher ( Activity activity ) throws ViewDispatcherException { } }
if ( dispatchRule . getViewDispatcher ( ) != null ) { return dispatchRule . getViewDispatcher ( ) ; } try { String dispatcherName ; if ( dispatchRule . getDispatcherName ( ) != null ) { dispatcherName = dispatchRule . getDispatcherName ( ) ; } else { dispatcherName = activity . getSetting ( ViewDispatcher . VIEW_DISPATCHER_SETTING_NAME ) ; if ( dispatcherName == null ) { throw new IllegalArgumentException ( "The settings name '" + ViewDispatcher . VIEW_DISPATCHER_SETTING_NAME + "' has not been specified in the default response rule" ) ; } } ViewDispatcher viewDispatcher = cache . get ( dispatcherName ) ; if ( viewDispatcher == null ) { if ( dispatcherName . startsWith ( BeanRule . CLASS_DIRECTIVE_PREFIX ) ) { String dispatcherClassName = dispatcherName . substring ( BeanRule . CLASS_DIRECTIVE_PREFIX . length ( ) ) ; Class < ? > dispatcherClass = activity . getEnvironment ( ) . getClassLoader ( ) . loadClass ( dispatcherClassName ) ; viewDispatcher = ( ViewDispatcher ) activity . getBean ( dispatcherClass ) ; } else { viewDispatcher = activity . getBean ( dispatcherName ) ; } if ( viewDispatcher == null ) { throw new IllegalArgumentException ( "No bean named '" + dispatcherName + "' is defined" ) ; } if ( viewDispatcher . isSingleton ( ) ) { ViewDispatcher existing = cache . putIfAbsent ( dispatcherName , viewDispatcher ) ; if ( existing != null ) { viewDispatcher = existing ; } else { if ( log . isDebugEnabled ( ) ) { log . debug ( "Caching " + viewDispatcher ) ; } } } } return viewDispatcher ; } catch ( Exception e ) { throw new ViewDispatcherException ( "Unable to determine ViewDispatcher" , e ) ; }
public class GeminiAccountService { /** * This will result in a new address being created each time , and is severely rate - limited */ @ Override public String requestDepositAddress ( Currency currency , String ... arguments ) throws IOException { } }
GeminiDepositAddressResponse response = super . requestDepositAddressRaw ( currency ) ; return response . getAddress ( ) ;
public class AbstrCFMLExprTransformer { /** * Hier werden die verschiedenen Moeglichen Werte erkannt und jenachdem wird mit der passenden * Methode weitergefahren < br / > * EBNF : < br / > * < code > string | number | dynamic | sharp ; < / code > * @ return CFXD Element * @ throws TemplateException */ private Expression checker ( Data data ) throws TemplateException { } }
Expression expr = null ; // String if ( ( expr = string ( data ) ) != null ) { expr = subDynamic ( data , expr , false , false ) ; data . mode = STATIC ; // ( expr instanceof Literal ) ? STATIC : DYNAMIC ; / / STATIC return expr ; } // Number if ( ( expr = number ( data ) ) != null ) { expr = subDynamic ( data , expr , false , false ) ; data . mode = STATIC ; // ( expr instanceof Literal ) ? STATIC : DYNAMIC ; / / STATIC return expr ; } // closure if ( ( expr = closure ( data ) ) != null ) { data . mode = DYNAMIC ; return expr ; } // lambda if ( ( expr = lambda ( data ) ) != null ) { data . mode = DYNAMIC ; return expr ; } // Dynamic if ( ( expr = dynamic ( data ) ) != null ) { expr = newOp ( data , expr ) ; expr = subDynamic ( data , expr , true , false ) ; data . mode = DYNAMIC ; return expr ; } // Sharp if ( ( expr = sharp ( data ) ) != null ) { data . mode = DYNAMIC ; return expr ; } // JSON if ( ( expr = json ( data , JSON_ARRAY , '[' , ']' ) ) != null ) { expr = subDynamic ( data , expr , false , false ) ; data . mode = DYNAMIC ; return expr ; } if ( ( expr = json ( data , JSON_STRUCT , '{' , '}' ) ) != null ) { expr = subDynamic ( data , expr , false , false ) ; data . mode = DYNAMIC ; return expr ; } // else Error throw new TemplateException ( data . srcCode , "Syntax Error, Invalid Construct" ) ;
public class RBBISetBuilder { int getFirstChar ( int category ) { } }
RangeDescriptor rlRange ; int retVal = - 1 ; for ( rlRange = fRangeList ; rlRange != null ; rlRange = rlRange . fNext ) { if ( rlRange . fNum == category ) { retVal = rlRange . fStartChar ; break ; } } return retVal ;
public class AWSDirectoryServiceClient { /** * Creates an AD Connector to connect to an on - premises directory . * Before you call < code > ConnectDirectory < / code > , ensure that all of the required permissions have been explicitly * granted through a policy . For details about what permissions are required to run the * < code > ConnectDirectory < / code > operation , see < a * href = " http : / / docs . aws . amazon . com / directoryservice / latest / admin - guide / UsingWithDS _ IAM _ ResourcePermissions . html " * > AWS Directory Service API Permissions : Actions , Resources , and Conditions Reference < / a > . * @ param connectDirectoryRequest * Contains the inputs for the < a > ConnectDirectory < / a > operation . * @ return Result of the ConnectDirectory operation returned by the service . * @ throws DirectoryLimitExceededException * The maximum number of directories in the region has been reached . You can use the * < a > GetDirectoryLimits < / a > operation to determine your directory limits in the region . * @ throws InvalidParameterException * One or more parameters are not valid . * @ throws ClientException * A client exception has occurred . * @ throws ServiceException * An exception has occurred in AWS Directory Service . * @ sample AWSDirectoryService . ConnectDirectory * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / ConnectDirectory " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ConnectDirectoryResult connectDirectory ( ConnectDirectoryRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeConnectDirectory ( request ) ;
public class CameraEncoder { /** * Hook for Host Activity ' s onResume ( ) * Called on UI thread */ public void onHostActivityResumed ( ) { } }
synchronized ( mReadyForFrameFence ) { // Resume the GLSurfaceView ' s Renderer thread if ( mDisplayView != null ) mDisplayView . onResume ( ) ; // Re - open camera if we ' re not recording and the SurfaceTexture has already been created if ( ! mRecording && mSurfaceTexture != null ) { if ( VERBOSE ) Log . i ( "CameraRelease" , "Opening camera and attaching to SurfaceTexture" ) ; mHandler . sendMessage ( mHandler . obtainMessage ( MSG_REOPEN_CAMERA ) ) ; } else { Log . w ( "CameraRelease" , "Didn't try to open camera onHAResume. rec: " + mRecording + " mSurfaceTexture ready? " + ( mSurfaceTexture == null ? " no" : " yes" ) ) ; } }
public class ProjectiveStructureByFactorization { /** * Used to get found camera matrix for a view * @ param view Which view * @ param cameraMatrix storage for 3x4 projective camera matrix */ public void getCameraMatrix ( int view , DMatrixRMaj cameraMatrix ) { } }
cameraMatrix . reshape ( 3 , 4 ) ; CommonOps_DDRM . extract ( P , view * 3 , 0 , cameraMatrix ) ; for ( int col = 0 ; col < 4 ; col ++ ) { cameraMatrix . data [ cameraMatrix . getIndex ( 0 , col ) ] *= pixelScale ; cameraMatrix . data [ cameraMatrix . getIndex ( 1 , col ) ] *= pixelScale ; }
public class Consumers { /** * Yields nth ( 1 - based ) element of the iterator if found or nothing . * @ param < E > the iterator element type * @ param count the element cardinality * @ param iterator the iterator that will be consumed * @ return just the element or nothing */ public static < E > Optional < E > maybeNth ( long count , Iterator < E > iterator ) { } }
final Iterator < E > filtered = new FilteringIterator < E > ( iterator , new Nth < E > ( count ) ) ; return new MaybeFirstElement < E > ( ) . apply ( filtered ) ;
public class ProvisionByoipCidrRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < ProvisionByoipCidrRequest > getDryRunRequest ( ) { } }
Request < ProvisionByoipCidrRequest > request = new ProvisionByoipCidrRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class Hasher { /** * Hashes string . This method is minor modification of String . hashCode ( ) . * @ param string * String to hash . * @ return Hash of the string . */ public static long hashString ( String string ) { } }
long h = 1125899906842597L ; // prime int len = string . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { h = 31 * h + string . charAt ( i ) ; } return h ;
public class LzmaFrameEncoder { /** * Calculates maximum possible size of output buffer for not compressible data . */ private static int maxOutputBufferLength ( int inputLength ) { } }
double factor ; if ( inputLength < 200 ) { factor = 1.5 ; } else if ( inputLength < 500 ) { factor = 1.2 ; } else if ( inputLength < 1000 ) { factor = 1.1 ; } else if ( inputLength < 10000 ) { factor = 1.05 ; } else { factor = 1.02 ; } return 13 + ( int ) ( inputLength * factor ) ;
public class DeltaIteration { /** * Sets the resources for the iteration , and the minimum and preferred resources are the same by default . * The lower and upper resource limits will be considered in dynamic resource resize feature for future plan . * @ param resources The resources for the iteration . * @ return The iteration with set minimum and preferred resources . */ private DeltaIteration < ST , WT > setResources ( ResourceSpec resources ) { } }
Preconditions . checkNotNull ( resources , "The resources must be not null." ) ; Preconditions . checkArgument ( resources . isValid ( ) , "The values in resources must be not less than 0." ) ; this . minResources = resources ; this . preferredResources = resources ; return this ;
public class LevenbergMarquardt { /** * Convert a list of numbers to an array of doubles . * @ param listOfNumbers A list of numbers . * @ return A corresponding array of doubles executing < code > doubleValue ( ) < / code > on each element . */ private static double [ ] numberListToDoubleArray ( List < Number > listOfNumbers ) { } }
double [ ] arrayOfDoubles = new double [ listOfNumbers . size ( ) ] ; for ( int i = 0 ; i < arrayOfDoubles . length ; i ++ ) { arrayOfDoubles [ i ] = listOfNumbers . get ( i ) . doubleValue ( ) ; } return arrayOfDoubles ;
public class FJIterate { /** * Iterate over the collection specified , in parallel batches using default runtime parameter values . The * { @ code ObjectIntProcedure } used must be stateless , or use concurrent aware objects if they are to be shared . * e . g . * < pre > * { @ code final ConcurrentMutableMap < Integer , Object > chm = new ConcurrentHashMap < Integer , Object > ( ) ; } * FJIterate . < b > forEachWithIndex < / b > ( collection , new ObjectIntProcedure ( ) * public void value ( Object object , int index ) * chm . put ( index , object ) ; * < / pre > */ public static < T > void forEachWithIndex ( Iterable < T > iterable , ObjectIntProcedure < ? super T > procedure ) { } }
FJIterate . forEachWithIndex ( iterable , procedure , FJIterate . FORK_JOIN_POOL ) ;
public class DomainsInner { /** * Get domain name recommendations based on keywords . * Get domain name recommendations based on keywords . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ param serviceFuture the ServiceFuture object tracking the Retrofit calls * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < NameIdentifierInner > > listRecommendationsNextAsync ( final String nextPageLink , final ServiceFuture < List < NameIdentifierInner > > serviceFuture , final ListOperationCallback < NameIdentifierInner > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( listRecommendationsNextSinglePageAsync ( nextPageLink ) , new Func1 < String , Observable < ServiceResponse < Page < NameIdentifierInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < NameIdentifierInner > > > call ( String nextPageLink ) { return listRecommendationsNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class ArrayBytesBuff { /** * 检查是否可读字节数 * @ param minimumReadableBytes */ private void checkReadableBytesUnsafe ( int minimumReadableBytes ) { } }
if ( readerIndex > writerIndex - minimumReadableBytes ) { throw new IndexOutOfBoundsException ( String . format ( "readerIndex(%d) + length(%d) exceeds writerIndex(%d): %s" , readerIndex , minimumReadableBytes , writerIndex , this ) ) ; }
public class ResourcesInner { /** * Updates a resource by ID . * @ param resourceId The fully qualified ID of the resource , including the resource name and resource type . Use the format , / subscriptions / { guid } / resourceGroups / { resource - group - name } / { resource - provider - namespace } / { resource - type } / { resource - name } * @ param apiVersion The API version to use for the operation . * @ param parameters Update resource parameters . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < GenericResourceInner > updateByIdAsync ( String resourceId , String apiVersion , GenericResourceInner parameters ) { } }
return updateByIdWithServiceResponseAsync ( resourceId , apiVersion , parameters ) . map ( new Func1 < ServiceResponse < GenericResourceInner > , GenericResourceInner > ( ) { @ Override public GenericResourceInner call ( ServiceResponse < GenericResourceInner > response ) { return response . body ( ) ; } } ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcActor ( ) { } }
if ( ifcActorEClass == null ) { ifcActorEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 2 ) ; } return ifcActorEClass ;
public class DescribeDimensionKeysRequest { /** * One or more filters to apply in the request . Restrictions : * < ul > * < li > * Any number of filters by the same dimension , as specified in the < code > GroupBy < / code > or < code > Partition < / code > * parameters . * < / li > * < li > * A single filter for any other dimension in this dimension group . * < / li > * < / ul > * @ param filter * One or more filters to apply in the request . Restrictions : < / p > * < ul > * < li > * Any number of filters by the same dimension , as specified in the < code > GroupBy < / code > or * < code > Partition < / code > parameters . * < / li > * < li > * A single filter for any other dimension in this dimension group . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeDimensionKeysRequest withFilter ( java . util . Map < String , String > filter ) { } }
setFilter ( filter ) ; return this ;
public class FileUtils { /** * Get the nquads file for a given moment in time . * @ param dir the directory * @ param time the time * @ return the file */ public static File getNquadsFile ( final File dir , final Instant time ) { } }
return new File ( dir , Long . toString ( time . getEpochSecond ( ) ) + ".nq" ) ;
public class EnumValidator { /** * Creates a new validator for the enum type with the allowed values defined in the { @ code allowed } parameter . * @ param enumType the type of the enum . * @ param nullable { @ code true } if the value is allowed to be { @ code null } , otherwise { @ code false } . * @ param allowed the enum values that are allowed . * @ param < E > the type of the enum . * @ return a new validator . * @ deprecated use { @ link # create ( Class , Enum [ ] ) } since { @ link org . jboss . as . controller . AttributeDefinition } handles the nullable check . */ @ SafeVarargs @ Deprecated @ SuppressWarnings ( "deprecation" ) public static < E extends Enum < E > > EnumValidator < E > create ( final Class < E > enumType , final boolean nullable , final E ... allowed ) { } }
return new EnumValidator < E > ( enumType , nullable , allowed ) ;
public class KeyVaultClientBaseImpl { /** * Lists the deleted certificates in the specified vault currently available for recovery . * The GetDeletedCertificates operation retrieves the certificates in the current vault which are in a deleted state and ready for recovery or purging . This operation includes deletion - specific information . This operation requires the certificates / get / list permission . This operation can only be enabled on soft - delete enabled vaults . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws KeyVaultErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; DeletedCertificateItem & gt ; object if successful . */ public PagedList < DeletedCertificateItem > getDeletedCertificatesNext ( final String nextPageLink ) { } }
ServiceResponse < Page < DeletedCertificateItem > > response = getDeletedCertificatesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) ; return new PagedList < DeletedCertificateItem > ( response . body ( ) ) { @ Override public Page < DeletedCertificateItem > nextPage ( String nextPageLink ) { return getDeletedCertificatesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class StructureTools { /** * Gets a representative atom for each group that is part of the chain * backbone . Note that modified aminoacids won ' t be returned as part of the * backbone if the { @ link org . biojava . nbio . structure . io . mmcif . ReducedChemCompProvider } was used to load the * structure . * For amino acids , the representative is a CA carbon . For nucleotides , the * representative is the { @ value # NUCLEOTIDE _ REPRESENTATIVE } . Other group * types will be ignored . * @ param s * Input structure * @ return representative Atoms of the structure backbone * @ since Biojava 4.1.0 */ public static Atom [ ] getRepresentativeAtomArray ( Structure s ) { } }
List < Atom > atoms = new ArrayList < Atom > ( ) ; for ( Chain c : s . getChains ( ) ) { Atom [ ] chainAtoms = getRepresentativeAtomArray ( c ) ; for ( Atom a : chainAtoms ) { atoms . add ( a ) ; } } return atoms . toArray ( new Atom [ atoms . size ( ) ] ) ;
public class OMapOperator { /** * Execute OMAP operator . Behaves like { @ link MapOperator # doExec ( Element , Object , String , Object . . . ) } counterpart but takes * care to create index and increment it before every key / value pair processing . * @ param element context element , * @ param scope scope object , * @ param propertyPath property path , * @ param arguments optional arguments , not used . * @ return always returns null to signal full processing . * @ throws IOException if underlying writer fails to write . * @ throws TemplateException if element has not at least two children or content map is undefined . */ @ Override protected Object doExec ( Element element , Object scope , String propertyPath , Object ... arguments ) throws IOException , TemplateException { } }
if ( ! propertyPath . equals ( "." ) && ConverterRegistry . hasType ( scope . getClass ( ) ) ) { throw new TemplateException ( "Operand is property path but scope is not an object." ) ; } Element keyTemplate = element . getFirstChild ( ) ; if ( keyTemplate == null ) { throw new TemplateException ( "Invalid map element |%s|. Missing key template." , element ) ; } Element valueTemplate = keyTemplate . getNextSibling ( ) ; if ( valueTemplate == null ) { throw new TemplateException ( "Invalid map element |%s|. Missing value template." , element ) ; } Stack < Index > indexes = serializer . getIndexes ( ) ; Index index = new Index ( ) ; indexes . push ( index ) ; Map < ? , ? > map = content . getMap ( scope , propertyPath ) ; for ( Object key : map . keySet ( ) ) { index . increment ( ) ; serializer . writeItem ( keyTemplate , key ) ; serializer . writeItem ( valueTemplate , map . get ( key ) ) ; } indexes . pop ( ) ; return null ;
public class PeriodConverter { /** * Gson invokes this call - back method during deserialization when it encounters a field of the * specified type . < p > * In the implementation of this call - back method , you should consider invoking * { @ link JsonDeserializationContext # deserialize ( JsonElement , Type ) } method to create objects * for any non - trivial field of the returned object . However , you should never invoke it on the * the same type passing { @ code json } since that will cause an infinite loop ( Gson will call your * call - back method again ) . * @ param json The Json data being deserialized * @ param typeOfT The type of the Object to deserialize to * @ return a deserialized object of the specified type typeOfT which is a subclass of { @ code T } * @ throws JsonParseException if json is not in the expected format of { @ code typeOfT } */ @ Override public Period deserialize ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) throws JsonParseException { } }
// Do not try to deserialize null or empty values if ( json . getAsString ( ) == null || json . getAsString ( ) . isEmpty ( ) ) { return null ; } final PeriodFormatter fmt = ISOPeriodFormat . standard ( ) ; return fmt . parsePeriod ( json . getAsString ( ) ) ;
public class DoubleBuffer { /** * Compare the remaining doubles of this buffer to another double buffer ' s remaining doubles . * @ param otherBuffer another double buffer . * @ return a negative value if this is less than { @ code other } ; 0 if this equals to { @ code * other } ; a positive value if this is greater than { @ code other } . * @ exception ClassCastException if { @ code other } is not a double buffer . */ public int compareTo ( DoubleBuffer otherBuffer ) { } }
int compareRemaining = ( remaining ( ) < otherBuffer . remaining ( ) ) ? remaining ( ) : otherBuffer . remaining ( ) ; int thisPos = position ; int otherPos = otherBuffer . position ; // BEGIN android - changed double thisDouble , otherDouble ; while ( compareRemaining > 0 ) { thisDouble = get ( thisPos ) ; otherDouble = otherBuffer . get ( otherPos ) ; // checks for double and NaN inequality if ( ( thisDouble != otherDouble ) && ( ( thisDouble == thisDouble ) || ( otherDouble == otherDouble ) ) ) { return thisDouble < otherDouble ? - 1 : 1 ; } thisPos ++ ; otherPos ++ ; compareRemaining -- ; } // END android - changed return remaining ( ) - otherBuffer . remaining ( ) ;
public class ArchiveProperties { /** * Returns the archive properties as a Map . */ public Map < String , Collection < String > > toMap ( ) { } }
Map < String , Collection < String > > params = new HashMap < String , Collection < String > > ( ) ; if ( name != null ) { ArrayList < String > valueList = new ArrayList < String > ( ) ; valueList . add ( name ) ; params . put ( "name" , valueList ) ; } if ( resolution != null ) { ArrayList < String > valueList = new ArrayList < String > ( ) ; valueList . add ( resolution ) ; params . put ( "resolution" , valueList ) ; } ArrayList < String > valueList = new ArrayList < String > ( ) ; valueList . add ( Boolean . toString ( hasAudio ) ) ; params . put ( "hasAudio" , valueList ) ; valueList = new ArrayList < String > ( ) ; valueList . add ( Boolean . toString ( hasVideo ) ) ; params . put ( "hasVideo" , valueList ) ; valueList = new ArrayList < String > ( ) ; valueList . add ( outputMode . toString ( ) ) ; params . put ( "outputMode" , valueList ) ; if ( layout != null ) { valueList = new ArrayList < String > ( ) ; valueList . add ( layout . toString ( ) ) ; params . put ( "layout" , valueList ) ; } return params ;
public class TextSimilarity { /** * This calculates the similarity between two strings as described in Programming * Classics : Implementing the World ' s Best Algorithms by Oliver ( ISBN 0-131-00413-1 ) . * @ param text1 * @ param text2 * @ return */ public static double oliverSimilarity ( String text1 , String text2 ) { } }
preprocessDocument ( text1 ) ; preprocessDocument ( text2 ) ; String smallerDoc = text1 ; String biggerDoc = text2 ; if ( text1 . length ( ) > text2 . length ( ) ) { smallerDoc = text2 ; biggerDoc = text1 ; } double p = PHPSimilarText . similarityPercentage ( smallerDoc , biggerDoc ) ; p /= 100.0 ; return p ;
public class TangoUser { public void removeTangoUserListener ( ITangoUserListener listener ) throws DevFailed { } }
event_listeners . remove ( ITangoUserListener . class , listener ) ; if ( event_listeners . size ( ) == 0 ) unsubscribe_event ( event_identifier ) ;
public class RedisClusterClient { /** * Returns the first { @ link RedisURI } configured with this { @ link RedisClusterClient } instance . * @ return the first { @ link RedisURI } . */ protected RedisURI getFirstUri ( ) { } }
assertNotEmpty ( initialUris ) ; Iterator < RedisURI > iterator = initialUris . iterator ( ) ; return iterator . next ( ) ;
public class CmsVisitEntryFilter { /** * Returns an extended filter with the given resource restriction . < p > * @ param structureId the structure id to filter * @ return an extended filter with the given resource restriction */ public CmsVisitEntryFilter filterResource ( CmsUUID structureId ) { } }
CmsVisitEntryFilter filter = ( CmsVisitEntryFilter ) clone ( ) ; filter . m_structureId = structureId ; return filter ;
public class HibernateLayer { /** * Set the layer configuration . * @ param layerInfo * layer information * @ throws LayerException * oops * @ since 1.7.1 */ @ Api @ Override public void setLayerInfo ( VectorLayerInfo layerInfo ) throws LayerException { } }
super . setLayerInfo ( layerInfo ) ; if ( null != featureModel ) { featureModel . setLayerInfo ( getLayerInfo ( ) ) ; }
public class Label { /** * might return null */ private static Label combineLabel ( FA fa , IntBitSet states ) { } }
int si ; Label tmp ; Label result ; result = null ; for ( si = states . first ( ) ; si != - 1 ; si = states . next ( si ) ) { tmp = ( Label ) fa . get ( si ) . getLabel ( ) ; if ( tmp != null ) { if ( result == null ) { result = new Label ( ) ; } result . symbols . addAll ( tmp . symbols ) ; } } return result ;
public class CpcConfidence { /** * mergeFlag must already be checked as false */ static double getHipConfidenceLB ( final int lgK , final long numCoupons , final double hipEstAccum , final int kappa ) { } }
if ( numCoupons == 0 ) { return 0.0 ; } assert lgK >= 4 ; assert ( kappa >= 1 ) && ( kappa <= 3 ) ; double x = hipErrorConstant ; if ( lgK <= 14 ) { x = ( hipHighSideData [ ( 3 * ( lgK - 4 ) ) + ( kappa - 1 ) ] ) / 10000.0 ; } final double rel = x / sqrt ( 1 << lgK ) ; final double eps = kappa * rel ; final double est = hipEstAccum ; double result = est / ( 1.0 + eps ) ; if ( result < numCoupons ) { result = numCoupons ; } return result ;
public class TwitterEndpointServices { /** * Computes the signature for an oauth / request _ token request per * { @ link https : / / dev . twitter . com / oauth / overview / creating - signatures } . * Expects consumerSecret and tokenSecret to already be set to the desired values . * @ param requestMethod * @ param targetUrl * @ param params * @ return */ public String computeSignature ( String requestMethod , String targetUrl , Map < String , String > params ) { } }
return computeSignature ( requestMethod , targetUrl , params , consumerSecret , tokenSecret ) ;
public class ContentBasedLocalBundleRepository { /** * This method adds into the cache . Adding into the cache requires updating a map and a set . * @ param bInfo the bundle info to add into . */ private void addToCache ( BundleInfo bInfo ) { } }
List < Resource > info = _cacheBySymbolicName . get ( bInfo . symbolicName ) ; if ( info == null ) { info = new ArrayList < Resource > ( ) ; info . add ( bInfo ) ; } info = _cacheBySymbolicName . putIfAbsent ( bInfo . symbolicName , info ) ; if ( info != null ) { synchronized ( info ) { info . add ( bInfo ) ; } } _bundleLocations . add ( bInfo . file ) ;
public class LogBase { /** * Log a message , with an array of object arguments and associated Throwable information . * If the logger is currently enabled for the given message * level then the given message is forwarded to all the * registered LogTarget objects . * @ param level the level to log with * @ param throwable the Throwable associated with this log message * @ param message the message to log * @ param arguments the array of arguments for the message */ public void log ( LogLevel level , Throwable throwable , String message , Object ... arguments ) { } }
// this is copied from log ( LogEntry ) to prevent unnecessary object creation if ( level . compareTo ( this . level ) < 0 ) { return ; } this . log ( new LogEntry ( level , throwable , message , arguments , ZonedDateTime . now ( ) ) ) ;
public class TasksBase { /** * Removes the task from the specified project . The task will still exist * in the system , but it will not be in the project anymore . * Returns an empty data block . * @ param task The task to remove from a project . * @ return Request object */ public ItemRequest < Task > removeProject ( String task ) { } }
String path = String . format ( "/tasks/%s/removeProject" , task ) ; return new ItemRequest < Task > ( this , Task . class , path , "POST" ) ;
public class OutputHandler { /** * Wait for the task to finish or abort . * @ return did the task finish correctly ? * @ throws Throwable */ public synchronized boolean waitForFinish ( ) throws Throwable { } }
while ( ! done && exception == null ) { wait ( ) ; } if ( exception != null ) { throw exception ; } return done ;
public class SibRaManagedConnection {
    /**
     * Cleans up this managed connection prior to returning it to the free pool.
     * Invalidates any connection handles still associated with the managed
     * connection as, in the normal case, they would all have been dissociated
     * before cleanup was started.
     *
     * @throws ResourceException if a connection failure was recorded, to force
     *         the container to destroy this managed connection instead of pooling it
     */
    public void cleanup() throws ResourceException {
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, "cleanup");
        }
        // Invalidate any currently associated connections
        for (Iterator iterator = _connections.iterator(); iterator.hasNext();) {
            final SibRaConnection connection = (SibRaConnection) iterator.next();
            connection.invalidate();
        }
        // If we have a connection exception then we must have hit a problem with getting the
        // physical connection. We need to throw an exception here to force the destroy of this
        // managed connection, else it goes in the free pool. A better solution would be to expose
        // an API that we can call on JCA; this is work in progress.
        if (_connectionException != null) {
            // The string used here is searched for by JCA so that it won't log the error
            // -- do not change the literal text.
            ResourceException exception = new ResourceException("Skip logging for this failing connection");
            if (TRACE.isEventEnabled()) {
                SibTr.exception(this, TRACE, exception);
            }
            throw exception;
        }
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, "cleanup");
        }
    }
}
public class CmsUpdateInfo { /** * Checks if the categoryfolder setting needs to be updated . * @ return true if the categoryfolder setting needs to be updated */ public boolean needToSetCategoryFolder ( ) { } }
if ( m_adeModuleVersion == null ) { return true ; } CmsModuleVersion categoryFolderUpdateVersion = new CmsModuleVersion ( "9.0.0" ) ; return ( m_adeModuleVersion . compareTo ( categoryFolderUpdateVersion ) == - 1 ) ;
public class PolicyUtils { /** * Obtains the metadata for the given document . * @ param docIS * the document as an InputStream * @ return the document metadata as a Map */ public Map < String , String > getDocumentMetadata ( InputStream docIS ) { } }
Map < String , String > metadata = new HashMap < String , String > ( ) ; try { // Create instance of DocumentBuilderFactory DocumentBuilderFactory factory = DocumentBuilderFactory . newInstance ( ) ; // Get the DocumentBuilder DocumentBuilder docBuilder = factory . newDocumentBuilder ( ) ; // Create blank DOM Document and parse contents of input stream Document doc = docBuilder . parse ( docIS ) ; NodeList nodes = null ; metadata . put ( "PolicyId" , doc . getDocumentElement ( ) . getAttribute ( "PolicyId" ) ) ; nodes = doc . getElementsByTagName ( "Subjects" ) ; if ( nodes . getLength ( ) == 0 ) { metadata . put ( "anySubject" , "T" ) ; } nodes = doc . getElementsByTagName ( "Resources" ) ; if ( nodes . getLength ( ) == 0 ) { metadata . put ( "anyResource" , "T" ) ; } nodes = doc . getElementsByTagName ( "Actions" ) ; if ( nodes . getLength ( ) == 0 ) { metadata . put ( "anyAction" , "T" ) ; } nodes = doc . getElementsByTagName ( "Environments" ) ; if ( nodes . getLength ( ) == 0 ) { metadata . put ( "anyEnvironment" , "T" ) ; } } catch ( Exception e ) { log . error ( e . getMessage ( ) ) ; } return metadata ;
public class XMLEmitter { /** * Comment node . * @ param sComment * The comment text */ public void onComment ( @ Nullable final String sComment ) { } }
if ( StringHelper . hasText ( sComment ) ) { if ( isThrowExceptionOnNestedComments ( ) ) if ( sComment . contains ( COMMENT_START ) || sComment . contains ( COMMENT_END ) ) throw new IllegalArgumentException ( "XML comment contains nested XML comment: " + sComment ) ; _append ( COMMENT_START ) . _append ( sComment ) . _append ( COMMENT_END ) ; }
public class UUID {
    /**
     * Returns the given {@code value} represented by the specified number of hex {@code digits}.
     *
     * @param value is the number to format.
     * @param digits are the number of digits requested.
     * @return the given {@code value} as hex {@link String} with the given number of digits.
     */
    private static String digits(long value, int digits) {
        // Place a sentinel 1-bit one hex position above the requested width so
        // Long.toHexString always produces exactly digits+1 characters (leading
        // zeros preserved), then strip the sentinel character.
        final long sentinel = 1L << (digits * 4);
        final long masked = value & (sentinel - 1);
        return Long.toHexString(sentinel | masked).substring(1);
    }
}
public class TldTaglibTypeImpl { /** * Returns all < code > function < / code > elements * @ return list of < code > function < / code > */ public List < FunctionType < TldTaglibType < T > > > getAllFunction ( ) { } }
List < FunctionType < TldTaglibType < T > > > list = new ArrayList < FunctionType < TldTaglibType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "function" ) ; for ( Node node : nodeList ) { FunctionType < TldTaglibType < T > > type = new FunctionTypeImpl < TldTaglibType < T > > ( this , "function" , childNode , node ) ; list . add ( type ) ; } return list ;
public class CoronaJobHistory {
    /**
     * Log a job's priority change to the history writers.
     *
     * @param jobid the job identifier (see review note below)
     * @param priority the job's priority
     */
    public void logJobPriority(JobID jobid, JobPriority priority) {
        // History recording can be globally disabled; nothing to do then.
        if (disableHistory) {
            return;
        }
        // NOTE(review): the 'jobid' parameter is unused -- the body logs the
        // 'jobId' field instead. Presumably the field always refers to this
        // history's job, but confirm the parameter was not meant to be used here.
        if (null != writers) {
            log(writers, RecordTypes.Job, new Keys[] { Keys.JOBID, Keys.JOB_PRIORITY }, new String[] { jobId.toString(), priority.toString() });
        }
    }
}
public class MCCImpl {
    /**
     * EMF-generated reflective "is set" check: a feature counts as set when it
     * differs from its default value. For the RG list feature that means
     * non-null and non-empty; everything else defers to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.MCC__RG:
                return rg != null && !rg.isEmpty();
        }
        return super.eIsSet(featureID);
    }
}
public class JITDeploy {
    /**
     * Generates (or reuses) the WebService EJB proxy class for the given bean.
     * d497921
     *
     * @throws EJBConfigurationException rethrown unchanged from bytecode generation
     * @throws ClassNotFoundException wrapping any other failure, with the cause preserved
     */
    public static Class<?> generateWSEJBProxy(ClassLoader classLoader, String proxyClassName, Class<?> proxyInterface, Method[] proxyMethods, EJBMethodInfoImpl[] methodInfos, String ejbClassName, String beanName, ClassDefiner classDefiner) throws ClassNotFoundException, EJBConfigurationException {
        Class<?> rtnClass = null;
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // d576626
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "generateWSEJBProxy: " + proxyClassName + " : " + beanName);
        try {
            // Generate the proxy bytecode, then define it in the target class
            // loader (reusing an already-loaded class when present).
            byte[] classbytes = WSEJBProxy.generateClassBytes(proxyClassName, proxyInterface, proxyMethods, methodInfos, ejbClassName, beanName);
            rtnClass = classDefiner.findLoadedOrDefineClass(classLoader, proxyClassName, classbytes);
        } catch (EJBConfigurationException ejbex) {
            // Configuration problems: record to FFDC and rethrow unchanged.
            FFDCFilter.processException(ejbex, CLASS_NAME + ".generateWSEJBProxy", "536");
            if (isTraceOn && tc.isEntryEnabled())
                Tr.exit(tc, "generateWSEJBProxy failed: " + ejbex.getMessage());
            throw ejbex;
        } catch (Throwable ex) {
            // Anything else becomes ClassNotFoundException, cause preserved.
            FFDCFilter.processException(ex, CLASS_NAME + ".generateWSEJBProxy", "545");
            if (isTraceOn && tc.isEntryEnabled())
                Tr.exit(tc, "generateWSEJBProxy failed: " + ex.getMessage());
            throw new ClassNotFoundException(proxyClassName + " : " + ex.getMessage(), ex);
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "generateWSEJBProxy: " + rtnClass);
        return rtnClass;
    }
}
public class AspectranNodeParser {
    /**
     * Adds the settings nodelets: handlers for the {@code /aspectran/settings}
     * element and its {@code <setting>} children.
     */
    private void addSettingsNodelets() {
        // When </settings> closes, apply every setting collected so far.
        parser.setXpath("/aspectran/settings");
        parser.addNodeEndlet(text -> {
            assistant.applySettings();
        });
        // <setting name="..." value="...">: record the attribute pair and push
        // the name so the end-let can see it again.
        parser.setXpath("/aspectran/settings/setting");
        parser.addNodelet(attrs -> {
            String name = attrs.get("name");
            String value = attrs.get("value");
            assistant.putSetting(name, value);
            parser.pushObject(name);
        });
        parser.addNodeEndlet(text -> {
            String name = parser.popObject();
            // Element body text, when present, overrides the value attribute.
            if (text != null) {
                assistant.putSetting(name, text);
            }
        });
    }
}
public class RunsInner {
    /**
     * Cancel an existing run.
     *
     * @param resourceGroupName The name of the resource group to which the container registry belongs.
     * @param registryName The name of the container registry.
     * @param runId The run ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginCancel(String resourceGroupName, String registryName, String runId) {
        // Synchronous wrapper over the async call: single() blocks for the one
        // response; body() is invoked for its error mapping (the result is void).
        beginCancelWithServiceResponseAsync(resourceGroupName, registryName, runId).toBlocking().single().body();
    }
}
public class QuotaLimit { /** * < pre > * Tiered limit values . You must specify this as a key : value pair , with an * integer value that is the maximum number of requests allowed for the * specified unit . Currently only STANDARD is supported . * < / pre > * < code > map & lt ; string , int64 & gt ; values = 10 ; < / code > */ public long getValuesOrDefault ( java . lang . String key , long defaultValue ) { } }
if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } java . util . Map < java . lang . String , java . lang . Long > map = internalGetValues ( ) . getMap ( ) ; return map . containsKey ( key ) ? map . get ( key ) : defaultValue ;
public class ExtractorConfig { /** * Get string array . * @ param key the key * @ return the string [ ] */ public String [ ] getStringArray ( String key ) { } }
try { return getValue ( String [ ] . class , key ) ; } catch ( ClassCastException e ) { return new String [ ] { getString ( key ) } ; }
public class Strands {
    /**
     * Disables the current strand for thread scheduling purposes, for up to
     * the specified waiting time, unless the permit is available.
     * If the permit is available then it is consumed and the call
     * returns immediately; otherwise the current strand becomes disabled
     * for scheduling purposes and lies dormant until one of four
     * things happens:
     * <ul>
     * <li>Some other strand invokes {@link #unpark unpark} with the
     * current strand as the target; or
     * <li>Some other strand interrupts the current strand; or
     * <li>The specified waiting time elapses; or
     * <li>The call spuriously (that is, for no reason) returns.
     * </ul>
     * This method does <em>not</em> report which of these caused the
     * method to return. Callers should re-check the conditions which caused
     * the strand to park in the first place.
     *
     * @param nanos the maximum number of nanoseconds to wait
     */
    public static void parkNanos(long nanos) {
        try {
            Strand.parkNanos(nanos);
        } catch (SuspendExecution e) {
            // Re-wrap the checked SuspendExecution in its unchecked carrier so
            // it can propagate through this non-suspendable frame.
            throw RuntimeSuspendExecution.of(e);
        }
    }
}
public class StylesheetRoot {
    /**
     * Get an "xsl:template" property by node match. This looks in the imports as
     * well as this stylesheet.
     *
     * @see <a href="http://www.w3.org/TR/xslt#section-Defining-Template-Rules">section-Defining-Template-Rules in XSLT Specification</a>
     * @param xctxt non-null reference to XPath runtime execution context.
     * @param targetNode non-null reference of node that the template must match.
     * @param mode qualified name of the node, or null.
     * @param quietConflictWarnings true if conflict warnings should not be reported.
     * @return reference to ElemTemplate that is the best match for targetNode, or
     *         null if no match could be made.
     * @throws TransformerException
     */
    public ElemTemplate getTemplateComposed(XPathContext xctxt, int targetNode, QName mode, boolean quietConflictWarnings, DTM dtm) throws TransformerException {
        // Delegate to the composed template list, which (per the contract
        // above) already covers imported stylesheets.
        return m_templateList.getTemplate(xctxt, targetNode, mode, quietConflictWarnings, dtm);
    }
}
public class ChunkBlockHandler {
    /**
     * Adds a coordinate for the {@link Chunk Chunks} around {@link BlockPos}.
     *
     * @param world the world
     * @param pos the pos
     * @param size the size
     */
    private void addCoord(World world, BlockPos pos, int size) {
        // Register the position with every chunk touched by its size-radius
        // footprint, delegating to the per-chunk addCoord overload.
        getAffectedChunks(world, pos.getX(), pos.getZ(), size).forEach(c -> addCoord(c, pos));
    }
}
public class EventDispatcher {
    /**
     * Subscribes this dispatcher to quality-change events for the given attribute.
     *
     * @param attr_name the attribute name to subscribe to
     * @param filters event filters passed through to the supplier
     * @param stateless whether the subscription is stateless
     * @return the event subscription identifier returned by the supplier
     * @throws DevFailed propagated from the event supplier
     */
    protected int subscribe_quality_change_event(String attr_name, String[] filters, boolean stateless) throws DevFailed {
        // Delegate with the event type fixed to QUALITY_EVENT and this
        // dispatcher registered as the callback target.
        return event_supplier.subscribe_event(attr_name, QUALITY_EVENT, this, filters, stateless);
    }
}
public class WebUtils {
    /**
     * Executes an HTTP POST request with file upload (multipart/form-data).
     * Falls back to a plain form POST when there are no file parameters.
     *
     * @param url request URL
     * @param params text request parameters
     * @param fileParams file request parameters
     * @param charset character set, e.g. UTF-8, GBK, GB2312
     * @param connectTimeout connect timeout in milliseconds
     * @param readTimeout read timeout in milliseconds
     * @param proxyHost proxy host; pass null to disable the proxy
     * @param proxyPort proxy port; pass 0 to disable the proxy
     * @return the response body as a string
     * @throws IOException on connection or transfer failure (also logged)
     */
    public static String doPost(String url, Map<String, String> params, Map<String, FileItem> fileParams, String charset, int connectTimeout, int readTimeout, String proxyHost, int proxyPort) throws IOException {
        // No files: delegate to the simpler form-encoded POST overload.
        if (fileParams == null || fileParams.isEmpty()) {
            return doPost(url, params, charset, connectTimeout, readTimeout, proxyHost, proxyPort);
        }
        String boundary = System.currentTimeMillis() + ""; // pseudo-random multipart boundary
        HttpURLConnection conn = null;
        OutputStream out = null;
        String rsp = null;
        try {
            try {
                String ctype = "multipart/form-data;boundary=" + boundary + ";charset=" + charset;
                conn = null;
                if (!StringUtils.isEmpty(proxyHost)) {
                    conn = getConnection(new URL(url), METHOD_POST, ctype, proxyHost, proxyPort);
                } else {
                    conn = getConnection(new URL(url), METHOD_POST, ctype);
                }
                conn.setConnectTimeout(connectTimeout);
                conn.setReadTimeout(readTimeout);
            } catch (IOException e) {
                // Log connection-setup failures with request context, then rethrow.
                Map<String, String> map = getParamsFromUrl(url);
                AlipayLogger.logCommError(e, url, map.get("app_key"), map.get("method"), params);
                throw e;
            }
            try {
                out = conn.getOutputStream();
                byte[] entryBoundaryBytes = ("\r\n--" + boundary + "\r\n").getBytes(charset);
                // Write the text request parameters, one multipart entry each.
                Set<Entry<String, String>> textEntrySet = params.entrySet();
                for (Entry<String, String> textEntry : textEntrySet) {
                    byte[] textBytes = getTextEntry(textEntry.getKey(), textEntry.getValue(), charset);
                    out.write(entryBoundaryBytes);
                    out.write(textBytes);
                }
                // Write the file request parameters: entry header then raw content.
                Set<Entry<String, FileItem>> fileEntrySet = fileParams.entrySet();
                for (Entry<String, FileItem> fileEntry : fileEntrySet) {
                    FileItem fileItem = fileEntry.getValue();
                    byte[] fileBytes = getFileEntry(fileEntry.getKey(), fileItem.getFileName(), fileItem.getMimeType(), charset);
                    out.write(entryBoundaryBytes);
                    out.write(fileBytes);
                    out.write(fileItem.getContent());
                }
                // Write the closing multipart boundary.
                byte[] endBoundaryBytes = ("\r\n--" + boundary + "--\r\n").getBytes(charset);
                out.write(endBoundaryBytes);
                rsp = getResponseAsString(conn);
            } catch (IOException e) {
                // Log transfer failures with request context, then rethrow.
                Map<String, String> map = getParamsFromUrl(url);
                AlipayLogger.logCommError(e, conn, map.get("app_key"), map.get("method"), params);
                throw e;
            }
        } finally {
            if (out != null) {
                out.close();
            }
            if (conn != null) {
                conn.disconnect();
            }
        }
        return rsp;
    }
}
public class FastThreadLocal { /** * Set the value for the current thread . */ public final void set ( V value ) { } }
if ( value != InternalThreadLocalMap . UNSET ) { InternalThreadLocalMap threadLocalMap = InternalThreadLocalMap . get ( ) ; setKnownNotUnset ( threadLocalMap , value ) ; } else { remove ( ) ; }
public class Engraver { /** * Sets the engraving mode * < ul > < li > { @ link # NONE } : equal space between each note * < li > { @ link # DEFAULT } : smaller space for short note , bigger space for long notes , something that look nice ; ) * < / ul > * @ param mode { @ link # DEFAULT } or { @ link # NONE } * @ param variation , in % negative to reduce , positive * to improve space between notes . */ public void setMode ( int mode , int variation ) { } }
// only reset if needed if ( m_mode == mode && m_variation == variation ) return ; spacesAfter . clear ( ) ; m_mode = mode ; // bounds variation % // variation = Math . max ( variation , - 50 ) ; m_variation = Math . max ( VARIATION_MIN , Math . min ( VARIATION_MAX , variation ) ) ; if ( m_mode == DEFAULT ) { double factor = 1 + variation / 100f ; setSpaceAfter ( Note . DOTTED_WHOLE , 30 * factor ) ; setSpaceAfter ( Note . WHOLE , 25 * factor ) ; setSpaceAfter ( Note . DOTTED_HALF , 20 * factor ) ; setSpaceAfter ( Note . HALF , 15 * factor ) ; setSpaceAfter ( Note . DOTTED_QUARTER , 12 * factor ) ; setSpaceAfter ( Note . QUARTER , 10 * factor ) ; setSpaceAfter ( Note . DOTTED_EIGHTH , 7 * factor ) ; setSpaceAfter ( Note . EIGHTH , 5 * factor ) ; setSpaceAfter ( Note . DOTTED_SIXTEENTH , 2 * factor ) ; // invert factor factor = 1 - variation / 100 ; setSpaceAfter ( Note . SIXTEENTH , - 1 * factor ) ; setSpaceAfter ( Note . DOTTED_THIRTY_SECOND , - 2 * factor ) ; setSpaceAfter ( Note . THIRTY_SECOND , - 3 * factor ) ; setSpaceAfter ( Note . DOTTED_SIXTY_FOURTH , - 4 * factor ) ; setSpaceAfter ( Note . SIXTY_FOURTH , - 5 * factor ) ; } // else { / / mode = = NONE // / / do nothing , will always return 0
public class Post {
    /**
     * Adds children to a post.
     *
     * @param children The children to add; null yields an empty child list.
     * @return A new Post with every other field copied unchanged and the
     *         children replaced by an immutable defensive copy.
     */
    public final Post withChildren(final List<Post> children) {
        // Copy-on-write style: construct a fresh Post, substituting only the
        // children (immutable copy; empty list when null is passed).
        return new Post(id, slug, title, excerpt, content, authorId, author, publishTimestamp, modifiedTimestamp, status, parentId, guid, commentCount, this.metadata, type, mimeType, taxonomyTerms, children != null ? ImmutableList.copyOf(children) : ImmutableList.of());
    }
}
public class CollectionHelpers {
    /**
     * Returns an unmodifiable Collection View made up of the given Collections while translating the items into a common type.
     * The returned Collection View does not copy any of the data from any of the given Collections, therefore any changes
     * in the two Collections will be reflected in the View.
     *
     * @param c1 The first Collection, which contains items of type Type1.
     * @param converter1 A Function that translates from Type1 to OutputType.
     * @param c2 The second Collection, which contains items of type Type2.
     * @param converter2 A Function that translates from Type2 to OutputType.
     * @param <OutputType> The type of the items in the returned Collection View.
     * @param <Type1> The type of the items in Collection 1.
     * @param <Type2> The type of the items in Collection 2.
     * @return A new Collection View made up of the two Collections, with translation applied.
     */
    public static <OutputType, Type1, Type2> Collection<OutputType> joinCollections(Collection<Type1> c1, Function<Type1, OutputType> converter1, Collection<Type2> c2, Function<Type2, OutputType> converter2) {
        // View, not copy: the converters are applied lazily on access.
        return new ConvertedSetView<>(c1, converter1, c2, converter2);
    }
}
public class DisambiguateProperties { /** * Returns the corresponding instance if ` maybePrototype ` is a prototype of a constructor , * otherwise null */ @ Nullable private static JSType getInstanceIfPrototype ( JSType maybePrototype ) { } }
if ( maybePrototype . isFunctionPrototypeType ( ) ) { FunctionType constructor = maybePrototype . toObjectType ( ) . getOwnerFunction ( ) ; if ( constructor != null ) { if ( ! constructor . hasInstanceType ( ) ) { // this can happen when adding to the prototype of a non - constructor function return null ; } return constructor . getInstanceType ( ) ; } } return null ;
public class RouteDelegate { /** * Determines whether the given annotation is a ' constraint ' or not . * It just checks if the annotation has the { @ link Constraint } annotation on it or if the annotation is the { @ link * Valid } annotation . * @ param annotation the annotation to check * @ return { @ code true } if the given annotation is a constraint */ private static boolean isConstraint ( Annotation annotation ) { } }
return annotation . annotationType ( ) . isAnnotationPresent ( Constraint . class ) || annotation . annotationType ( ) . equals ( Valid . class ) ;
public class Dater { /** * Sets the hour , minute , second to the delegate date , format is { @ link DateStyle # CLOCK } * @ see # setClock ( int , int , int ) * @ see DateStyle # CLOCK * @ param clock * @ return */ public Dater setClock ( String clock ) { } }
String tip = "clock format must HH:mm:ss" ; checkArgument ( checkNotNull ( clock ) . length ( ) == 8 , tip ) ; List < String > pieces = Splitter . on ( ":" ) . splitToList ( clock ) ; checkArgument ( pieces . size ( ) == 3 , tip ) ; return setClock ( Ints . tryParse ( pieces . get ( 0 ) ) , Ints . tryParse ( pieces . get ( 1 ) ) , Ints . tryParse ( pieces . get ( 2 ) ) ) ;
public class Regex { /** * Splits the input * @ param in * @ param bufferSize * @ param limit See java . util . Pattern . split * @ return * @ throws IOException * @ see Pattern . split */ public String [ ] split ( PushbackReader in , int bufferSize , int limit ) throws IOException { } }
InputReader reader = Input . getInstance ( in , bufferSize ) ; List < String > list = split ( reader , limit ) ; return list . toArray ( new String [ list . size ( ) ] ) ;
public class FSNamesystem {
    /**
     * Create all the necessary directories for the given path.
     *
     * @param src the directory path to create
     * @param permissions permission status (owner/group/mode) for the new directories
     * @return the INode of the deepest directory (existing or newly created)
     * @throws IOException if the path is invalid or creation fails
     * @throws SafeModeException if the namesystem is in safe mode
     */
    private INode mkdirsInternal(String src, PermissionStatus permissions) throws IOException {
        src = dir.normalizePath(src);
        // tokenize the src into components
        String[] names = INodeDirectory.getPathNames(src);
        if (!pathValidator.isValidName(src, names)) {
            numInvalidFilePathOperations++;
            throw new IOException("Invalid directory name: " + src);
        }
        // check validity of the username
        checkUserName(permissions);
        // convert the names into an array of bytes w/o holding lock
        byte[][] components = INodeDirectory.getPathComponents(names);
        INode[] inodes = new INode[components.length];
        writeLock();
        try {
            if (NameNode.stateChangeLog.isDebugEnabled()) {
                NameNode.stateChangeLog.debug("DIR* NameSystem.mkdirs: " + src);
            }
            // Resolve as much of the path as already exists into 'inodes'.
            dir.rootDir.getExistingPathINodes(components, inodes);
            if (isPermissionEnabled && isPermissionCheckingEnabled(inodes)) {
                checkTraverse(src, inodes);
            }
            INode lastINode = inodes[inodes.length - 1];
            if (lastINode != null && lastINode.isDirectory()) {
                // all the users of mkdirs() are used to expect 'true' even if
                // a new directory is not created.
                return lastINode;
            }
            if (isInSafeMode()) {
                throw new SafeModeException("Cannot create directory " + src, safeMode);
            }
            if (isPermissionEnabled && isPermissionCheckingEnabled(inodes)) {
                checkAncestorAccess(src, inodes, FsAction.WRITE);
            }
            // validate that we have enough inodes. This is, at best, a
            // heuristic because the mkdirs() operation might need to
            // create multiple inodes.
            checkFsObjectLimit();
            if (!dir.mkdirs(src, names, components, inodes, inodes.length, permissions, false, now())) {
                throw new IOException("Invalid directory name: " + src);
            }
            return inodes[inodes.length - 1];
        } finally {
            writeUnlock();
        }
    }
}
public class Http {
    /**
     * Makes POST request to given URL.
     *
     * @param url url
     * @param body request body to post or null to skip
     * @param query query to append to url or null to skip
     * @return Response object with HTTP response code and response as String
     * @throws HttpException in case of invalid input parameters
     */
    public static Response post(String url, String body, Map<String, String> query) throws HttpException {
        // Convenience overload delegating with default timeouts; the null
        // fourth argument is presumably request headers -- confirm against the
        // full overload's signature.
        return post(url, body, query, null, DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT);
    }
}
public class GenericSQLHelper {
    /**
     * Generate generic exec SQL: rewrites the method's JQL into SQLite SQL,
     * classifies each bind parameter as a content value (before WHERE) or a
     * where-arg (inside WHERE), and emits the code that fills the content
     * values / where args and finally executes the statement.
     *
     * @param methodBuilder the method builder receiving the generated code
     * @param method the DAO method being processed
     */
    public static void generateGenericExecSQL(MethodSpec.Builder methodBuilder, final SQLiteModelMethod method) {
        boolean nullable;
        // Parameters found inside the WHERE clause (become where args).
        final List<String> paramsList = new ArrayList<String>();
        // Parameters found outside the WHERE clause (become content values).
        final List<String> contentValueList = new ArrayList<String>();
        // Flag flipped by the listener: true while we are OUTSIDE the WHERE clause.
        final One<Boolean> columnsToUpdate = new One<Boolean>(true);
        // Rewrite the JQL: resolve property names to column names and replace
        // each bind parameter with '?', recording which bucket it belongs to.
        String sql = JQLChecker.getInstance().replace(method, method.jql, new JQLReplacerListenerImpl(method) {
            @Override
            public void onWhereStatementBegin(Where_stmtContext ctx) {
                super.onWhereStatementBegin(ctx);
                columnsToUpdate.value0 = false;
            }
            @Override
            public void onWhereStatementEnd(Where_stmtContext ctx) {
                super.onWhereStatementEnd(ctx);
                columnsToUpdate.value0 = true;
            }
            @Override
            public String onColumnName(String columnName) {
                String resolvedName = currentSchema.findColumnNameByPropertyName(method, columnName);
                AssertKripton.assertTrueOrUnknownPropertyInJQLException(resolvedName != null, method, columnName);
                return resolvedName;
            }
            @Override
            public String onBindParameter(String bindParameterName, boolean inStatement) {
                String propertyName = method.findParameterAliasByName(bindParameterName);
                if (columnsToUpdate.value0) {
                    contentValueList.add(propertyName);
                } else {
                    paramsList.add(propertyName);
                }
                return "?";
            }
        });
        // update/insert columns
        final SQLiteEntity entity = method.getEntity();
        for (String item : contentValueList) {
            // ASSERT: property is always in entity
            String propertyName = method.findParameterNameByAlias(item);
            TypeName paramType = method.findParameterTypeByAliasOrName(item);
            SQLProperty property = entity.get(item);
            if (propertyName == null)
                throw (new PropertyNotFoundException(method, propertyName, paramType));
            Pair<String, TypeName> methodParam = new Pair<String, TypeName>(propertyName, paramType);
            // check same type
            TypeUtility.checkTypeCompatibility(method, methodParam, property);
            // check nullabliity
            // nullable = TypeUtility.isNullable(method, methodParam, property);
            // if (nullable) {
            // // it use raw method param's typeName
            // methodBuilder.beginControlFlow("if ($L!=null)", methodParam.value0);
            if (method.isLogEnabled()) {
                methodBuilder.addCode("_contentValues.put($S, ", property.columnName);
            } else {
                methodBuilder.addCode("_contentValues.put(");
            }
            // it does not need to be converted in string
            SQLTransformer.javaMethodParam2ContentValues(methodBuilder, method, methodParam.value0, methodParam.value1, property);
            methodBuilder.addCode(");\n");
            // if (nullable) {
            // methodBuilder.nextControlFlow("else");
            // if (method.isLogEnabled()) {
            // methodBuilder.addStatement("_contentValues.putNull($S)", property.columnName);
            // } else {
            // methodBuilder.addStatement("_contentValues.putNull()");
            // methodBuilder.endControlFlow();
        }
        // where condition
        methodBuilder.addComment("build where condition");
        {
            // String separator = "";
            TypeName paramType;
            String realName;
            for (String item : paramsList) {
                methodBuilder.addCode("_contentValues.addWhereArgs(");
                paramType = method.findParameterTypeByAliasOrName(item);
                realName = method.findParameterNameByAlias(item);
                AssertKripton.assertTrueOrUnknownPropertyInJQLException(paramType != null, method, item);
                // code for query arguments
                nullable = TypeUtility.isNullable(paramType);
                if (nullable) {
                    // nullable args map to the empty string rather than "null"
                    methodBuilder.addCode("($L==null?\"\":", realName);
                }
                // check for string conversion
                TypeUtility.beginStringConversion(methodBuilder, paramType);
                SQLTransformer.javaMethodParam2ContentValues(methodBuilder, method, realName, paramType, null);
                // check for string conversion
                TypeUtility.endStringConversion(methodBuilder, paramType);
                if (nullable) {
                    methodBuilder.addCode(")");
                }
                methodBuilder.addCode(");\n");
            }
        }
        // log for where parames
        SqlBuilderHelper.generateLog(method, methodBuilder);
        // log
        methodBuilder.addCode("\n");
        // Finally emit the statement that runs the rewritten SQL with the where args.
        methodBuilder.addStatement("database().execSQL($S, _contentValues.whereArgsAsArray())", sql);
    }
}
public class PiElectronegativityDescriptor { /** * Gets the parameters attribute of the PiElectronegativityDescriptor * object * @ return The parameters value */ @ Override public Object [ ] getParameters ( ) { } }
// return the parameters as used for the descriptor calculation Object [ ] params = new Object [ 3 ] ; params [ 0 ] = maxIterations ; params [ 1 ] = lpeChecker ; params [ 2 ] = maxResonStruc ; return params ;
public class ParameterBuilder { /** * Sets a regular expression that must match for parameter values to be considered as valid . */ public ParameterBuilder matching ( String regex ) { } }
Validate . notNull ( regex , "regex is required" ) ; this . pattern = Pattern . compile ( regex ) ; return this ;
public class WebSocket {
    /**
     * Stop both the reading thread and the writing thread.
     * The reading thread will call {@link #onReadingThreadFinished(WebSocketFrame)}
     * as its last step. Likewise, the writing thread will call
     * {@link #onWritingThreadFinished(WebSocketFrame)} as its last step.
     * After both the threads have stopped, {@link #onThreadsFinished()} is called.
     *
     * @param closeDelay delay passed to the reading thread's stop request
     */
    private void stopThreads(long closeDelay) {
        ReadingThread readingThread;
        WritingThread writingThread;
        // Snapshot and clear the thread references under the lock so the
        // (potentially blocking) stop requests below run outside of it.
        synchronized (mThreadsLock) {
            readingThread = mReadingThread;
            writingThread = mWritingThread;
            mReadingThread = null;
            mWritingThread = null;
        }
        if (readingThread != null) {
            readingThread.requestStop(closeDelay);
        }
        if (writingThread != null) {
            writingThread.requestStop();
        }
    }
}
public class KatharsisBoot {
    /**
     * Sets the default page limit for requests that return a collection of elements. If the api user does not
     * specify the page limit, then this default value will be used.
     * This is important to prevent denial of service attacks on the server.
     * NOTE: Using this feature requires a {@link QuerySpecDeserializer} and it does not work with the
     * deprecated {@link QueryParamsBuilder}.
     *
     * @param defaultPageLimit the limit applied when the request specifies none
     */
    public void setDefaultPageLimit(Long defaultPageLimit) {
        PreconditionUtil.assertNotNull("Setting the default page limit requires using the QuerySpecDeserializer, but " + "it is null. Are you using QueryParams instead?" , this.querySpecDeserializer);
        // NOTE(review): unchecked downcast -- a custom QuerySpecDeserializer
        // that is not a DefaultQuerySpecDeserializer will fail here with a
        // ClassCastException; confirm that is the intended contract.
        ((DefaultQuerySpecDeserializer) this.querySpecDeserializer).setDefaultLimit(defaultPageLimit);
    }
}
public class OptionUtil {
    /**
     * Format a description of a Parameterizable (including recursive options).
     *
     * @param buf Buffer to append to.
     * @param pcls Parameterizable class to describe
     * @param width Width
     * @param indent Text indent (NOTE(review): currently unused in the body --
     *        confirm whether it should be passed to the formatting calls)
     * @return Formatted description
     * @throws ClassInstantiationException if the class cannot be instantiated
     */
    public static StringBuilder describeParameterizable(StringBuilder buf, Class<?> pcls, int width, String indent) throws ClassInstantiationException {
        println(buf, width, "Description for class " + pcls.getName());
        // Emit @Title and @Description annotation texts when present and non-empty.
        Title title = pcls.getAnnotation(Title.class);
        if (title != null && title.value() != null && !title.value().isEmpty()) {
            println(buf, width, title.value());
        }
        Description desc = pcls.getAnnotation(Description.class);
        if (desc != null && desc.value() != null && !desc.value().isEmpty()) {
            println(buf, width, desc.value());
        }
        // Emit each bibliographic @Reference annotation.
        for (Reference ref : pcls.getAnnotationsByType(Reference.class)) {
            if (!ref.prefix().isEmpty()) {
                println(buf, width, ref.prefix());
            }
            println(buf, width, ref.authors());
            println(buf, width, ref.title());
            println(buf, width, ref.booktitle());
            if (ref.url().length() > 0) {
                println(buf, width, ref.url());
            }
        }
        // Instantiate the class with a tracking parameterization purely to
        // discover its (recursive) options; the instance itself is discarded.
        SerializedParameterization config = new SerializedParameterization();
        TrackParameters track = new TrackParameters(config);
        @SuppressWarnings("unused")
        Object p = ClassGenericsUtil.tryInstantiate(Object.class, pcls, track);
        Collection<TrackedParameter> options = track.getAllParameters();
        if (!options.isEmpty()) {
            OptionUtil.formatForConsole(buf, width, options);
        }
        return buf;
    }
}
public class AudioStreamEncoder { /** * Encodes the given AudioInputStream , using the given FLACEncoder . * FLACEncoder must be in a state to accept samples and encode ( FLACOutputStream , * EncodingConfiguration , and StreamConfiguration have been set , and FLAC stream * has been opened ) . * @ param sin * @ return * @ throws IOException * @ throws IllegalArgumentException thrown if input sample size is not supported */ public static int encodeAudioInputStream ( AudioInputStream sin , int maxRead , FLACEncoder flac , boolean useThreads ) throws IOException , IllegalArgumentException { } }
AudioFormat format = sin . getFormat ( ) ; int frameSize = format . getFrameSize ( ) ; int sampleSize = format . getSampleSizeInBits ( ) ; int bytesPerSample = sampleSize / 8 ; if ( sampleSize % 8 != 0 ) { // end processing now throw new IllegalArgumentException ( "Unsupported Sample Size: size = " + sampleSize ) ; } int channels = format . getChannels ( ) ; boolean bigEndian = format . isBigEndian ( ) ; boolean isSigned = format . getEncoding ( ) == AudioFormat . Encoding . PCM_SIGNED ; byte [ ] samplesIn = new byte [ ( int ) maxRead ] ; int samplesRead ; int framesRead ; int [ ] sampleData = new int [ maxRead * channels / frameSize ] ; int unencodedSamples = 0 ; int totalSamples = 0 ; while ( ( samplesRead = sin . read ( samplesIn , 0 , maxRead ) ) > 0 ) { framesRead = samplesRead / ( frameSize ) ; if ( bigEndian ) { for ( int i = 0 ; i < framesRead * channels ; i ++ ) { int lower8Mask = 255 ; int temp = 0 ; int totalTemp = 0 ; for ( int x = bytesPerSample - 1 ; x >= 0 ; x ++ ) { int upShift = 8 * x ; if ( x == 0 ) // don ' t mask . . . we want sign temp = ( ( samplesIn [ bytesPerSample * i + x ] ) << upShift ) ; else temp = ( ( samplesIn [ bytesPerSample * i + x ] & lower8Mask ) << upShift ) ; totalTemp = totalTemp | temp ; } if ( ! isSigned ) { int reducer = 1 << ( bytesPerSample * 8 - 1 ) ; totalTemp -= reducer ; } sampleData [ i ] = totalTemp ; } } else { for ( int i = 0 ; i < framesRead * channels ; i ++ ) { int lower8Mask = 255 ; int temp = 0 ; int totalTemp = 0 ; for ( int x = 0 ; x < bytesPerSample ; x ++ ) { int upShift = 8 * x ; if ( x == bytesPerSample - 1 && isSigned ) // don ' t mask . . . we want sign temp = ( ( samplesIn [ bytesPerSample * i + x ] ) << upShift ) ; else temp = ( ( samplesIn [ bytesPerSample * i + x ] & lower8Mask ) << upShift ) ; totalTemp = totalTemp | temp ; } if ( ! isSigned ) { int reducer = 1 << ( bytesPerSample * 8 - 1 ) ; totalTemp -= reducer ; } sampleData [ i ] = totalTemp ; } } if ( framesRead > 0 ) { flac . 
addSamples ( sampleData , framesRead ) ; unencodedSamples += framesRead ; } if ( useThreads ) unencodedSamples -= flac . t_encodeSamples ( unencodedSamples , false , 5 ) ; else unencodedSamples -= flac . encodeSamples ( unencodedSamples , false ) ; totalSamples += unencodedSamples ; } totalSamples += unencodedSamples ; if ( useThreads ) unencodedSamples -= flac . t_encodeSamples ( unencodedSamples , true , 5 ) ; else unencodedSamples -= flac . encodeSamples ( unencodedSamples , true ) ; return totalSamples ;
public class DescribeDBSecurityGroupsResult { /** * A list of < a > DBSecurityGroup < / a > instances . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDBSecurityGroups ( java . util . Collection ) } or { @ link # withDBSecurityGroups ( java . util . Collection ) } if you * want to override the existing values . * @ param dBSecurityGroups * A list of < a > DBSecurityGroup < / a > instances . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeDBSecurityGroupsResult withDBSecurityGroups ( DBSecurityGroup ... dBSecurityGroups ) { } }
if ( this . dBSecurityGroups == null ) { setDBSecurityGroups ( new com . amazonaws . internal . SdkInternalList < DBSecurityGroup > ( dBSecurityGroups . length ) ) ; } for ( DBSecurityGroup ele : dBSecurityGroups ) { this . dBSecurityGroups . add ( ele ) ; } return this ;
public class ScriptModuleLoader { /** * Add or update the existing { @ link ScriptModule } s with the given script archives . * This method will convert the archives to modules and then compile + link them in to the * dependency graph . It will then recursively re - link any modules depending on the new modules . * If this loader already contains an old version of the module , it will be unloaded on * successful compile of the new module . * @ param candidateArchives archives to load or update */ public synchronized void updateScriptArchives ( Set < ? extends ScriptArchive > candidateArchives ) { } }
Objects . requireNonNull ( candidateArchives ) ; long updateNumber = System . currentTimeMillis ( ) ; // map script module id to archive to be compiled Map < ModuleId , ScriptArchive > archivesToCompile = new HashMap < ModuleId , ScriptArchive > ( candidateArchives . size ( ) * 2 ) ; // create an updated mapping of the scriptModuleId to latest revisionId including the yet - to - be - compiled archives Map < ModuleId , ModuleIdentifier > oldRevisionIdMap = jbossModuleLoader . getLatestRevisionIds ( ) ; Map < ModuleId , ModuleIdentifier > updatedRevisionIdMap = new HashMap < ModuleId , ModuleIdentifier > ( ( oldRevisionIdMap . size ( ) + candidateArchives . size ( ) ) * 2 ) ; updatedRevisionIdMap . putAll ( oldRevisionIdMap ) ; // Map of the scriptModuleId to it ' s updated set of dependencies Map < ModuleId , Set < ModuleId > > archiveDependencies = new HashMap < ModuleId , Set < ModuleId > > ( ) ; for ( ScriptArchive scriptArchive : candidateArchives ) { ModuleId scriptModuleId = scriptArchive . getModuleSpec ( ) . getModuleId ( ) ; // filter out archives that have a newer module already loaded long createTime = scriptArchive . getCreateTime ( ) ; ScriptModule scriptModule = loadedScriptModules . get ( scriptModuleId ) ; long latestCreateTime = scriptModule != null ? scriptModule . getCreateTime ( ) : 0 ; if ( createTime < latestCreateTime ) { notifyArchiveRejected ( scriptArchive , ArchiveRejectedReason . HIGHER_REVISION_AVAILABLE , null ) ; continue ; } // create the new revisionIds that should be used for the linkages when the new modules // are defined . ModuleIdentifier newRevisionId = JBossModuleUtils . createRevisionId ( scriptModuleId , updateNumber ) ; updatedRevisionIdMap . put ( scriptModuleId , newRevisionId ) ; archivesToCompile . put ( scriptModuleId , scriptArchive ) ; // create a dependency map of the incoming archives so that we can later build a candidate graph archiveDependencies . put ( scriptModuleId , scriptArchive . getModuleSpec ( ) . 
getModuleDependencies ( ) ) ; } // create a dependency graph with the candidates swapped in in order to figure out the // order in which the candidates should be loaded DirectedGraph < ModuleId , DefaultEdge > candidateGraph = jbossModuleLoader . getModuleNameGraph ( ) ; GraphUtils . swapVertices ( candidateGraph , archiveDependencies ) ; // iterate over the graph in reverse dependency order Set < ModuleId > leaves = GraphUtils . getLeafVertices ( candidateGraph ) ; while ( ! leaves . isEmpty ( ) ) { for ( ModuleId scriptModuleId : leaves ) { ScriptArchive scriptArchive = archivesToCompile . get ( scriptModuleId ) ; if ( scriptArchive == null ) { continue ; } ModuleSpec moduleSpec ; ModuleIdentifier candidateRevisionId = updatedRevisionIdMap . get ( scriptModuleId ) ; Path modulePath = createModulePath ( candidateRevisionId ) ; final Path moduleCompilationRoot = compilationRootDir . resolve ( modulePath ) ; FileUtils . deleteQuietly ( moduleCompilationRoot . toFile ( ) ) ; try { Files . createDirectories ( moduleCompilationRoot ) ; } catch ( IOException ioe ) { notifyArchiveRejected ( scriptArchive , ArchiveRejectedReason . ARCHIVE_IO_EXCEPTION , ioe ) ; } try { moduleSpec = createModuleSpec ( scriptArchive , candidateRevisionId , updatedRevisionIdMap , moduleCompilationRoot ) ; } catch ( ModuleLoadException e ) { logger . error ( "Exception loading archive " + scriptArchive . getModuleSpec ( ) . getModuleId ( ) , e ) ; notifyArchiveRejected ( scriptArchive , ArchiveRejectedReason . ARCHIVE_IO_EXCEPTION , e ) ; continue ; } // load and compile the module jbossModuleLoader . addModuleSpec ( moduleSpec ) ; Module jbossModule = null ; try { jbossModule = jbossModuleLoader . loadModule ( candidateRevisionId ) ; compileModule ( jbossModule , moduleCompilationRoot ) ; // Now refresh the resource loaders for this module , and load the set of // compiled classes and populate into the module ' s local class cache . jbossModuleLoader . 
rescanModule ( jbossModule ) ; final Set < String > classesToLoad = new LinkedHashSet < String > ( ) ; Files . walkFileTree ( moduleCompilationRoot , new SimpleFileVisitor < Path > ( ) { public FileVisitResult visitFile ( Path file , BasicFileAttributes attrs ) throws IOException { String relativePath = moduleCompilationRoot . relativize ( file ) . toString ( ) ; if ( relativePath . endsWith ( ".class" ) ) { String className = relativePath . replaceAll ( ".class" , "" ) . replace ( "/" , "." ) ; classesToLoad . add ( className ) ; } return FileVisitResult . CONTINUE ; } ; } ) ; for ( String loadClass : classesToLoad ) { Class < ? > loadedClass = jbossModule . getClassLoader ( ) . loadClassLocal ( loadClass , true ) ; if ( loadedClass == null ) throw new ScriptCompilationException ( "Unable to load compiled class: " + loadClass ) ; } } catch ( Exception e ) { // rollback logger . error ( "Exception loading module " + candidateRevisionId , e ) ; if ( candidateArchives . contains ( scriptArchive ) ) { // this spec came from a candidate archive . Send reject notification notifyArchiveRejected ( scriptArchive , ArchiveRejectedReason . COMPILE_FAILURE , e ) ; } if ( jbossModule != null ) { jbossModuleLoader . unloadModule ( jbossModule ) ; } continue ; } // commit the change by removing the old module ModuleIdentifier oldRevisionId = oldRevisionIdMap . get ( scriptModuleId ) ; if ( oldRevisionId != null ) { jbossModuleLoader . unloadModule ( oldRevisionId ) ; } JBossScriptModule scriptModule = new JBossScriptModule ( scriptModuleId , jbossModule , scriptArchive ) ; ScriptModule oldModule = loadedScriptModules . put ( scriptModuleId , scriptModule ) ; notifyModuleUpdate ( scriptModule , oldModule ) ; // find dependents and add them to the to be compiled set Set < ModuleId > dependents = GraphUtils . getIncomingVertices ( candidateGraph , scriptModuleId ) ; for ( ModuleId dependentScriptModuleId : dependents ) { if ( ! archivesToCompile . 
containsKey ( dependentScriptModuleId ) ) { ScriptModule dependentScriptModule = loadedScriptModules . get ( dependentScriptModuleId ) ; if ( dependentScriptModule != null ) { archivesToCompile . put ( dependentScriptModuleId , dependentScriptModule . getSourceArchive ( ) ) ; ModuleIdentifier dependentRevisionId = JBossModuleUtils . createRevisionId ( dependentScriptModuleId , updateNumber ) ; updatedRevisionIdMap . put ( dependentScriptModuleId , dependentRevisionId ) ; } } } } GraphUtils . removeVertices ( candidateGraph , leaves ) ; leaves = GraphUtils . getLeafVertices ( candidateGraph ) ; }
public class StandardLinkBuilder { /** * Process an already - built URL just before returning it . * By default , this method will apply the { @ code HttpServletResponse . encodeURL ( url ) } mechanism , as standard * when using the Java Servlet API . Note however that this will only be applied if { @ code context } is * an implementation of { @ code IWebContext } ( i . e . the Servlet API will only be applied in web environments ) . * This method can be overridden by any subclasses that want to change this behaviour ( e . g . in order to * avoid using the Servlet API ) . * @ param context the execution context . * @ param link the already - built URL . * @ return the processed URL , ready to be used . */ protected String processLink ( final IExpressionContext context , final String link ) { } }
if ( ! ( context instanceof IWebContext ) ) { return link ; } final HttpServletResponse response = ( ( IWebContext ) context ) . getResponse ( ) ; return ( response != null ? response . encodeURL ( link ) : link ) ;
public class BaseMessageFilter { /** * Set the message receiver for this filter . * @ param messageReceiver The message receiver . */ public void setMessageReceiver ( BaseMessageReceiver messageReceiver , Integer intID ) { } }
if ( ( messageReceiver != null ) || ( intID != null ) ) if ( ( m_intID != null ) || ( m_messageReceiver != null ) ) Util . getLogger ( ) . warning ( "BaseMessageFilter/setMessageReceiver()----Error - Filter added twice." ) ; m_messageReceiver = messageReceiver ; m_intID = intID ;
public class RemoteMongoCollectionImpl { /** * Finds all documents in the collection . * @ param resultClass the class to decode each document into * @ param < ResultT > the target document type of the iterable . * @ return the find iterable interface */ public < ResultT > RemoteFindIterable < ResultT > find ( final Class < ResultT > resultClass ) { } }
return new RemoteFindIterableImpl < > ( proxy . find ( resultClass ) , dispatcher ) ;
public class PeerConnectionInt { /** * Call this method when new candidate arrived from other peer * @ param index index of media in sdp * @ param id id of candidate * @ param sdp sdp of candidate */ public void onCandidate ( long sessionId , int index , String id , String sdp ) { } }
send ( new PeerConnectionActor . OnCandidate ( sessionId , index , id , sdp ) ) ;
public class PassConfig {

    /**
     * Regenerates the top scope potentially only for a sub-tree of AST and then
     * copies information for the old global scope.
     *
     * @param compiler The compiler for which the global scope is generated.
     *        (NOTE(review): not referenced in this body — confirm it is still needed.)
     * @param scriptRoot The root of the AST used to generate global scope.
     */
    void patchGlobalTypedScope(AbstractCompiler compiler, Node scriptRoot) {
        // A typed scope creator must already exist; patching without one is a
        // programming error, hence the hard precondition.
        checkNotNull(typedScopeCreator);
        typedScopeCreator.patchGlobalScope(topScope, scriptRoot);
    }
}
public class GaussianLikelihoodManager { /** * Precomputes likelihood for all the mixtures */ public void precomputeAll ( ) { } }
precomputes . resize ( mixtures . size ( ) ) ; for ( int i = 0 ; i < precomputes . size ; i ++ ) { precomputes . get ( i ) . setGaussian ( mixtures . get ( i ) ) ; }
public class InterpretedContainerImpl {

    /**
     * This method will create a new root Container for the supplied <code>entity</code> when moving up from this
     * container. As we are moving up from this container the <code>entity</code> being passed in should either be the
     * one returned from <code>delegate.getEntryInEnclosingContainer</code> or <code>delegate.getEnclosingContainer()</code>.
     *
     * <p>When creating the new root this will either use the root {@link ArtifactContainer} for the
     * <code>entity</code> as its delegate or, if the {@link StructureHelper} for this object says there is a fake root
     * in the tree for the entity, then that fake root will be used.</p>
     *
     * @param entity The entity to get the root for
     * @param firstPotentialRoot This is the first {@link ArtifactContainer} that could potentially be a fake root from
     *        the structure helper. When <code>entity</code> is an {@link ArtifactEntry} this is
     *        <code>entity.getEnclosingContainer()</code>; when <code>entity</code> is an {@link ArtifactContainer} it
     *        could be a root itself, so <code>firstPotentialRoot</code> will be the same as <code>entity</code>.
     * @return The information used to create the new root container
     */
    private RootInformation getRootInformation(EnclosedEntity entity, ArtifactContainer firstPotentialRoot) {
        // first we need to see if we need a new overlay container; you only get new overlays for new artifact roots,
        // not fake roots
        OverlayContainer newOverlay = rootOverlay;
        if (delegate.isRoot()) {
            // the delegate was root, so we're moving up to a new artifact root, and we have to correct the overlay.
            // The overlay is simple: it always has a 1:1 relationship with artifact roots, so as we have gone up a
            // root, we make it do so also.
            newOverlay = rootOverlay.getParentOverlay();
        }
        // now we need to work out what the root is for this entity. There are two options: either a fake root or a
        // real root. To see if we need to use a fake root we ask the structure helper if the path to this entity is
        // valid. We only ask the structure helper this question if the path contains containers, e.g.
        //   /filename            does not,
        //   /container/filename  does,
        // because if it is just /filename then there can't be a fake root above it. Note that if filename is a
        // container then it might be a fake root itself; that is ok though, as when we create a new
        // InterpretedContainerImpl for it the constructor will do a test to see if it should be a fake root.
        String ePath = entity.getPath();
        // remove leading / from path so we can test if this has containers above it
        ePath = ePath.substring(1);
        ArtifactContainer structureHelperSetRootDelegate = null;
        final ArtifactContainer newRootDelegate;
        if (ePath.indexOf("/") == -1 || (structureHelper != null && structureHelper.isValid(entity.getRoot(), entity.getPath()))) {
            // easy case: the entry in the enclosing container is supposed to be there, so we don't need to pass a
            // fake root node, because the node did exist under the artifact api root.
            // Leave structureHelperSetRootDelegate as null, as the new root won't have a structure-helper root
            // delegate... it will have a normal root delegate though, so set this.
            newRootDelegate = entity.getRoot();
        } else {
            // walk up the enclosing container chain, asking the structure helper if each should be considered as
            // the root.
            ArtifactContainer enclosing = firstPotentialRoot;
            while (!enclosing.isRoot()) {
                if (structureHelper != null && structureHelper.isRoot(enclosing)) {
                    structureHelperSetRootDelegate = enclosing;
                    break;
                }
                enclosing = enclosing.getEnclosingContainer();
            }
            if (structureHelper == null) {
                // no helper: the loop ran to the real artifact root, use it.
                newRootDelegate = enclosing;
            } else {
                if (structureHelperSetRootDelegate == null) {
                    // this means the structure helper told us a path was not valid, but did not identify to us which
                    // container as part of that path should have been the new root.
                    throw new IllegalStateException();
                }
                // we've found the root delegate from the structure helper, so use this as the root delegate for the
                // new root
                newRootDelegate = structureHelperSetRootDelegate;
            }
        }
        // the adaptable root for the new adaptable entry is derivable, because we only need to tell containers the
        // correct overlay: we can initialise it with the correct overlay and correct structure-helper delegate.
        Container newRoot = new InterpretedContainerImpl(newRootDelegate, newOverlay, factoryHolder, structureHelper, structureHelperSetRootDelegate);
        return new RootInformation(newRoot, newOverlay, structureHelperSetRootDelegate);
    }
}
public class LatentRelationalAnalysis { /** * Searches an index given the index directory and counts up the frequncy of the two words used in a phrase . * @ param indexDir a String containing the directory where the index is stored * @ param A a { @ code String } containing the first word of the phrase * @ param B a { @ code String } containing the last word of the phrase * @ return float */ public static float countPhraseFrequencies ( String indexDir , String A , String B ) { } }
File indexDir_f = new File ( indexDir ) ; if ( ! indexDir_f . exists ( ) || ! indexDir_f . isDirectory ( ) ) { System . err . println ( "Search failed: index directory does not exist" ) ; } else { try { return searchPhrase ( indexDir_f , A , B ) ; } catch ( Exception e ) { System . err . println ( "Unable to search " + indexDir ) ; return 0 ; } } return 0 ;
public class CachingAuthenticator { /** * Discards any cached principal for the collection of credentials satisfying the given predicate . * @ param predicate a predicate to filter credentials */ public void invalidateAll ( Predicate < ? super C > predicate ) { } }
final Set < C > keys = cache . asMap ( ) . keySet ( ) . stream ( ) . filter ( predicate ) . collect ( Collectors . toSet ( ) ) ; cache . invalidateAll ( keys ) ;
public class TextUtil { /** * Merge the given strings with to separators . * The separators are used to delimit the groups * of characters . * < p > Examples : * < ul > * < li > < code > merge ( ' { ' , ' } ' , " a " , " b " , " cd " ) < / code > returns the string * < code > " { a } { b } { cd } " < / code > < / li > * < li > < code > merge ( ' { ' , ' } ' , " a { bcd " ) < / code > returns the string * < code > " { a { bcd } " < / code > < / li > * < / ul > * @ param < T > is the type of the parameters . * @ param leftSeparator is the left separator to use . * @ param rightSeparator is the right separator to use . * @ param strs is the array of strings . * @ return the string with merged strings . * @ since 4.0 */ @ Pure public static < T > String join ( char leftSeparator , char rightSeparator , @ SuppressWarnings ( "unchecked" ) T ... strs ) { } }
final StringBuilder buffer = new StringBuilder ( ) ; for ( final Object s : strs ) { buffer . append ( leftSeparator ) ; if ( s != null ) { buffer . append ( s . toString ( ) ) ; } buffer . append ( rightSeparator ) ; } return buffer . toString ( ) ;
public class CommerceAddressRestrictionPersistenceImpl {

    /**
     * Removes the commerce address restriction with the primary key from the database. Also notifies the appropriate
     * model listeners.
     *
     * @param primaryKey the primary key of the commerce address restriction
     * @return the commerce address restriction that was removed
     * @throws NoSuchAddressRestrictionException if a commerce address restriction with the primary key could not be found
     */
    @Override
    public CommerceAddressRestriction remove(Serializable primaryKey) throws NoSuchAddressRestrictionException {
        Session session = null;

        try {
            session = openSession();

            // session.get returns null when no row matches the primary key.
            CommerceAddressRestriction commerceAddressRestriction = (CommerceAddressRestriction) session.get(CommerceAddressRestrictionImpl.class, primaryKey);

            if (commerceAddressRestriction == null) {
                if (_log.isDebugEnabled()) {
                    _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
                }

                throw new NoSuchAddressRestrictionException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
            }

            // Delegate the actual deletion (and model-listener notification) to remove(entity).
            return remove(commerceAddressRestriction);
        } catch (NoSuchAddressRestrictionException nsee) {
            // Re-throw unchanged so callers see the specific "not found" type,
            // not a wrapped system exception.
            throw nsee;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class Pareto {

    /**
     * The crowding distance value of a solution provides an estimate of the density of solutions surrounding that
     * solution. The <em>crowding distance</em> value of a particular solution is the average distance of its two
     * neighboring solutions.
     *
     * @apiNote Calculating the crowding distance has a time complexity of {@code O(d*n*log(n))}, where {@code d} is
     *          the number of dimensions and {@code n} the {@code set} size.
     * @see #crowdingDistance(Seq, ElementComparator, ElementDistance, ToIntFunction)
     * @param set the point set used for calculating the <em>crowding distance</em>
     * @param <T> the vector type
     * @return the crowded distances of the {@code set} points
     * @throws NullPointerException if the input {@code set} is {@code null}
     * @throws IllegalArgumentException if {@code set.get(0).length() < 2}
     */
    public static <T> double[] crowdingDistance(final Seq<? extends Vec<T>> set) {
        // Convenience overload: delegate to the general form using the vector
        // type's own element comparator, element distance and dimension count.
        return crowdingDistance(set, Vec::compare, Vec::distance, Vec::length);
    }
}
public class TaskQueue { /** * Establishes the heap invariant ( described above ) assuming the heap * satisfies the invariant except possibly for the leaf - node indexed by k * ( which may have a nextExecutionTime less than its parent ' s ) . * This method functions by " promoting " queue [ k ] up the hierarchy * ( by swapping it with its parent ) repeatedly until queue [ k ] ' s * nextExecutionTime is greater than or equal to that of its parent . */ private void fixUp ( int k ) { } }
while ( k > 1 ) { int j = k >> 1 ; if ( queue [ j ] . nextExecutionTime <= queue [ k ] . nextExecutionTime ) break ; TimerTask tmp = queue [ j ] ; queue [ j ] = queue [ k ] ; queue [ k ] = tmp ; k = j ; }
public class AtomContainerManipulator { /** * Counts the number of implicit hydrogens on the provided IAtomContainer . * As this method will sum all implicit hydrogens on each atom it is * important to ensure the atoms have already been perceived ( and thus have * an implicit hydrogen count ) ( see . * { @ link # percieveAtomTypesAndConfigureAtoms } ) . * @ param container the container to count the implicit hydrogens on * @ return the total number of implicit hydrogens * @ see org . openscience . cdk . interfaces . IAtom # getImplicitHydrogenCount ( ) * @ see # percieveAtomTypesAndConfigureAtoms * @ throws IllegalArgumentException if the provided container was null */ public static int getImplicitHydrogenCount ( IAtomContainer container ) { } }
if ( container == null ) throw new IllegalArgumentException ( "null container provided" ) ; int count = 0 ; for ( IAtom atom : container . atoms ( ) ) { Integer implicit = atom . getImplicitHydrogenCount ( ) ; if ( implicit != null ) { count += implicit ; } } return count ;
public class UnifierConfiguration { /** * Prepares equivalence types for features to be tested . All equivalence * types are given as { @ link PatternToken } s . They create an equivalence set ( with * abstraction ) . * @ param feature Feature to be tested , like gender , grammatical case or number . * @ param type Type of equivalence for the feature , for example plural , first person , genitive . * @ param elem Element specifying the equivalence . */ public final void setEquivalence ( String feature , String type , PatternToken elem ) { } }
EquivalenceTypeLocator typeKey = new EquivalenceTypeLocator ( feature , type ) ; if ( equivalenceTypes . containsKey ( typeKey ) ) { return ; } equivalenceTypes . put ( typeKey , elem ) ; List < String > lTypes ; if ( equivalenceFeatures . containsKey ( feature ) ) { lTypes = equivalenceFeatures . get ( feature ) ; } else { // workaround for issue # 13 lTypes = new CopyOnWriteArrayList < > ( ) ; equivalenceFeatures . put ( feature , lTypes ) ; } lTypes . add ( type ) ;
public class PatternInputStream { /** * Read next chunk from this stream . * @ return a chunk * @ throws IOException if chunk can not be read */ @ Override public Chunk < byte [ ] , BytesReference > readChunk ( ) throws IOException { } }
Chunk < byte [ ] , BytesReference > chunk = internalReadChunk ( ) ; if ( chunk != null ) { processChunk ( chunk ) ; } return chunk ;
public class Config { /** * Returns the map merkle tree config for the given name , creating one * if necessary and adding it to the collection of known configurations . * The configuration is found by matching the configuration name * pattern to the provided { @ code name } without the partition qualifier * ( the part of the name after { @ code ' @ ' } ) . * If no configuration matches , it will create one by cloning the * { @ code " default " } configuration and add it to the configuration * collection . * If there is no default config as well , it will create one and disable * the merkle tree by default . * This method is intended to easily and fluently create and add * configurations more specific than the default configuration without * explicitly adding it by invoking * { @ link # addMerkleTreeConfig ( MerkleTreeConfig ) } . * Because it adds new configurations if they are not already present , * this method is intended to be used before this config is used to * create a hazelcast instance . Afterwards , newly added configurations * may be ignored . * @ param name name of the map merkle tree config * @ return the map merkle tree configuration * @ throws ConfigurationException if ambiguous configurations are found * @ see StringPartitioningStrategy # getBaseName ( java . lang . String ) * @ see # setConfigPatternMatcher ( ConfigPatternMatcher ) * @ see # getConfigPatternMatcher ( ) */ public MerkleTreeConfig getMapMerkleTreeConfig ( String name ) { } }
return ConfigUtils . getConfig ( configPatternMatcher , mapMerkleTreeConfigs , name , MerkleTreeConfig . class , new BiConsumer < MerkleTreeConfig , String > ( ) { @ Override public void accept ( MerkleTreeConfig merkleTreeConfig , String name ) { merkleTreeConfig . setMapName ( name ) ; if ( "default" . equals ( name ) ) { merkleTreeConfig . setEnabled ( false ) ; } } } ) ;
public class ResourceClaim { /** * Grab a ticket in the queue . * @ param zookeeper ZooKeeper connection to use . * @ param lockNode Path to the znode representing the locking queue . * @ param ticket Name of the ticket to attempt to grab . * @ return True on success , false if the ticket was already grabbed by another process . */ static boolean grabTicket ( ZooKeeper zookeeper , String lockNode , String ticket ) throws InterruptedException , KeeperException { } }
try { zookeeper . create ( lockNode + "/" + ticket , new byte [ 0 ] , ZooDefs . Ids . OPEN_ACL_UNSAFE , CreateMode . EPHEMERAL ) ; } catch ( KeeperException e ) { if ( e . code ( ) == KeeperException . Code . NODEEXISTS ) { // It is possible that two processes try to grab the exact same ticket at the same time . // This is common for the locking ticket . logger . debug ( "Failed to claim ticket {}." , ticket ) ; return false ; } else { throw e ; } } logger . debug ( "Claimed ticket {}." , ticket ) ; return true ;
public class SynchronizedRingByteBuffer { /** * Calls fill if rings marked fits . Otherwise returns 0. * @ param ring * @ return * @ throws IOException * @ throws InterruptedException */ public int tryFillAll ( RingByteBuffer ring ) throws IOException , InterruptedException { } }
fillLock . lock ( ) ; try { if ( ring . marked ( ) > free ( ) ) { return 0 ; } return fill ( ring ) ; } finally { fillLock . unlock ( ) ; }
public class BuildStepsInner { /** * List the build arguments for a step including the secret arguments . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ param serviceFuture the ServiceFuture object tracking the Retrofit calls * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < BuildArgumentInner > > listBuildArgumentsNextAsync ( final String nextPageLink , final ServiceFuture < List < BuildArgumentInner > > serviceFuture , final ListOperationCallback < BuildArgumentInner > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( listBuildArgumentsNextSinglePageAsync ( nextPageLink ) , new Func1 < String , Observable < ServiceResponse < Page < BuildArgumentInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < BuildArgumentInner > > > call ( String nextPageLink ) { return listBuildArgumentsNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class CommerceNotificationAttachmentUtil {

    /**
     * Returns the last commerce notification attachment in the ordered set where uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce notification attachment, or <code>null</code> if a matching commerce
     *         notification attachment could not be found
     */
    public static CommerceNotificationAttachment fetchByUuid_C_Last(String uuid, long companyId, OrderByComparator<CommerceNotificationAttachment> orderByComparator) {
        // Thin static facade: delegate straight to the persistence instance.
        return getPersistence().fetchByUuid_C_Last(uuid, companyId, orderByComparator);
    }
}