signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ConfigDelayedMerge {
    /**
     * Resolves the substitutions in a delayed-merge stack by recursively resolving
     * each element and merging the results with {@code withFallback}.
     *
     * <p>Static method also used by ConfigDelayedMergeObject.
     *
     * @param replaceable the merge stack being resolved (used to build replacement
     *        values so that self-referential lookups only see lower-priority items)
     * @param stack the values to merge, highest priority first
     * @param context the current resolve context
     * @param source the root against which substitutions are resolved
     * @return the (possibly partially) resolved result plus the updated context
     * @throws NotPossibleToResolve if a substitution cannot be resolved
     */
    static ResolveResult<? extends AbstractConfigValue> resolveSubstitutions(ReplaceableMergeStack replaceable,
            List<AbstractConfigValue> stack, ResolveContext context, ResolveSource source)
            throws NotPossibleToResolve {
        if (ConfigImpl.traceSubstitutionsEnabled()) {
            ConfigImpl.trace(context.depth(), "delayed merge stack has " + stack.size() + " items:");
            int count = 0;
            for (AbstractConfigValue v : stack) {
                ConfigImpl.trace(context.depth() + 1, count + ": " + v);
                count += 1;
            }
        }

        // to resolve substitutions, we need to recursively resolve
        // the stack of stuff to merge, and merge the stack so
        // we won't be a delayed merge anymore. If restrictToChildOrNull
        // is non-null, or resolve options allow partial resolves,
        // we may remain a delayed merge though.
        ResolveContext newContext = context;
        int count = 0;
        AbstractConfigValue merged = null;
        for (AbstractConfigValue end : stack) {
            // the end value may or may not be resolved already
            ResolveSource sourceForEnd;

            if (end instanceof ReplaceableMergeStack)
                throw new ConfigException.BugOrBroken("A delayed merge should not contain another one: " + replaceable);
            else if (end instanceof Unmergeable) {
                // the remainder could be any kind of value, including another
                // ConfigDelayedMerge
                AbstractConfigValue remainder = replaceable.makeReplacement(context, count + 1);

                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(newContext.depth(), "remainder portion: " + remainder);

                // If, while resolving 'end' we come back to the same
                // merge stack, we only want to look _below_ 'end'
                // in the stack. So we arrange to replace the
                // ConfigDelayedMerge with a value that is only
                // the remainder of the stack below this one.
                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(newContext.depth(), "building sourceForEnd");

                // we resetParents() here because we'll be resolving "end"
                // against a root which does NOT contain "end"
                sourceForEnd = source.replaceWithinCurrentParent((AbstractConfigValue) replaceable, remainder);

                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(newContext.depth(),
                            " sourceForEnd before reset parents but after replace: " + sourceForEnd);

                sourceForEnd = sourceForEnd.resetParents();
            } else {
                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(newContext.depth(),
                            "will resolve end against the original source with parent pushed");

                sourceForEnd = source.pushParent(replaceable);
            }

            if (ConfigImpl.traceSubstitutionsEnabled()) {
                ConfigImpl.trace(newContext.depth(), "sourceForEnd =" + sourceForEnd);
            }

            if (ConfigImpl.traceSubstitutionsEnabled())
                ConfigImpl.trace(newContext.depth(),
                        "Resolving highest-priority item in delayed merge " + end + " against " + sourceForEnd
                                + " endWasRemoved=" + (source != sourceForEnd));

            ResolveResult<? extends AbstractConfigValue> result = newContext.resolve(end, sourceForEnd);
            AbstractConfigValue resolvedEnd = result.value;
            newContext = result.context;

            if (resolvedEnd != null) {
                if (merged == null) {
                    merged = resolvedEnd;
                } else {
                    if (ConfigImpl.traceSubstitutionsEnabled())
                        ConfigImpl.trace(newContext.depth() + 1,
                                "merging " + merged + " with fallback " + resolvedEnd);
                    // earlier stack entries win; later ones become fallbacks
                    merged = merged.withFallback(resolvedEnd);
                }
            }

            count += 1;

            if (ConfigImpl.traceSubstitutionsEnabled())
                ConfigImpl.trace(newContext.depth(), "stack merged, yielding: " + merged);
        }

        return ResolveResult.make(newContext, merged);
    }
}
public class HalResource { /** * Adds state to the resource .
* @ param state Resource state
* @ return HAL resource */
public HalResource addState ( ObjectNode state ) { } } | state . fields ( ) . forEachRemaining ( entry -> model . set ( entry . getKey ( ) , entry . getValue ( ) ) ) ; return this ; |
public class Tracer { /** * Enters the scope of code where the given { @ link Span } is in the current Context , and returns an
* object that represents that scope . The scope is exited when the returned object is closed .
* < p > Supports try - with - resource idiom .
* < p > Can be called with { @ link BlankSpan } to enter a scope of code where tracing is stopped .
* < p > Example of usage :
* < pre > { @ code
* private static Tracer tracer = Tracing . getTracer ( ) ;
* void doWork ( ) {
* / / Create a Span as a child of the current Span .
* Span span = tracer . spanBuilder ( " my span " ) . startSpan ( ) ;
* try ( Scope ws = tracer . withSpan ( span ) ) {
* tracer . getCurrentSpan ( ) . addAnnotation ( " my annotation " ) ;
* doSomeOtherWork ( ) ; / / Here " span " is the current Span .
* span . end ( ) ;
* } < / pre >
* < p > Prior to Java SE 7 , you can use a finally block to ensure that a resource is closed
* regardless of whether the try statement completes normally or abruptly .
* < p > Example of usage prior to Java SE7:
* < pre > { @ code
* private static Tracer tracer = Tracing . getTracer ( ) ;
* void doWork ( ) {
* / / Create a Span as a child of the current Span .
* Span span = tracer . spanBuilder ( " my span " ) . startSpan ( ) ;
* Scope ws = tracer . withSpan ( span ) ;
* try {
* tracer . getCurrentSpan ( ) . addAnnotation ( " my annotation " ) ;
* doSomeOtherWork ( ) ; / / Here " span " is the current Span .
* } finally {
* ws . close ( ) ;
* span . end ( ) ;
* } < / pre >
* @ param span The { @ link Span } to be set to the current Context .
* @ return an object that defines a scope where the given { @ link Span } will be set to the current
* Context .
* @ throws NullPointerException if { @ code span } is { @ code null } .
* @ since 0.5 */
@ MustBeClosed public final Scope withSpan ( Span span ) { } } | return CurrentSpanUtils . withSpan ( Utils . checkNotNull ( span , "span" ) , /* endSpan = */
false ) ; |
public class BenchmarkInterceptor {
    /**
     * This code is executed after the method is called. Records the completion
     * timestamp for the invoked method and passes the result through unchanged.
     *
     * @param object receiver object for the called method
     * @param methodName name of the called method
     * @param arguments arguments to the called method
     * @param result result of the executed method call or result of beforeInvoke if method was not called
     * @return result
     */
    public Object afterInvoke(Object object, String methodName, Object[] arguments, Object result) {
        // NOTE(review): assumes beforeInvoke already registered a List under
        // methodName in 'calls' — otherwise get() returns null and this NPEs; verify.
        // The raw (List) cast suppresses generics; presumably the list holds Long timestamps.
        ((List) calls.get(methodName)).add(System.currentTimeMillis());
        return result;
    }
}
public class JsonRpcBasicServer { /** * Returns parameters into an { @ link InputStream } of JSON data .
* @ param method the method
* @ param id the id
* @ param params the base64 encoded params
* @ return the { @ link InputStream }
* @ throws IOException on error */
static InputStream createInputStream ( String method , String id , String params ) throws IOException { } } | StringBuilder envelope = new StringBuilder ( ) ; envelope . append ( "{\"" ) ; envelope . append ( JSONRPC ) ; envelope . append ( "\":\"" ) ; envelope . append ( VERSION ) ; envelope . append ( "\",\"" ) ; envelope . append ( ID ) ; envelope . append ( "\":" ) ; // the ' id ' value is assumed to be numerical .
if ( null != id && ! id . isEmpty ( ) ) { envelope . append ( id ) ; } else { envelope . append ( "null" ) ; } envelope . append ( ",\"" ) ; envelope . append ( METHOD ) ; envelope . append ( "\":" ) ; if ( null != method && ! method . isEmpty ( ) ) { envelope . append ( '"' ) ; envelope . append ( method ) ; envelope . append ( '"' ) ; } else { envelope . append ( "null" ) ; } envelope . append ( ",\"" ) ; envelope . append ( PARAMS ) ; envelope . append ( "\":" ) ; if ( null != params && ! params . isEmpty ( ) ) { String decodedParams ; // some specifications suggest that the GET " params " query parameter should be Base64 encoded and
// some suggest not . Try to deal with both scenarios - - the code here was previously only doing
// Base64 decoding .
// http : / / www . simple - is - better . org / json - rpc / transport _ http . html
// http : / / www . jsonrpc . org / historical / json - rpc - over - http . html # encoded - parameters
if ( BASE64_PATTERN . matcher ( params ) . matches ( ) ) { decodedParams = new String ( Base64 . decode ( params ) , StandardCharsets . UTF_8 ) ; } else { switch ( params . charAt ( 0 ) ) { case '[' : case '{' : decodedParams = params ; break ; default : throw new IOException ( "badly formed 'param' parameter starting with; [" + params . charAt ( 0 ) + "]" ) ; } } envelope . append ( decodedParams ) ; } else { envelope . append ( "[]" ) ; } envelope . append ( '}' ) ; return new ByteArrayInputStream ( envelope . toString ( ) . getBytes ( StandardCharsets . UTF_8 ) ) ; |
public class MainActivity {
    /**
     * Request notification listener permission: shows a rationale, then toasts
     * success on grant or failure on denial.
     */
    private void requestNotificationListener() {
        AndPermission.with(this)
                .notification()
                .listener()
                // Explain to the user why the listener permission is needed.
                .rationale(new NotifyListenerRationale())
                .onGranted(new Action<Void>() {
                    @Override
                    public void onAction(Void data) {
                        toast(R.string.successfully);
                    }
                })
                .onDenied(new Action<Void>() {
                    @Override
                    public void onAction(Void data) {
                        toast(R.string.failure);
                    }
                })
                .start();
    }
}
public class BlackDuckRequestFilter { /** * This will return the filter key / value pairs as Black Duck expects them : [ key1 : value1 , key1 : value2 , key2 : value3 ] etc */
public List < String > getFilterParameters ( ) { } } | final List < String > parameters = new ArrayList < > ( ) ; filterKeysToValues . forEach ( ( filterKey , filterValues ) -> { filterValues . forEach ( filterValue -> { final String parameterString = String . format ( "%s:%s" , filterKey , filterValue ) ; parameters . add ( parameterString ) ; } ) ; } ) ; return parameters ; |
public class DescribeTableRestoreStatusResult { /** * A list of status details for one or more table restore requests .
* @ return A list of status details for one or more table restore requests . */
public java . util . List < TableRestoreStatus > getTableRestoreStatusDetails ( ) { } } | if ( tableRestoreStatusDetails == null ) { tableRestoreStatusDetails = new com . amazonaws . internal . SdkInternalList < TableRestoreStatus > ( ) ; } return tableRestoreStatusDetails ; |
public class TurfMeta { /** * Get all coordinates from a { @ link Point } object , returning a { @ code List } of Point objects .
* If you have a geometry collection , you need to break it down to individual geometry objects
* before using { @ link # coordAll } .
* @ param point any { @ link Point } object
* @ return a { @ code List } made up of { @ link Point } s
* @ since 2.0.0 */
@ NonNull public static List < Point > coordAll ( @ NonNull Point point ) { } } | return coordAll ( new ArrayList < Point > ( ) , point ) ; |
public class MapTileApproximater { /** * Try to get a tile bitmap from the pool , otherwise allocate a new one
* @ param pTileSizePx
* @ return */
public static Bitmap getTileBitmap ( final int pTileSizePx ) { } } | final Bitmap bitmap = BitmapPool . getInstance ( ) . obtainSizedBitmapFromPool ( pTileSizePx , pTileSizePx ) ; if ( bitmap != null ) { return bitmap ; } return Bitmap . createBitmap ( pTileSizePx , pTileSizePx , Bitmap . Config . ARGB_8888 ) ; |
public class HandleSuperBuilder {
    /**
     * Returns the explicitly requested singular annotation on this node (field
     * or parameter), or null if there's no {@code @Singular} annotation on it.
     *
     * @param node The node (field or method param) to inspect for its name and potential {@code @Singular} annotation.
     * @param source the AST node used for error positioning in the produced SingularData
     */
    private SingularData getSingularData(EclipseNode node, ASTNode source) {
        for (EclipseNode child : node.down()) {
            if (!annotationTypeMatches(Singular.class, child)) continue;
            // Fields may carry a configured prefix that must be stripped to get the plural name.
            char[] pluralName = node.getKind() == Kind.FIELD ? removePrefixFromField(node) : ((AbstractVariableDeclaration) node.get()).name;
            AnnotationValues<Singular> ann = createAnnotation(Singular.class, child);
            String explicitSingular = ann.getInstance().value();
            if (explicitSingular.isEmpty()) {
                // No explicit singular given: either auto-singularize or error out,
                // depending on the SINGULAR_AUTO configuration key.
                if (Boolean.FALSE.equals(node.getAst().readConfiguration(ConfigurationKeys.SINGULAR_AUTO))) {
                    node.addError("The singular must be specified explicitly (e.g. @Singular(\"task\")) because auto singularization is disabled.");
                    explicitSingular = new String(pluralName);
                } else {
                    explicitSingular = autoSingularize(new String(pluralName));
                    if (explicitSingular == null) {
                        node.addError("Can't singularize this name; please specify the singular explicitly (i.e. @Singular(\"sheep\"))");
                        explicitSingular = new String(pluralName);
                    }
                }
            }
            char[] singularName = explicitSingular.toCharArray();
            // Extract the (possibly qualified, possibly parameterized) declared type of the node.
            TypeReference type = ((AbstractVariableDeclaration) node.get()).type;
            TypeReference[] typeArgs = null;
            String typeName;
            if (type instanceof ParameterizedSingleTypeReference) {
                typeArgs = ((ParameterizedSingleTypeReference) type).typeArguments;
                typeName = new String(((ParameterizedSingleTypeReference) type).token);
            } else if (type instanceof ParameterizedQualifiedTypeReference) {
                // Only the type arguments on the LAST segment of a qualified name matter here.
                TypeReference[][] tr = ((ParameterizedQualifiedTypeReference) type).typeArguments;
                if (tr != null) typeArgs = tr[tr.length - 1];
                char[][] tokens = ((ParameterizedQualifiedTypeReference) type).tokens;
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < tokens.length; i++) {
                    if (i > 0) sb.append(".");
                    sb.append(tokens[i]);
                }
                typeName = sb.toString();
            } else {
                typeName = type.toString();
            }
            // Resolve the collection type to a known singularizer; bail with an error if unsupported.
            String targetFqn = EclipseSingularsRecipes.get().toQualified(typeName);
            EclipseSingularizer singularizer = EclipseSingularsRecipes.get().getSingularizer(targetFqn);
            if (singularizer == null) {
                node.addError("Lombok does not know how to create the singular-form builder methods for type '" + typeName + "'; they won't be generated.");
                return null;
            }
            return new SingularData(child, singularName, pluralName, typeArgs == null ? Collections.<TypeReference>emptyList() : Arrays.asList(typeArgs), targetFqn, singularizer, source);
        }
        return null;
    }
}
public class UnifiedClassLoader {
    /** {@inheritDoc} */
    @Override
    @FFDCIgnore(PrivilegedActionException.class)
    public Enumeration<URL> getResources(String name) throws IOException {
        /*
         * The default implementation of getResources never calls getResources on its parent,
         * instead it just calls findResources on all of the loader's parents. We know that our
         * parent will be a gateway class loader that changes the order that resources are loaded
         * but it does this in getResources (as that is where the order *should* be changed
         * according to the JavaDoc). Therefore call getResources on our parent and then
         * findResources on ourself.
         */
        // Look up the parent inside a privileged block (getParent is security-checked).
        ClassLoader parent = null;
        try {
            final ClassLoader thisClassLoader = this;
            parent = AccessController.doPrivileged(new java.security.PrivilegedExceptionAction<ClassLoader>() {
                @Override
                public ClassLoader run() throws Exception {
                    return thisClassLoader.getParent();
                }
            });
        } catch (PrivilegedActionException pae) {
            // deliberately ignored: a failed lookup is treated the same as having no parent
            // return null;
        }
        if (parent == null) {
            // If there's no parent there is nothing to worry about so use the super.getResources
            return super.getResources(name);
        }
        try {
            final String f_name = name;
            final ClassLoader f_parent = parent;
            // Parent first (it controls ordering), then our own findResources appended.
            Enumeration<URL> eURL = AccessController.doPrivileged(new java.security.PrivilegedExceptionAction<Enumeration<URL>>() {
                @Override
                public Enumeration<URL> run() throws Exception {
                    return f_parent.getResources(f_name);
                }
            });
            return new CompositeEnumeration<URL>(eURL).add(this.findResources(name));
        } catch (PrivilegedActionException pae) {
            return null;
        }
        // Note we don't need to worry about getSystemResources as our parent will do that for us
        // return new CompositeEnumeration<URL>(parent.getResources(name)).add(this.findResources(name));
    }
}
public class RuleClassifier { /** * Find best value of entropy for nominal attributes */
public void findBestValEntropyNominalAtt ( AutoExpandVector < DoubleVector > attrib , int attNumValues ) { } } | ArrayList < ArrayList < Double > > distClassValue = new ArrayList < ArrayList < Double > > ( ) ; // System . out . print ( " attrib " + attrib + " \ n " ) ;
for ( int z = 0 ; z < attrib . size ( ) ; z ++ ) { distClassValue . add ( new ArrayList < Double > ( ) ) ; } for ( int v = 0 ; v < attNumValues ; v ++ ) { DoubleVector saveVal = new DoubleVector ( ) ; for ( int z = 0 ; z < attrib . size ( ) ; z ++ ) { if ( attrib . get ( z ) != null ) { distClassValue . get ( z ) . add ( attrib . get ( z ) . getValue ( v ) ) ; } else { distClassValue . get ( z ) . add ( 0.0 ) ; } if ( distClassValue . get ( z ) . get ( v ) . isNaN ( ) ) { distClassValue . get ( z ) . add ( 0.0 ) ; } saveVal . setValue ( z , distClassValue . get ( z ) . get ( v ) ) ; } double sumValue = saveVal . sumOfValues ( ) ; if ( sumValue > 0.0 ) { double entropyVal = entropy ( saveVal ) ; if ( entropyVal <= this . minEntropyNominalAttrib ) { this . minEntropyNominalAttrib = entropyVal ; this . saveBestEntropyNominalAttrib . setValue ( 0 , v ) ; this . saveBestEntropyNominalAttrib . setValue ( 1 , entropyVal ) ; this . saveBestEntropyNominalAttrib . setValue ( 2 , 0.0 ) ; } } } |
public class Ix { /** * Calls the given action to generate a value or terminate whenever the next ( )
* is called on the resulting Ix . iterator ( ) .
* The result ' s iterator ( ) doesn ' t support remove ( ) .
* The action may call { @ code onNext } at most once to signal the next value per action invocation .
* The { @ code onCompleted } should be called to indicate no further values will be generated ( may be
* called with an onNext in the same action invocation ) . Calling { @ code onError } will immediately
* throw the given exception ( as is if it ' s a RuntimeException or Error ; or wrapped into a RuntimeException ) .
* @ param < T > the value type
* @ param nextSupplier the action called with an IxEmitter API to receive value , not null
* @ return the new Ix instance
* @ throws NullPointerException if nextSupplier is null
* @ since 1.0 */
public static < T > Ix < T > generate ( IxConsumer < IxEmitter < T > > nextSupplier ) { } } | return new IxGenerateStateless < T > ( nullCheck ( nextSupplier , "nextSupplier is null" ) ) ; |
public class SecurityServiceImpl { /** * Eventually this will be execution context aware and pick the right domain .
* Till then , we ' re only accessing the system domain configuration .
* @ return SecurityConfiguration representing the " effective " configuration
* for the execution context . */
private SecurityConfiguration getEffectiveSecurityConfiguration ( ) { } } | SecurityConfiguration effectiveConfig = configs . getService ( cfgSystemDomain ) ; if ( effectiveConfig == null ) { Tr . error ( tc , "SECURITY_SERVICE_ERROR_BAD_DOMAIN" , cfgSystemDomain , CFG_KEY_SYSTEM_DOMAIN ) ; throw new IllegalArgumentException ( Tr . formatMessage ( tc , "SECURITY_SERVICE_ERROR_BAD_DOMAIN" , cfgSystemDomain , CFG_KEY_SYSTEM_DOMAIN ) ) ; } return effectiveConfig ; |
public class PersistenceUtils { /** * If the given resource points to an AEM page , delete the page using PageManager .
* Otherwise delete the resource using ResourceResolver .
* @ param resource Resource to delete */
public static void deletePageOrResource ( Resource resource ) { } } | Page configPage = resource . adaptTo ( Page . class ) ; if ( configPage != null ) { try { log . trace ( "! Delete page {}" , configPage . getPath ( ) ) ; PageManager pageManager = configPage . getPageManager ( ) ; pageManager . delete ( configPage , false ) ; } catch ( WCMException ex ) { throw convertWCMException ( "Unable to delete configuration page at " + resource . getPath ( ) , ex ) ; } } else { try { log . trace ( "! Delete resource {}" , resource . getPath ( ) ) ; resource . getResourceResolver ( ) . delete ( resource ) ; } catch ( PersistenceException ex ) { throw convertPersistenceException ( "Unable to delete configuration resource at " + resource . getPath ( ) , ex ) ; } } |
public class HubState {
    /**
     * Stops the hub: shuts down the job hub, drops the reference, and unbinds
     * the authentication service from the RMI registry. Any failure is logged
     * and reported to stderr rather than propagated.
     */
    @CommandArgument
    public void stop() {
        try {
            jobHub.shutdown();
            jobHub = null;
            Registry registry = getRegistry();
            registry.unbind("AuthenticationService");
            System.out.println("Hub stopped");
        } catch (Exception e) {
            // Broad catch is deliberate: stop() is best-effort from the command line.
            logger.error("An error occurred while stopping the hub", e);
            System.err.println("Hub did not shut down cleanly, see log for details.");
        }
    }
}
public class ItemDataTraversingVisitor { /** * Visit all child nodes . */
protected void visitChildNodes ( NodeData node ) throws RepositoryException { } } | if ( isInterrupted ( ) ) return ; for ( NodeData data : dataManager . getChildNodesData ( node ) ) { if ( isInterrupted ( ) ) return ; data . accept ( this ) ; } |
public class VersionMonitor { /** * Gets the current VersionMonitor base on a Class that will load it ' s manifest file .
* @ param clazz Class that will load the manifest MF file
* @ return A { @ link VersionMonitor } pojo with the information .
* @ throws IOException If Unable to read the Manifest file . */
public static VersionMonitor getVersion ( Class clazz ) throws IOException { } } | Manifest manifest = new Manifest ( ) ; manifest . read ( clazz . getResourceAsStream ( MANIFEST_PATH ) ) ; return getVersion ( manifest ) ; |
public class JsonReader { /** * Reads the next array
* @ param < T > the component type of the array
* @ param elementType class information for the component type
* @ return the array
* @ throws IOException Something went wrong reading the array */
public < T > T [ ] nextArray ( @ NonNull Class < T > elementType ) throws IOException { } } | return nextArray ( StringUtils . EMPTY , elementType ) ; |
public class OCommandExecutorSQLCreateProperty {
    /**
     * Execute the CREATE PROPERTY: validates the parsed state, resolves the
     * source class and optional linked class/type, then creates the property
     * on the schema.
     *
     * @param iArgs command arguments (unused here)
     * @return the number of properties on the class after creation
     */
    public Object execute(final Map<Object, Object> iArgs) {
        if (type == null)
            throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");

        final ODatabaseRecord database = getDatabase();
        final OClassImpl sourceClass = (OClassImpl) database.getMetadata().getSchema().getClass(className);
        if (sourceClass == null)
            throw new OCommandExecutionException("Source class '" + className + "' not found");

        OPropertyImpl prop = (OPropertyImpl) sourceClass.getProperty(fieldName);
        if (prop != null)
            throw new OCommandExecutionException("Property '" + className + "." + fieldName + "' already exists. Remove it before to retry.");

        // CREATE THE PROPERTY
        OClass linkedClass = null;
        OType linkedType = null;
        if (linked != null) {
            // FIRST SEARCH BETWEEN CLASSES
            linkedClass = database.getMetadata().getSchema().getClass(linked);
            if (linkedClass == null)
                // NOT FOUND: SEARCH BETWEEN TYPES (throws if 'linked' is not a valid OType name)
                linkedType = OType.valueOf(linked.toUpperCase(Locale.ENGLISH));
        }

        // CREATE IT LOCALLY
        prop = sourceClass.addPropertyInternal(fieldName, type, linkedType, linkedClass);
        sourceClass.saveInternal();

        return sourceClass.properties().size();
    }
}
public class RequestBuilder {
    /**
     * Add a header to the request. Adds this value to any existing values for this name.
     *
     * @param name The name of the header
     * @param value The value
     * @return The request builder (this instance, for chaining)
     */
    public RequestBuilder<T, ID> addHeader(String name, String value) {
        headers.add(name, value);
        return this;
    }
}
public class XMLConfiguration {
    /**
     * Configures the web application's login settings from a {@code <login-config>}
     * element: selects the authenticator for the declared auth-method, sets the
     * realm name, and applies form-login page configuration when present.
     *
     * @param node the parsed {@code <login-config>} XML node
     */
    protected void initLoginConfig(XmlParser.Node node) {
        XmlParser.Node method = node.get("auth-method");
        FormAuthenticator _formAuthenticator = null;
        if (method != null) {
            Authenticator authenticator = null;
            String m = method.toString(false, true);
            if (SecurityConstraint.__FORM_AUTH.equals(m))
                authenticator = _formAuthenticator = new FormAuthenticator();
            else if (SecurityConstraint.__BASIC_AUTH.equals(m))
                authenticator = new BasicAuthenticator();
            else if (SecurityConstraint.__DIGEST_AUTH.equals(m))
                authenticator = new DigestAuthenticator();
            else if (SecurityConstraint.__CERT_AUTH.equals(m))
                authenticator = new ClientCertAuthenticator();
            // NOTE(review): __CERT_AUTH2 intentionally maps to the same authenticator
            // as __CERT_AUTH — presumably two accepted spellings of the cert method; verify.
            else if (SecurityConstraint.__CERT_AUTH2.equals(m))
                authenticator = new ClientCertAuthenticator();
            else
                log.warn("UNKNOWN AUTH METHOD: " + m);
            getWebApplicationContext().setAuthenticator(authenticator);
        }
        XmlParser.Node name = node.get("realm-name");
        if (name != null)
            getWebApplicationContext().setRealmName(name.toString(false, true));
        XmlParser.Node formConfig = node.get("form-login-config");
        if (formConfig != null) {
            // form-login-config is only meaningful when auth-method was FORM.
            if (_formAuthenticator == null)
                log.warn("FORM Authentication miss-configured");
            else {
                XmlParser.Node loginPage = formConfig.get("form-login-page");
                if (loginPage != null)
                    _formAuthenticator.setLoginPage(loginPage.toString(false, true));
                XmlParser.Node errorPage = formConfig.get("form-error-page");
                if (errorPage != null) {
                    String ep = errorPage.toString(false, true);
                    _formAuthenticator.setErrorPage(ep);
                }
            }
        }
    }
}
public class FeatureOverlayQuery {
    /**
     * Perform a query based upon the map click location and build an info message.
     * Delegates to the four-argument overload with no extra argument (null).
     *
     * @param latLng location
     * @param view view
     * @param map Google Map
     * @return information message on what was clicked, or null
     */
    public String buildMapClickMessage(LatLng latLng, View view, GoogleMap map) {
        return buildMapClickMessage(latLng, view, map, null);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcDistributionControlElementType
     * from the registered package (classifier index 160).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcDistributionControlElementType() {
        if (ifcDistributionControlElementTypeEClass == null) {
            ifcDistributionControlElementTypeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(160);
        }
        return ifcDistributionControlElementTypeEClass;
    }
}
public class ICalTimeZone {
    /**
     * Gets the observance information of a date: the observance in effect at the
     * given time (and when it started) plus the next observance after it.
     *
     * @param year the year
     * @param month the month (1-12)
     * @param day the day of the month
     * @param hour the hour
     * @param minute the minute
     * @param second the second
     * @return the observance information or null if none was found
     */
    private Boundary getObservanceBoundary(int year, int month, int day, int hour, int minute, int second) {
        if (sortedObservances.isEmpty()) {
            return null;
        }

        DateValue givenTime = new DateTimeValueImpl(year, month, day, hour, minute, second);
        int closestIndex = -1;
        Observance closest = null;
        DateValue closestValue = null;
        for (int i = 0; i < sortedObservances.size(); i++) {
            Observance observance = sortedObservances.get(i);

            // skip observances that start after the given time
            ICalDate dtstart = getValue(observance.getDateStart());
            if (dtstart != null) {
                DateValue dtstartValue = convertFromRawComponents(dtstart);
                if (dtstartValue.compareTo(givenTime) > 0) {
                    continue;
                }
            }

            // keep the observance whose closest occurrence is latest but still <= givenTime
            DateValue dateValue = getObservanceDateClosestToTheGivenDate(observance, givenTime, false);
            if (dateValue != null && (closestValue == null || closestValue.compareTo(dateValue) < 0)) {
                closestValue = dateValue;
                closest = observance;
                closestIndex = i;
            }
        }

        Observance observanceIn = closest;
        DateValue observanceInStart = closestValue;
        Observance observanceAfter = null;
        DateValue observanceAfterStart = null;
        // NOTE(review): if no observance matched, closestIndex is -1 and this picks
        // sortedObservances.get(0) as the "after" observance — presumably intentional
        // (everything starts after the given time); verify.
        if (closestIndex < sortedObservances.size() - 1) {
            observanceAfter = sortedObservances.get(closestIndex + 1);
            observanceAfterStart = getObservanceDateClosestToTheGivenDate(observanceAfter, givenTime, true);
        }

        /*
         * If any of the DTSTART properties are missing their time components,
         * then observanceInStart/observanceAfterStart could be a DateValue
         * object. If so, convert it to a DateTimeValue object (see Issue 77).
         */
        if (observanceInStart != null && !(observanceInStart instanceof DateTimeValue)) {
            observanceInStart = new DTBuilder(observanceInStart).toDateTime();
        }
        if (observanceAfterStart != null && !(observanceAfterStart instanceof DateTimeValue)) {
            observanceAfterStart = new DTBuilder(observanceAfterStart).toDateTime();
        }

        return new Boundary((DateTimeValue) observanceInStart, observanceIn, (DateTimeValue) observanceAfterStart, observanceAfter);
    }
}
public class LocalCall {
    /**
     * Calls an execution module function on the given target asynchronously and
     * returns information about the scheduled job that can be used to query the result.
     * Authentication is done with the token, therefore you have to login prior
     * to using this function. Delegates to the overload with no extra metadata
     * ({@code Optional.empty()}).
     *
     * @param client SaltClient instance
     * @param target the target for the function
     * @param auth authentication credentials to use
     * @return information about the scheduled job
     */
    public CompletionStage<Optional<LocalAsyncResult<R>>> callAsync(final SaltClient client, Target<?> target, AuthMethod auth) {
        return callAsync(client, target, auth, Optional.empty());
    }
}
public class EJBWrapper {
    /**
     * Adds the default definition for the Object.hashCode method:
     * generates bytecode equivalent to
     * {@code public int hashCode() { return System.identityHashCode(this); }}.
     *
     * @param cw ASM ClassWriter to add the method to.
     * @param implClassName name of the wrapper class being generated.
     */
    private static void addDefaultHashCodeMethod(ClassWriter cw, String implClassName) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, INDENT + "adding method : hashCode ()I");

        // public int hashCode()
        final String desc = "()I";
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "hashCode", desc, null, null);
        GeneratorAdapter mg = new GeneratorAdapter(mv, ACC_PUBLIC, "hashCode", desc);
        mg.visitCode();

        // return System.identityHashCode(this);
        mg.loadThis();
        mg.visitMethodInsn(INVOKESTATIC, "java/lang/System", "identityHashCode", "(Ljava/lang/Object;)I");
        mg.returnValue();
        mg.endMethod();
        mg.visitEnd();
    }
}
public class LocaleIDParser {
    /**
     * Advance index past language, and accumulate normalized (lowercased)
     * language code in buffer. Index must be at 0 when this is called. Index is
     * left at a terminator or id separator. Returns the start of the language
     * code in the buffer (always 0).
     */
    private int parseLanguage() {
        int startLength = buffer.length();
        // Experimental prefix (e.g. "x-"): copy it through lowercased.
        if (haveExperimentalLanguagePrefix()) {
            append(AsciiUtil.toLower(id[0]));
            append(HYPHEN);
            index = 2;
        }
        char c;
        while (!isTerminatorOrIDSeparator(c = next())) {
            append(AsciiUtil.toLower(c));
        }
        --index; // unget

        // Normalize 3-letter language codes to their 2-letter equivalent when one exists.
        if (buffer.length() - startLength == 3) {
            String lang = LocaleIDs.threeToTwoLetterLanguage(getString(0));
            if (lang != null) {
                set(0, lang);
            }
        }

        return 0;
    }
}
public class ModelsImpl {
    /**
     * Get All Entity Roles for a given entity.
     * Unwraps the service-response overload to yield just the body.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId entity Id
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;EntityRole&gt; object
     */
    public Observable<List<EntityRole>> getClosedListEntityRolesAsync(UUID appId, String versionId, UUID entityId) {
        return getClosedListEntityRolesWithServiceResponseAsync(appId, versionId, entityId)
                .map(new Func1<ServiceResponse<List<EntityRole>>, List<EntityRole>>() {
                    @Override
                    public List<EntityRole> call(ServiceResponse<List<EntityRole>> response) {
                        return response.body();
                    }
                });
    }
}
public class GeneratorRegistry { /** * Returns the path to use in the " build time process " to generate the
* resource path for debug mode .
* @ param path
* the resource path
* @ return the path to use in the " build time process " to generate the
* resource path for debug mode . */
public String getDebugModeBuildTimeGenerationPath ( String path ) { } } | int idx = path . indexOf ( "?" ) ; String debugModeGeneratorPath = path . substring ( 0 , idx ) ; debugModeGeneratorPath = debugModeGeneratorPath . replaceAll ( "\\." , "/" ) ; int jawrGenerationParamIdx = path . indexOf ( JawrRequestHandler . GENERATION_PARAM ) ; String parameter = path . substring ( jawrGenerationParamIdx + JawrRequestHandler . GENERATION_PARAM . length ( ) + 1 ) ; // Add
// for
// the
// character
ResourceGenerator resourceGenerator = resolveResourceGenerator ( parameter ) ; String suffixPath = null ; if ( resourceGenerator instanceof SpecificCDNDebugPathResourceGenerator ) { suffixPath = ( ( SpecificCDNDebugPathResourceGenerator ) resourceGenerator ) . getDebugModeBuildTimeGenerationPath ( parameter ) ; } else { suffixPath = parameter . replaceFirst ( GeneratorRegistry . PREFIX_SEPARATOR , JawrConstant . URL_SEPARATOR ) ; } return debugModeGeneratorPath + "/" + suffixPath ; |
public class CurationManager { /** * Shutdown method
* @ throws PluginException
* if any errors occur */
@ Override public void shutdown ( ) throws PluginException { } } | if ( storage != null ) { try { storage . shutdown ( ) ; } catch ( PluginException pe ) { log . error ( "Failed to shutdown storage: {}" , pe . getMessage ( ) ) ; throw pe ; } } if ( indexer != null ) { try { indexer . shutdown ( ) ; } catch ( PluginException pe ) { log . error ( "Failed to shutdown indexer: {}" , pe . getMessage ( ) ) ; throw pe ; } } |
public class MaterialCameraCapture { /** * Native call to capture the frame of the video stream . */
protected String nativeCaptureToDataURL ( CanvasElement canvas , Element element , String mimeType ) { } } | VideoElement videoElement = ( VideoElement ) element ; int width = videoElement . getVideoWidth ( ) ; int height = videoElement . getVideoHeight ( ) ; if ( Double . isNaN ( width ) || Double . isNaN ( height ) ) { width = videoElement . getClientWidth ( ) ; height = videoElement . getClientHeight ( ) ; } canvas . setWidth ( width ) ; canvas . setHeight ( height ) ; Context2d context = canvas . getContext2d ( ) ; context . drawImage ( videoElement , 0 , 0 , width , height ) ; return canvas . toDataUrl ( mimeType ) ; |
public class ModelControllerImpl { /** * Executes an operation on the controller
* @ param operation the operation
* @ param handler the handler
* @ param control the transaction control
* @ param attachments the operation attachments
* @ return the result of the operation */
@ Override public ModelNode execute ( final ModelNode operation , final OperationMessageHandler handler , final OperationTransactionControl control , final OperationAttachments attachments ) { } } | SecurityIdentity securityIdentity = securityIdentitySupplier . get ( ) ; OperationResponse or = securityIdentity . runAs ( ( PrivilegedAction < OperationResponse > ) ( ) -> internalExecute ( operation , handler , control , attachments , prepareStep , false , partialModelIndicator . isModelPartial ( ) ) ) ; ModelNode result = or . getResponseNode ( ) ; try { or . close ( ) ; } catch ( IOException e ) { ROOT_LOGGER . debugf ( e , "Caught exception closing response to %s whose associated streams, " + "if any, were not wanted" , operation ) ; } return result ; |
public class JMThread { /** * Run with schedule scheduled future .
* @ param delayMillis the delay millis
* @ param runnable the runnable
* @ return the scheduled future */
public static ScheduledFuture < ? > runWithSchedule ( long delayMillis , Runnable runnable ) { } } | return newSingleScheduledThreadPool ( ) . schedule ( buildRunnableWithLogging ( "runWithSchedule" , runnable , delayMillis ) , delayMillis , TimeUnit . MILLISECONDS ) ; |
public class Sql { /** * If this SQL object was created with a Connection then this method commits
* the connection . If this SQL object was created from a DataSource then
* this method does nothing .
* @ throws SQLException if a database access error occurs */
public void commit ( ) throws SQLException { } } | if ( useConnection == null ) { LOG . info ( "Commit operation not supported when using datasets unless using withTransaction or cacheConnection - attempt to commit ignored" ) ; return ; } try { useConnection . commit ( ) ; } catch ( SQLException e ) { LOG . warning ( "Caught exception committing connection: " + e . getMessage ( ) ) ; throw e ; } |
public class Updater { /** * Update with the given appcast information in the specified targetDir
* @ param appcast The appcast content ( containing the file to download )
* @ param targetDir The target directory for downloaded update files
* @ return Updated files
* @ throws Exception in case of an error */
public Set < Path > update ( Appcast appcast , Path targetDir ) throws Exception { } } | Set < Path > files ; if ( appcast == null ) { throw new IllegalArgumentException ( "Appcast cannot be null!" ) ; } LOG . debug ( "Updating application ''{}''..." , appcast . getTitle ( ) ) ; // Download the update and verfiy it
Path downloaded = appcastManager . download ( appcast , targetDir ) ; if ( downloaded == null ) { throw new Exception ( "Could not download update package for application '" + appcast . getTitle ( ) + "'!" ) ; } LOG . debug ( "Downloaded update package ''{}''" , downloaded ) ; // Unzip the update if required
files = ZipHandler . unzip ( downloaded , targetDir , true ) ; LOG . debug ( "Extracted files: {}" , files ) ; // Check if there is an update script available and execute it if so
Map < String , Object > bindings = new HashMap < > ( ) ; bindings . put ( "targetDir" , targetDir ) ; files . stream ( ) . filter ( ( filePath ) -> ( filePath . getFileName ( ) . toString ( ) . endsWith ( UPDATE_SCRIPT_SUFFIX ) ) ) . forEach ( ( filePath ) -> { executeUpdateScript ( filePath , bindings ) ; |
public class TimeoutImpl { /** * Restart the timer on a new thread in synchronous mode .
* In this mode , a timeout only causes the thread to be interrupted , it does not directly set the result of the QueuedFuture .
* This is needed when doing Retries or Fallback on an async thread . If the result is set directly , then we have no opportunity to handle the exception . */
public void runSyncOnNewThread ( Thread newThread ) { } } | lock . writeLock ( ) . lock ( ) ; try { if ( this . timeoutTask == null ) { throw new IllegalStateException ( Tr . formatMessage ( tc , "internal.error.CWMFT4999E" ) ) ; } stop ( ) ; this . stopped = false ; long remaining = check ( ) ; Runnable timeoutTask = ( ) -> { newThread . interrupt ( ) ; } ; start ( timeoutTask , remaining ) ; } finally { lock . writeLock ( ) . unlock ( ) ; } |
public class Reflector { /** * do nothing when not exist
* @ param obj
* @ param prop
* @ param value
* @ throws PageException */
public static void callSetterEL ( Object obj , String prop , Object value ) throws PageException { } } | try { MethodInstance setter = getSetter ( obj , prop , value , null ) ; if ( setter != null ) setter . invoke ( obj ) ; } catch ( InvocationTargetException e ) { Throwable target = e . getTargetException ( ) ; if ( target instanceof PageException ) throw ( PageException ) target ; throw Caster . toPageException ( e . getTargetException ( ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } |
public class BuildController { /** * Deletes a link between a build and another
* @ param buildId From this build . . .
* @ param targetBuildId . . . to this build
* @ return List of builds */
@ RequestMapping ( value = "builds/{buildId}/links/{targetBuildId}" , method = RequestMethod . DELETE ) public Build deleteBuildLink ( @ PathVariable ID buildId , @ PathVariable ID targetBuildId ) { } } | Build build = structureService . getBuild ( buildId ) ; Build targetBuild = structureService . getBuild ( targetBuildId ) ; structureService . deleteBuildLink ( build , targetBuild ) ; return build ; |
public class CmsDbExportView { /** * Sets the init values for check boxes . < p > */
private void setupCheckBoxes ( ) { } } | m_includeResource . setValue ( new Boolean ( true ) ) ; m_includeUnchanged . setValue ( new Boolean ( true ) ) ; m_includeSystem . setValue ( new Boolean ( true ) ) ; m_recursive . setValue ( new Boolean ( true ) ) ; |
public class CreateMASCaseManager {
    /**
     * Method to close the file caseManager. It is called just one time, by the
     * MASReader, once every test and story have been added.
     * Appends the closing brace to the file; write failures are logged, not thrown.
     * @param caseManager the case-manager file to finalize
     */
    public static void closeMASCaseManager(File caseManager) {
        // try-with-resources guarantees the writer is closed on both the success
        // and the failure path (the original leaked the writer when write() threw).
        try (FileWriter caseManagerWriter = new FileWriter(caseManager, true)) {
            caseManagerWriter.write("}\n");
            caseManagerWriter.flush();
        } catch (IOException e) {
            Logger logger = Logger.getLogger("CreateMASCaseManager.closeMASCaseManager");
            logger.info("ERROR: There is a mistake closing caseManager file.\n");
        }
    }
}
public class ExternalKoPeMeRunner { /** * Runs KoPeMe , and returns 0 , if everything works allright
* @ return 0 , if everything works allright */
public int run ( ) { } } | try { if ( compile ) { compile ( ) ; } String separator = "/" ; String cpseperator = ":" ; if ( System . getProperty ( "os.name" ) . contains ( "indows" ) ) { separator = "\\" ; cpseperator = ";" ; } String s = fileName . replace ( separator , "." ) ; if ( ".java" . equals ( s . substring ( s . length ( ) - 5 ) ) ) { s = s . substring ( 0 , s . length ( ) - 5 ) ; // . java Entfernen
} else { s = s . substring ( 0 , s . length ( ) - 6 ) ; // . class entfernen
} String localClasspath = classpath ; if ( compileFolder != null ) localClasspath = localClasspath + cpseperator + compileFolder ; String command = "java -cp " + localClasspath ; if ( libraryPath != null && libraryPath != "" ) command += "-Djava.library.path=" + libraryPath ; command = command + " de.kopeme.testrunner.PerformanceTestRunner " + s ; // System . out . println ( command ) ;
Process p = Runtime . getRuntime ( ) . exec ( command ) ; BufferedReader br = new BufferedReader ( new InputStreamReader ( p . getInputStream ( ) ) ) ; String line ; BufferedWriter bw = null ; // System . out . println ( " ExternalOutputFile : " + externalOutputFile ) ;
if ( externalOutputFile != null && externalOutputFile != "" ) { File output = new File ( externalOutputFile ) ; try { bw = new BufferedWriter ( new FileWriter ( output ) ) ; } catch ( IOException e1 ) { // TODO Automatisch generierter Erfassungsblock
e1 . printStackTrace ( ) ; } } while ( ( line = br . readLine ( ) ) != null ) { if ( bw == null ) { System . out . println ( line ) ; } else { bw . write ( line + "\n" ) ; } } br = new BufferedReader ( new InputStreamReader ( p . getErrorStream ( ) ) ) ; while ( ( line = br . readLine ( ) ) != null ) { if ( bw == null ) { System . out . println ( line ) ; } else { bw . write ( line + "\n" ) ; } } if ( bw != null ) bw . close ( ) ; int returnValue = p . waitFor ( ) ; // System . out . println ( " Returnvalue : " + returnValue ) ;
return returnValue ; } catch ( IOException e ) { // TODO Automatisch generierter Erfassungsblock
e . printStackTrace ( ) ; } catch ( InterruptedException e ) { // TODO Automatisch generierter Erfassungsblock
e . printStackTrace ( ) ; } return 1 ; |
public class ModuleItem { /** * If more PmiModule . listStatistics methods are implemented ,
* this method can be simplified as listData ( recursive )
* Leave it alone now . */
public StatsImpl getStats ( int [ ] dataIds , boolean recursive ) { } } | if ( dataIds == null ) return getStats ( recursive ) ; if ( instance == null ) return null ; SpdData [ ] dataList = instance . listData ( dataIds ) ; ModuleItem [ ] items = children ( ) ; ArrayList dataMembers = null ; // return data members
ArrayList colMembers = null ; // return subcollection members
// convert from Spd to Wpd and set dataMembers
if ( dataList != null ) { dataMembers = new ArrayList ( dataList . length ) ; for ( int i = 0 ; i < dataList . length ; i ++ ) { dataMembers . add ( dataList [ i ] . getStatistic ( ) ) ; } } // getStats from children
if ( recursive && ( items != null ) ) { colMembers = new ArrayList ( items . length ) ; for ( int i = 0 ; i < items . length ; i ++ ) { colMembers . add ( items [ i ] . getStats ( dataIds , recursive ) ) ; } } // return Stats - individual instance will return different XXXStats instance
// Note : construct dataMembers in ModuleItem and pass it to instance
// because getStats will be overwritten by subclasses
// and we don ' t want to duplicate the same code .
return instance . getStats ( dataMembers , colMembers ) ; |
public class SassVaadinGenerator { /** * Compile the SASS source to a CSS source
* @ param bundle
* the bundle
* @ param content
* the resource content to compile
* @ param path
* the compiled resource path
* @ param context
* the generator context
* @ return the compiled CSS content */
protected String compile ( JoinableResourceBundle bundle , String content , String path , GeneratorContext context ) { } } | try { JawrScssResolver scssResolver = new JawrScssResolver ( bundle , rsHandler ) ; JawrScssStylesheet sheet = new JawrScssStylesheet ( bundle , content , path , scssResolver , context . getCharset ( ) ) ; sheet . compile ( urlMode ) ; String parsedScss = sheet . printState ( ) ; addLinkedResources ( path , context , scssResolver . getLinkedResources ( ) ) ; return parsedScss ; } catch ( Exception e ) { throw new BundlingProcessException ( "Unable to generate content for resource path : '" + path + "'" , e ) ; } |
public class EkstaziCFT { /** * Instrument all classfiles ( files that end with . class ) inside
* the given jar and rewrite the existing jar .
* This method can be simplified if we move Ekstazi code from Java
* 6 to newer version . See
* http : / / docs . oracle . com / javase / 7 / docs / technotes / guides / io / fsp / zipfilesystemprovider . html */
protected static void instrumentJarFile ( String pathToFile ) throws IOException { } } | File jarFile = new File ( pathToFile ) ; // Use tmp file for output ( in the same directory ) .
File newFile = File . createTempFile ( "any" , ".jar" , jarFile . getParentFile ( ) ) ; ZipInputStream zis = null ; ZipOutputStream zos = null ; try { zis = new ZipInputStream ( new FileInputStream ( jarFile ) ) ; zos = new ZipOutputStream ( new FileOutputStream ( newFile ) ) ; for ( ZipEntry entry = zis . getNextEntry ( ) ; entry != null ; entry = zis . getNextEntry ( ) ) { String name = entry . getName ( ) ; zos . putNextEntry ( new ZipEntry ( name ) ) ; if ( name . endsWith ( ".class" ) ) { // We instrument classfiles and then write them to
// new jar .
byte [ ] classfileBuffer = FileUtil . loadBytes ( zis , false ) ; byte [ ] newClassfileBuffer = instrumentClassFile ( classfileBuffer ) ; zos . write ( newClassfileBuffer ) ; } else { int data ; while ( ( data = zis . read ( ) ) != - 1 ) { zos . write ( data ) ; } } zos . closeEntry ( ) ; } } finally { FileUtil . closeAndIgnoreExceptions ( zis ) ; FileUtil . closeAndIgnoreExceptions ( zos ) ; } // Move new jar to old jar .
// Files . move ( newFile , jarFile , StandardCopyOption . REPLACE _ EXISTING ) ;
FileUtil . copyBytes ( newFile , jarFile ) ; newFile . delete ( ) ; |
public class Frame { /** * Get the stack slot that will contain given method argument . Assumes that
* this frame is at the location ( just before ) a method invocation
* instruction .
* @ param i
* the argument index : 0 for first arg , etc .
* @ param numArguments
* total number of arguments to the called method
* @ return slot containing the argument value */
public int getArgumentSlot ( int i , int numArguments ) { } } | if ( i >= numArguments ) { throw new IllegalArgumentException ( ) ; } return ( slotList . size ( ) - numArguments ) + i ; |
public class FastaReader { /** * Return next raw FASTA record or { @ literal null } if end of stream is reached .
* < p > This method is thread - safe . < / p >
* @ return next raw FASTA record or { @ literal null } if end of stream is reached */
public synchronized RawFastaRecord takeRawRecord ( ) { } } | RawFastaRecord rawFastaRecord ; try { rawFastaRecord = nextRawRecord ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } if ( rawFastaRecord == null ) isFinished = true ; return rawFastaRecord ; |
public class TOTPBuilder { /** * Build a Time - based One - time Password { @ link TOTP } using the current
* system time ( current time in milliseconds since the UNIX epoch ) . Note
* that the builder instance can be reused for subsequent
* configuration / generation calls .
* @ return a Time - based One - time Password { @ link TOTP } instance . */
public TOTP build ( ) { } } | long time = System . currentTimeMillis ( ) ; return new TOTP ( generateTOTP ( time ) , time , hmacShaAlgorithm , digits , timeStep ) ; |
public class ProgressTracker { /** * Add a new Progress to the tracker .
* @ param p Progress */
public synchronized void addProgress ( Progress p ) { } } | // Don ' t add more than once .
Iterator < WeakReference < Progress > > iter = progresses . iterator ( ) ; while ( iter . hasNext ( ) ) { WeakReference < Progress > ref = iter . next ( ) ; // since we are at it anyway , remove old links .
if ( ref . get ( ) == null ) { iter . remove ( ) ; } else { if ( ref . get ( ) == p ) { return ; } } } progresses . add ( new WeakReference < > ( p ) ) ; |
public class LogicUtil { /** * Checks if the object equals one of the other objects given
* @ param object to check
* @ param objects to use equals against
* @ return True if one of the objects equal the object */
public static < A , B > boolean equalsAny ( A object , B ... objects ) { } } | for ( B o : objects ) { if ( bothNullOrEqual ( o , object ) ) { return true ; } } return false ; |
public class Log { /** * Register a Context . Factory to create a Log . */
public static void preRegister ( Context context , PrintWriter w ) { } } | context . put ( Log . class , ( Context . Factory < Log > ) ( c -> new Log ( c , w ) ) ) ; |
public class TagTrackingCacheEventListener { /** * If the element has a TaggedCacheKey remove the tag associations */
protected void removeElement ( Ehcache cache , Element element ) { } } | final Set < CacheEntryTag > tags = this . getTags ( element ) ; // Check if the key is tagged
if ( tags != null && ! tags . isEmpty ( ) ) { final String cacheName = cache . getName ( ) ; final LoadingCache < CacheEntryTag , Set < Object > > cacheKeys = taggedCacheKeys . getIfPresent ( cacheName ) ; // If there are tracked tagged keys remove matching tags
if ( cacheKeys != null ) { final Object key = element . getObjectKey ( ) ; logger . debug ( "Tracking removing key cache {} with tag {} : {}" , cacheName , tags , key ) ; for ( final CacheEntryTag tag : tags ) { final Set < Object > taggedKeys = cacheKeys . getIfPresent ( tag ) ; // Remove the tagged key
if ( taggedKeys != null ) { taggedKeys . remove ( key ) ; } } } } |
public class dnsaction { /** * Use this API to add dnsaction . */
public static base_response add ( nitro_service client , dnsaction resource ) throws Exception { } } | dnsaction addresource = new dnsaction ( ) ; addresource . actionname = resource . actionname ; addresource . actiontype = resource . actiontype ; addresource . ipaddress = resource . ipaddress ; addresource . ttl = resource . ttl ; addresource . viewname = resource . viewname ; addresource . preferredloclist = resource . preferredloclist ; return addresource . add_resource ( client ) ; |
public class SimpleRequestManager { @ Override public OptionalThing < String > getHeader ( String headerKey ) { } } | return OptionalThing . ofNullable ( getRequest ( ) . getHeader ( headerKey ) , ( ) -> { throw new RequestInfoNotFoundException ( "Not found the header for the request: key=" + headerKey + " path=" + getRequestPath ( ) ) ; } ) ; |
public class FileColumn { /** * Set the column attributes from the given string in the format name = value [ : name = value . . . ]
* @ param str The string containing the formatted attributes */
public void setAttributes ( String str ) { } } | str = str . trim ( ) ; String delimiter = ":" ; // default is colon delimiter ,
if ( str . indexOf ( ";" ) != - 1 ) // but use semi - colon with date format parameters ( eg . HH : mm )
delimiter = ";" ; StringTokenizer st = new StringTokenizer ( str , delimiter ) ; while ( st . hasMoreTokens ( ) ) { String token = st . nextToken ( ) . trim ( ) ; int pos = token . indexOf ( "=" ) ; if ( pos != - 1 ) { String name = token . substring ( 0 , pos ) . trim ( ) ; String value = token . substring ( pos + 1 ) . trim ( ) ; if ( name . equals ( TYPE_ATTR ) ) { type = getCellType ( value ) ; } else if ( name . equals ( FORMAT_ATTR ) ) { format = value ; } else if ( name . equals ( INPUT_TYPE_ATTR ) ) { inputType = getDataType ( value ) ; if ( outputType == NO_TYPE ) outputType = inputType ; } else if ( name . equals ( INPUT_FORMAT_ATTR ) ) { inputFormat = value ; } else if ( name . equals ( OUTPUT_TYPE_ATTR ) ) { outputType = getDataType ( value ) ; if ( inputType == NO_TYPE ) inputType = outputType ; } else if ( name . equals ( OUTPUT_FORMAT_ATTR ) ) { outputFormat = value ; } else if ( name . equals ( REGEX_ATTR ) ) { regex = value ; } else if ( name . equals ( NULL_VALUE_ATTR ) ) { nullValue = value ; } else if ( name . equals ( ALIGN_ATTR ) ) { align = getAlignment ( value ) ; } else if ( name . equals ( WRAP_ATTR ) ) { wrap = Boolean . parseBoolean ( value ) ; } else if ( name . equals ( DISPLAY_ATTR ) ) { display = value ; } } } |
public class MeshUtils { /** * Scale the mesh at x , y and z axis .
* @ param mesh Mesh to be scaled .
* @ param x Scale to be applied on x - axis .
* @ param y Scale to be applied on y - axis .
* @ param z Scale to be applied on z - axis . */
public static void scale ( GVRMesh mesh , float x , float y , float z ) { } } | final float [ ] vertices = mesh . getVertices ( ) ; final int vsize = vertices . length ; for ( int i = 0 ; i < vsize ; i += 3 ) { vertices [ i ] *= x ; vertices [ i + 1 ] *= y ; vertices [ i + 2 ] *= z ; } mesh . setVertices ( vertices ) ; |
public class ClientSessionCache { /** * Clean out the cache after commit .
* TODO keep hollow objects ? E . g . references to correct , e . t . c !
* @ param retainValues retainValues flag
* @ param detachAllOnCommit detachAllOnCommit flag */
public void postCommit ( boolean retainValues , boolean detachAllOnCommit ) { } } | int logSizeObjBefore = objs . size ( ) ; long t1 = System . nanoTime ( ) ; // TODO later : empty cache ( ? )
if ( ! deletedObjects . isEmpty ( ) ) { for ( ZooPC co : deletedObjects . values ( ) ) { if ( co . jdoZooIsDeleted ( ) ) { objs . remove ( co . jdoZooGetOid ( ) ) ; co . jdoZooGetContext ( ) . notifyEvent ( co , ZooInstanceEvent . POST_DELETE ) ; } } } if ( detachAllOnCommit ) { detachAllOnCommit ( ) ; } else if ( retainValues ) { if ( ! dirtyObjects . isEmpty ( ) ) { for ( ZooPC co : dirtyObjects ) { if ( ! co . jdoZooIsDeleted ( ) ) { co . jdoZooMarkClean ( ) ; } } } } else { if ( objs . size ( ) > 100000 ) { LOGGER . warn ( "Cache is getting large. Consider retainValues=true" + " to speed up and avoid expensive eviction." ) ; } for ( ZooPC co : objs . values ( ) ) { if ( retainValues || co instanceof ZooClassDef ) { co . jdoZooMarkClean ( ) ; } else { co . jdoZooEvict ( ) ; } co . jdoZooGetContext ( ) . notifyEvent ( co , ZooInstanceEvent . POST_STORE ) ; } } dirtyObjects . clear ( ) ; deletedObjects . clear ( ) ; // generic objects
if ( ! dirtyGenObjects . isEmpty ( ) ) { for ( GenericObject go : dirtyGenObjects ) { if ( go . jdoZooIsDeleted ( ) ) { genericObjects . remove ( go . getOid ( ) ) ; continue ; } go . jdoZooMarkClean ( ) ; } } if ( ! genericObjects . isEmpty ( ) ) { for ( GenericObject go : genericObjects . values ( ) ) { if ( ! retainValues ) { go . jdoZooMarkHollow ( ) ; } } } dirtyGenObjects . clear ( ) ; // schema
Iterator < ZooClassDef > iterS = schemata . values ( ) . iterator ( ) ; for ( ; iterS . hasNext ( ) ; ) { ZooClassDef cs = iterS . next ( ) ; if ( cs . jdoZooIsDeleted ( ) ) { iterS . remove ( ) ; nodeSchemata . get ( cs . jdoZooGetNode ( ) ) . remove ( cs . getJavaClass ( ) ) ; continue ; } // keep in cache ? ? ?
cs . jdoZooMarkClean ( ) ; // TODO remove if cache is flushed - > retainValues ! ! ! ! !
} if ( Session . LOGGER . isInfoEnabled ( ) ) { int logSizeObjAfter = objs . size ( ) ; long t2 = System . nanoTime ( ) ; Session . LOGGER . info ( "ClientCache.postCommit() -- Time= {} ns; Cache size before/after: {} / {}" , ( t2 - t1 ) , logSizeObjBefore , logSizeObjAfter ) ; } |
public class PairtreeUtils { /** * Unclean the ID from the Pairtree path .
* @ param aID A cleaned ID to unclean
* @ return The unclean ID */
public static String decodeID ( final String aID ) { } } | Objects . requireNonNull ( aID , LOGGER . getMessage ( MessageCodes . PT_004 ) ) ; final StringBuilder idBuf = new StringBuilder ( ) ; for ( int index = 0 ; index < aID . length ( ) ; index ++ ) { final char character = aID . charAt ( index ) ; // Decode characters that need to be decoded according to Pairtree specification
if ( character == EQUALS_SIGN ) { idBuf . append ( PATH_SEP ) ; } else if ( character == PLUS_SIGN ) { idBuf . append ( COLON ) ; } else if ( character == COMMA ) { idBuf . append ( PERIOD ) ; } else if ( character == HEX_INDICATOR ) { /* Get the next two characters since they are hex characters */
final String hex = aID . substring ( index + 1 , index + 3 ) ; final char [ ] chars = Character . toChars ( Integer . parseInt ( hex , 16 ) ) ; assert chars . length == 1 ; idBuf . append ( chars [ 0 ] ) ; index = index + 2 ; } else { idBuf . append ( character ) ; } } return idBuf . toString ( ) ; |
public class FactoryImageDenoise { /** * Denoises an image using VISU Shrink wavelet denoiser .
* @ param imageType The type of image being transform .
* @ param numLevels Number of levels in the wavelet transform . If not sure , try using 3.
* @ param minPixelValue Minimum allowed pixel intensity value
* @ param maxPixelValue Maximum allowed pixel intensity value
* @ return filter for image noise removal . */
public static < T extends ImageGray < T > > WaveletDenoiseFilter < T > waveletVisu ( Class < T > imageType , int numLevels , double minPixelValue , double maxPixelValue ) { } } | ImageDataType info = ImageDataType . classToType ( imageType ) ; WaveletTransform descTran = createDefaultShrinkTransform ( info , numLevels , minPixelValue , maxPixelValue ) ; DenoiseWavelet denoiser = FactoryDenoiseWaveletAlg . visu ( imageType ) ; return new WaveletDenoiseFilter < > ( descTran , denoiser ) ; |
public class AnnotationUtil { /** * Attempts to get the riak user metadata from a domain object by looking
* for a { @ literal @ RiakUsermeta } annotated field or getter method .
* @ param < T > the type of the domain object
* @ param metaContainer the RiakUserMetadata container
* @ param domainObject the domain object
* @ return a Map containing the user metadata . */
public static < T > RiakUserMetadata getUsermetaData ( RiakUserMetadata metaContainer , T domainObject ) { } } | return AnnotationHelper . getInstance ( ) . getUsermetaData ( metaContainer , domainObject ) ; |
public class NaiveBayesClassifier { /** * Read the Naive Bayes Model from HDFS */
public void init ( Configuration conf , Path generatedModel ) throws IOException , InterruptedException { } } | FileSystem fileSystem = FileSystem . get ( conf ) ; for ( Category category : Category . values ( ) ) { wordCountPerCategory . put ( category , new HashMap < String , Integer > ( ) ) ; // init token count
} // Use a HashSet to calculate the total vocabulary size
Set < String > vocabulary = new HashSet < String > ( ) ; // Read tuples from generate job
for ( FileStatus fileStatus : fileSystem . globStatus ( generatedModel ) ) { TupleFile . Reader reader = new TupleFile . Reader ( fileSystem , conf , fileStatus . getPath ( ) ) ; Tuple tuple = new Tuple ( reader . getSchema ( ) ) ; while ( reader . next ( tuple ) ) { // Read Tuple
Integer count = ( Integer ) tuple . get ( "count" ) ; Category category = ( Category ) tuple . get ( "category" ) ; String word = tuple . get ( "word" ) . toString ( ) ; vocabulary . add ( word ) ; tokensPerCategory . put ( category , MapUtils . getInteger ( tokensPerCategory , category , 0 ) + count ) ; wordCountPerCategory . get ( category ) . put ( word , count ) ; } reader . close ( ) ; } V = vocabulary . size ( ) ; |
public class RenameHandler { /** * Creates an instance , providing the ability to load types in config files .
* This is not normally used as the preferred option is to edit the singleton .
* If the flag is set to true , the classpath config files will be used to register types and enums .
* @ param loadFromClasspath whether to load any types in classpath config files
* @ return a new instance , not null */
public static RenameHandler create ( boolean loadFromClasspath ) { } } | RenameHandler handler = new RenameHandler ( ) ; if ( loadFromClasspath ) { handler . loadFromClasspath ( ) ; } return handler ; |
public class QuickDrawContext { /** * TODO : All other operations can delegate to these ! : - ) */
private void frameShape ( final Shape pShape ) { } } | if ( isPenVisible ( ) ) { setupForPaint ( ) ; Stroke stroke = getStroke ( penSize ) ; Shape shape = stroke . createStrokedShape ( pShape ) ; graphics . draw ( shape ) ; } |
public class TotalSupportTree { /** * Returns the set of frequent item sets .
* @ param out a print stream for output of frequent item sets .
* @ param list a container to store frequent item sets on output .
* @ return the number of discovered frequent item sets */
private long getFrequentItemsets ( PrintStream out , List < ItemSet > list ) { } } | long n = 0 ; if ( root . children != null ) { for ( int i = 0 ; i < root . children . length ; i ++ ) { Node child = root . children [ i ] ; if ( child != null && child . support >= minSupport ) { int [ ] itemset = { child . id } ; n += getFrequentItemsets ( out , list , itemset , i , child ) ; } } } return n ; |
public class AWSStorageGatewayClient { /** * Gets a description of a Server Message Block ( SMB ) file share settings from a file gateway . This operation is
* only supported for file gateways .
* @ param describeSMBSettingsRequest
* @ return Result of the DescribeSMBSettings operation returned by the service .
* @ throws InvalidGatewayRequestException
* An exception occurred because an invalid gateway request was issued to the service . For more information ,
* see the error and message fields .
* @ throws InternalServerErrorException
* An internal server error has occurred during the request . For more information , see the error and message
* fields .
* @ sample AWSStorageGateway . DescribeSMBSettings
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / DescribeSMBSettings "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeSMBSettingsResult describeSMBSettings ( DescribeSMBSettingsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeSMBSettings ( request ) ; |
public class RecursiveObjectWriter { /** * Recursively sets values of some ( all ) object and its subobjects properties .
* The object can be a user defined object , map or array . Property values
* correspondently are object properties , map key - pairs or array elements with
* their indexes .
* If some properties do not exist or introspection fails they are just silently
* skipped and no errors thrown .
* @ param obj an object to write properties to .
* @ param values a map , containing property names and their values .
* @ see # setProperty ( Object , String , Object ) */
public static void setProperties ( Object obj , Map < String , Object > values ) { } } | if ( values == null || values . size ( ) == 0 ) return ; for ( Map . Entry < String , Object > entry : values . entrySet ( ) ) { setProperty ( obj , entry . getKey ( ) , entry . getValue ( ) ) ; } |
public class ClusterStateChangeReasonMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ClusterStateChangeReason clusterStateChangeReason , ProtocolMarshaller protocolMarshaller ) { } } | if ( clusterStateChangeReason == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( clusterStateChangeReason . getCode ( ) , CODE_BINDING ) ; protocolMarshaller . marshall ( clusterStateChangeReason . getMessage ( ) , MESSAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class VirtualMachineScaleSetsInner { /** * Redeploy one or more virtual machines in a VM scale set .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ param instanceIds The virtual machine scale set instance ids . Omitting the virtual machine scale set instance ids will result in the operation being performed on all virtual machines in the virtual machine scale set .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < OperationStatusResponseInner > redeployAsync ( String resourceGroupName , String vmScaleSetName , List < String > instanceIds , final ServiceCallback < OperationStatusResponseInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( redeployWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , instanceIds ) , serviceCallback ) ; |
public class Symm { /** * Convenience Function
* encode String into InputStream and call encode ( InputStream , OutputStream )
* @ param string
* @ param out
* @ throws IOException */
public void encode ( String string , OutputStream out ) throws IOException { } } | encode ( new ByteArrayInputStream ( string . getBytes ( ) ) , out ) ; |
public class PasswordCipher { /** * If the cipher text decodes to an odd number of bytes , we can ' t go on ! */
private static void sanityCheckOnCipherBytes ( String cipherText , byte [ ] cipherBytes ) { } } | if ( cipherBytes . length % 2 != 0 ) { throw new IllegalStateException ( "Ciphered text decodes to an odd number of bytes! Text='" + cipherText + "', decodes to " + cipherBytes . length + " bytes." ) ; } |
public class SecStrucTools { /** * Obtain the List of secondary structure elements ( SecStrucElement ) of a
* Structure .
* @ param s
* Structure with SS assignments
* @ return List of SecStrucElement objects */
public static List < SecStrucElement > getSecStrucElements ( Structure s ) { } } | List < SecStrucElement > listSSE = new ArrayList < SecStrucElement > ( ) ; GroupIterator iter = new GroupIterator ( s ) ; // SecStruc information - initialize
SecStrucType type = SecStrucType . coil ; ResidueNumber previous = new ResidueNumber ( ) ; ResidueNumber start = new ResidueNumber ( ) ; String chainId = "" ; int count = 0 ; // counts the number of residues in SSE
// Create a map for the IDs of the SSE in the structure
Map < SecStrucType , Integer > ids = new TreeMap < SecStrucType , Integer > ( ) ; for ( SecStrucType t : SecStrucType . values ( ) ) ids . put ( t , 1 ) ; while ( iter . hasNext ( ) ) { Group g = iter . next ( ) ; if ( g . hasAminoAtoms ( ) ) { Object p = g . getProperty ( Group . SEC_STRUC ) ; if ( p == null ) continue ; SecStrucInfo ss = ( SecStrucInfo ) p ; if ( count > 0 ) { // If chain and type are equal increment counter
if ( ss . type == type && chainId == g . getChainId ( ) ) { previous = g . getResidueNumber ( ) ; count ++ ; continue ; } else { // Save the current SSE if chain or type change
SecStrucElement sse = new SecStrucElement ( type , start , previous , count , ids . get ( type ) , chainId ) ; listSSE . add ( sse ) ; ids . put ( type , ids . get ( type ) + 1 ) ; count = 0 ; // Initialize a new SSE one
if ( ss . type != SecStrucType . coil ) { type = ss . type ; start = g . getResidueNumber ( ) ; previous = start ; chainId = g . getChainId ( ) ; count = 1 ; } } } else { // This is for the first residue only
if ( ss . type != SecStrucType . coil ) { type = ss . type ; start = g . getResidueNumber ( ) ; previous = start ; chainId = g . getChainId ( ) ; count = 1 ; } } } } return listSSE ; |
public class AttrPause { /** * Creates a new attribute instance from the provided String .
* @ param str string representation of the attribute
* @ return attribute instance or { @ code null } if provided string is
* { @ code null }
* @ throws BOSHException on parse or validation failure */
static AttrPause createFromString ( final String str ) throws BOSHException { } } | if ( str == null ) { return null ; } else { return new AttrPause ( str ) ; } |
public class FedoraObjectTripleGenerator_3_0 { /** * For the given datastream , add the triples that are common for all
* datastreams . This will include :
* < ul >
* < li > object < i > view : disseminates < / i > datastream < / li >
* < li > datastream < i > view : disseminationType < / i > < / li >
* < li > datastream < i > view : isVolatile < / i > < / li >
* < li > datastream < i > view : lastModifiedDate < / i > < / li >
* < li > datastream < i > view : mimeType < / i > < / li >
* < li > datastream < i > model : state < / i > < / li >
* < / ul > */
private void addCoreDatastreamTriples ( Datastream ds , URIReference objURI , Set < Triple > set ) throws Exception { } } | URIReference dsURI = new SimpleURIReference ( new URI ( objURI . getURI ( ) . toString ( ) + "/" + ds . DatastreamID ) ) ; add ( objURI , VIEW . DISSEMINATES , dsURI , set ) ; URIReference dsDissType = new SimpleURIReference ( new URI ( FEDORA . uri + "*/" + ds . DatastreamID ) ) ; add ( dsURI , VIEW . DISSEMINATION_TYPE , dsDissType , set ) ; boolean isVolatile = ds . DSControlGrp . equals ( "E" ) || ds . DSControlGrp . equals ( "R" ) ; add ( dsURI , VIEW . IS_VOLATILE , isVolatile , set ) ; add ( dsURI , VIEW . LAST_MODIFIED_DATE , ds . DSCreateDT , set ) ; add ( dsURI , VIEW . MIME_TYPE , ds . DSMIME , set ) ; add ( dsURI , MODEL . STATE , getStateResource ( ds . DSState ) , set ) ; |
public class CmsSerialDateValue { /** * Extracts the dates from a JSON array .
* @ param array the JSON array where the dates are stored in .
* @ return list of the extracted dates . */
private SortedSet < Date > readDates ( JSONArray array ) { } } | if ( null != array ) { SortedSet < Date > result = new TreeSet < > ( ) ; for ( int i = 0 ; i < array . length ( ) ; i ++ ) { try { long l = Long . valueOf ( array . getString ( i ) ) . longValue ( ) ; result . add ( new Date ( l ) ) ; } catch ( NumberFormatException | JSONException e ) { LOG . error ( "Could not read date from JSON array." , e ) ; } } return result ; } return new TreeSet < > ( ) ; |
public class LogMgr { /** * Sets up a circular chain of pointers to the records in the page . There is
* an integer added to the end of each log record whose value is the offset
* of the previous log record . The first four bytes of the page contain an
* integer whose value is the offset of the integer for the last log record
* in the page . */
private void finalizeRecord ( ) { } } | myPage . setVal ( currentPos , new IntegerConstant ( getLastRecordPosition ( ) ) ) ; setPreviousNextRecordPosition ( currentPos + pointerSize ) ; setLastRecordPosition ( currentPos ) ; currentPos += pointerSize ; setNextRecordPosition ( currentPos ) ; // leave for next pointer
currentPos += pointerSize ; |
public class DeepLinkUtil { /** * Adds remote connection parameter to link GET query ,
* if parameter yet not present in this link .
* @ param link link to add connection parameter
* @ return link with remote connection GET parameter */
public static String makeDeepLink ( String link ) { } } | if ( link != null // If getCurrentRemoteConnectionLink ( ) = = null means there is no current remote connection
&& getCurrentRemoteConnectionLink ( ) != null // If containsRemoteParameter ( link ) is true means link already contain remote connection parameter
&& ! containsRemoteParameter ( link ) ) { link += ( link . contains ( "?" ) ? "&" : "?" ) + PARAM_REMOTE_CONNECTION + "=" + getCurrentRemoteConnectionLink ( ) ; } return link ; |
public class MultiAdditionNeighbourhood { /** * Generates a move for the given subset solution that adds a random subset of currently unselected IDs to the
* selection . Possible fixed IDs are not considered to be selected . The maximum number of additions \ ( k \ ) and
* maximum allowed subset size are respected . If no items can be added , < code > null < / code > is returned .
* Note that first , a random number of additions is picked ( uniformly distributed ) from the valid range and then ,
* a random subset of this size is sampled from the currently unselected IDs , to be added ( again , all possible
* subsets are uniformly distributed , within the fixed size ) . Because the amount of possible moves increases with
* the number of performed additions , the probability of generating each specific move thus decreases with the
* number of additions . In other words , randomly generated moves are < b > not < / b > uniformly distributed across
* different numbers of performed additions , but each specific move performing fewer additions is more likely
* to be selected than each specific move performing more additions .
* @ param solution solution for which a random multi addition move is generated
* @ param rnd source of randomness used to generate random move
* @ return random multi addition move , < code > null < / code > if no items can be added */
@ Override public SubsetMove getRandomMove ( SubsetSolution solution , Random rnd ) { } } | // get set of candidate IDs for addition ( fixed IDs are discarded )
Set < Integer > addCandidates = getAddCandidates ( solution ) ; // compute maximum number of adds
int curMaxAdds = maxAdditions ( addCandidates , solution ) ; // return null if no additions are possible
if ( curMaxAdds == 0 ) { return null ; } // pick number of additions ( in [ 1 , curMaxAdds ] )
int numAdds = rnd . nextInt ( curMaxAdds ) + 1 ; // pick random IDs to add to selection
Set < Integer > add = SetUtilities . getRandomSubset ( addCandidates , numAdds , rnd ) ; // create and return move
return new GeneralSubsetMove ( add , Collections . emptySet ( ) ) ; |
public class BsScheduledJob { @ Override public Map < String , Object > toSource ( ) { } } | Map < String , Object > sourceMap = new HashMap < > ( ) ; if ( available != null ) { addFieldToSource ( sourceMap , "available" , available ) ; } if ( crawler != null ) { addFieldToSource ( sourceMap , "crawler" , crawler ) ; } if ( createdBy != null ) { addFieldToSource ( sourceMap , "createdBy" , createdBy ) ; } if ( createdTime != null ) { addFieldToSource ( sourceMap , "createdTime" , createdTime ) ; } if ( cronExpression != null ) { addFieldToSource ( sourceMap , "cronExpression" , cronExpression ) ; } if ( jobLogging != null ) { addFieldToSource ( sourceMap , "jobLogging" , jobLogging ) ; } if ( name != null ) { addFieldToSource ( sourceMap , "name" , name ) ; } if ( scriptData != null ) { addFieldToSource ( sourceMap , "scriptData" , scriptData ) ; } if ( scriptType != null ) { addFieldToSource ( sourceMap , "scriptType" , scriptType ) ; } if ( sortOrder != null ) { addFieldToSource ( sourceMap , "sortOrder" , sortOrder ) ; } if ( target != null ) { addFieldToSource ( sourceMap , "target" , target ) ; } if ( updatedBy != null ) { addFieldToSource ( sourceMap , "updatedBy" , updatedBy ) ; } if ( updatedTime != null ) { addFieldToSource ( sourceMap , "updatedTime" , updatedTime ) ; } return sourceMap ; |
public class IO { /** * Zip a list of files into specified target file .
* @ param target
* the target file as the zip package
* @ param files
* the files to be zipped . */
public static void zipInto ( File target , File ... files ) { } } | ZipOutputStream zos = null ; try { zos = new ZipOutputStream ( new BufferedOutputStream ( new FileOutputStream ( target ) ) ) ; byte [ ] buffer = new byte [ 128 ] ; for ( File f : files ) { ZipEntry entry = new ZipEntry ( f . getName ( ) ) ; InputStream is = new BufferedInputStream ( new FileInputStream ( f ) ) ; zos . putNextEntry ( entry ) ; int read = 0 ; while ( ( read = is . read ( buffer ) ) != - 1 ) { zos . write ( buffer , 0 , read ) ; } zos . closeEntry ( ) ; IO . close ( is ) ; } } catch ( IOException e ) { throw E . ioException ( e ) ; } finally { IO . close ( zos ) ; } |
public class DependencyPipe { /** * Create feature alphabets , which maps 64 - bit feature code into
* its integer index ( starting from index 0 ) . This method is called
* before training a dependency model .
* @ param file file path of the training data */
public void createAlphabets ( String file , String conllFormat ) throws IOException { } } | createDictionaries ( file , conllFormat ) ; long start = System . currentTimeMillis ( ) ; logger . debug ( "Creating Alphabet ... " ) ; HashSet < String > posTagSet = new HashSet < > ( ) ; HashSet < String > cposTagSet = new HashSet < > ( ) ; DependencyReader reader = DependencyReader . createDependencyReader ( conllFormat ) ; reader . startReading ( file ) ; DependencyInstance dependencyInstance = reader . nextInstance ( ) ; while ( dependencyInstance != null ) { for ( int i = 0 ; i < dependencyInstance . getLength ( ) ; ++ i ) { if ( dependencyInstance . getPostags ( ) != null ) posTagSet . add ( dependencyInstance . getPostags ( ) [ i ] ) ; if ( dependencyInstance . getCpostags ( ) != null ) cposTagSet . add ( dependencyInstance . getCpostags ( ) [ i ] ) ; } dependencyInstance . setInstIds ( dictionariesSet , coarseMap , conjWord ) ; synFactory . initFeatureAlphabets ( dependencyInstance ) ; dependencyInstance = reader . nextInstance ( ) ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( String . format ( "[%d ms]%n" , System . currentTimeMillis ( ) - start ) ) ; } closeAlphabets ( ) ; reader . close ( ) ; synFactory . checkCollisions ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( String . format ( "Num of CONLL fine POS tags: %d%n" , posTagSet . size ( ) ) ) ; logger . debug ( String . format ( "Num of CONLL coarse POS tags: %d%n" , cposTagSet . size ( ) ) ) ; logger . debug ( String . format ( "Num of labels: %d%n" , types . length ) ) ; logger . debug ( String . format ( "Num of Syntactic Features: %d %d%n" , synFactory . getNumberWordFeatures ( ) , synFactory . getNumberLabeledArcFeatures ( ) ) ) ; } numCPOS = cposTagSet . size ( ) ; |
public class JsonIOUtil { /** * Merges the { @ code message } with the byte array using the given { @ code schema } . */
public static < T > void mergeFrom ( byte [ ] data , int offset , int length , T message , Schema < T > schema , boolean numeric ) throws IOException { } } | final IOContext context = new IOContext ( DEFAULT_JSON_FACTORY . _getBufferRecycler ( ) , data , false ) ; final JsonParser parser = newJsonParser ( null , data , offset , offset + length , false , context ) ; /* final JsonParser parser = DEFAULT _ JSON _ FACTORY . createJsonParser ( data , offset ,
length ) ; */
try { mergeFrom ( parser , message , schema , numeric ) ; } finally { parser . close ( ) ; } |
public class PHS398FellowshipSupplementalV1_2Generator { /** * This method is used to set additional information data to AdditionalInformation XMLObject from DevelopmentProposal ,
* ProposalYnq */
private void setAdditionalInformation ( AdditionalInformation additionalInformation ) { } } | Boolean hasInvestigator = false ; additionalInformation . addNewFellowshipTrainingAndCareerGoals ( ) ; additionalInformation . addNewActivitiesPlannedUnderThisAward ( ) ; ProposalPersonContract principalInvestigator = s2SProposalPersonService . getPrincipalInvestigator ( pdDoc ) ; for ( ProposalPersonContract proposalPerson : pdDoc . getDevelopmentProposal ( ) . getProposalPersons ( ) ) { if ( proposalPerson . isPrincipalInvestigator ( ) ) { hasInvestigator = true ; CitizenshipType citizenShip = s2SProposalPersonService . getCitizenship ( proposalPerson ) ; if ( citizenShip != null && StringUtils . isNotBlank ( citizenShip . getCitizenShip ( ) ) ) { if ( citizenShip . getCitizenShip ( ) . trim ( ) . equals ( CitizenshipDataType . NON_U_S_CITIZEN_WITH_TEMPORARY_VISA . toString ( ) ) ) { additionalInformation . setCitizenship ( CitizenshipDataType . NON_U_S_CITIZEN_WITH_TEMPORARY_VISA ) ; } else if ( citizenShip . getCitizenShip ( ) . trim ( ) . equals ( CitizenshipDataType . PERMANENT_RESIDENT_OF_U_S . toString ( ) ) ) { additionalInformation . setCitizenship ( CitizenshipDataType . PERMANENT_RESIDENT_OF_U_S ) ; } else if ( citizenShip . getCitizenShip ( ) . trim ( ) . equals ( CitizenshipDataType . U_S_CITIZEN_OR_NONCITIZEN_NATIONAL . toString ( ) ) ) { additionalInformation . setCitizenship ( CitizenshipDataType . U_S_CITIZEN_OR_NONCITIZEN_NATIONAL ) ; } else if ( citizenShip . getCitizenShip ( ) . trim ( ) . equals ( CitizenshipDataType . PERMANENT_RESIDENT_OF_U_S_PENDING . toString ( ) ) ) { additionalInformation . setCitizenship ( CitizenshipDataType . PERMANENT_RESIDENT_OF_U_S_PENDING ) ; } } else { additionalInformation . setCitizenship ( null ) ; } } } if ( principalInvestigator != null && principalInvestigator . getMobilePhoneNumber ( ) != null ) { additionalInformation . setAlernatePhoneNumber ( principalInvestigator . getMobilePhoneNumber ( ) ) ; } if ( ! 
hasInvestigator ) { additionalInformation . setCitizenship ( null ) ; } additionalInformation . setConcurrentSupport ( YesNoDataType . N_NO ) ; AttachedFileDataType attachedFileDataType = null ; for ( NarrativeContract narrative : pdDoc . getDevelopmentProposal ( ) . getNarratives ( ) ) { if ( narrative . getNarrativeType ( ) . getCode ( ) != null ) { switch ( Integer . parseInt ( narrative . getNarrativeType ( ) . getCode ( ) ) ) { case CONCURRENT_SUPPORT : attachedFileDataType = getAttachedFileType ( narrative ) ; if ( attachedFileDataType == null ) { continue ; } ConcurrentSupportDescription concurrentSupportDescription = ConcurrentSupportDescription . Factory . newInstance ( ) ; concurrentSupportDescription . setAttFile ( attachedFileDataType ) ; additionalInformation . setConcurrentSupport ( YesNoDataType . Y_YES ) ; additionalInformation . setConcurrentSupportDescription ( concurrentSupportDescription ) ; break ; case FELLOWSHIP : attachedFileDataType = getAttachedFileType ( narrative ) ; if ( attachedFileDataType == null ) { continue ; } FellowshipTrainingAndCareerGoals fellowshipTrainingAndCareerGoals = FellowshipTrainingAndCareerGoals . Factory . newInstance ( ) ; fellowshipTrainingAndCareerGoals . setAttFile ( attachedFileDataType ) ; additionalInformation . setFellowshipTrainingAndCareerGoals ( fellowshipTrainingAndCareerGoals ) ; break ; case DISSERTATION : attachedFileDataType = getAttachedFileType ( narrative ) ; if ( attachedFileDataType == null ) { continue ; } DissertationAndResearchExperience dissertationAndResearchExperience = DissertationAndResearchExperience . Factory . newInstance ( ) ; dissertationAndResearchExperience . setAttFile ( attachedFileDataType ) ; additionalInformation . 
setDissertationAndResearchExperience ( dissertationAndResearchExperience ) ; break ; case ACTIVITIES : attachedFileDataType = getAttachedFileType ( narrative ) ; if ( attachedFileDataType == null ) { continue ; } ActivitiesPlannedUnderThisAward activitiesPlannedUnderThisAward = ActivitiesPlannedUnderThisAward . Factory . newInstance ( ) ; activitiesPlannedUnderThisAward . setAttFile ( attachedFileDataType ) ; additionalInformation . setActivitiesPlannedUnderThisAward ( activitiesPlannedUnderThisAward ) ; break ; default : break ; } } } |
public class MetricCommitter { /** * ~ Methods * * * * * */
@ Override public void run ( ) { } } | while ( ! Thread . currentThread ( ) . isInterrupted ( ) ) { try { List < Metric > dequeuedMetrics = collectionService . commitMetrics ( METRIC_MESSAGES_CHUNK_SIZE , TIMEOUT ) ; int noOfDatapointsCommitted = 0 ; for ( Metric metric : dequeuedMetrics ) { noOfDatapointsCommitted += metric . getDatapoints ( ) . size ( ) ; } if ( dequeuedMetrics . size ( ) > 0 ) { LOGGER . info ( MessageFormat . format ( "Committed {0} metrics." , dequeuedMetrics . size ( ) ) ) ; monitorService . modifyCounter ( Counter . COMMIT_CLIENT_METRIC_WRITES , dequeuedMetrics . size ( ) , new HashMap < String , String > ( ) ) ; } if ( noOfDatapointsCommitted > 0 ) { LOGGER . debug ( MessageFormat . format ( "Committed {0} datapoints." , noOfDatapointsCommitted ) ) ; jobCounter . addAndGet ( noOfDatapointsCommitted ) ; monitorService . modifyCounter ( Counter . COMMIT_CLIENT_DATAPOINT_WRITES , noOfDatapointsCommitted , new HashMap < String , String > ( ) ) ; } Thread . sleep ( POLL_INTERVAL_MS ) ; } catch ( InterruptedException ie ) { LOGGER . info ( "Execution was interrupted." ) ; Thread . currentThread ( ) . interrupt ( ) ; break ; } catch ( Throwable ex ) { LOGGER . info ( "Error occurred while committing metrics. Reason {}" , ex . toString ( ) ) ; } } LOGGER . warn ( MessageFormat . format ( "Metric committer thread interrupted. {} datapoints committed by this thread." , jobCounter . get ( ) ) ) ; collectionService . dispose ( ) ; monitorService . dispose ( ) ; |
public class MailBuilder { /** * Sets the content of the message , both the plain text and HTML version .
* @ param content the content of the message
* @ return this builder
* @ deprecated use { @ link # content ( Body ) } */
@ Deprecated public MailBuilder content ( net . sargue . mailgun . content . MailContent content ) { } } | // NOSONAR
return text ( content . text ( ) ) . html ( content . html ( ) ) ; |
public class Mailer { /** * Actually instantiates and configures the { @ link Session } instance . Delegates resolving transport protocol specific properties to the
* { @ link # transportStrategy } in two ways :
* < ol >
* < li > request an initial property list which the strategy may pre - populate < / li >
* < li > by requesting the property names according to the respective transport protocol it handles ( for the host property name it would
* be < em > " mail . smtp . host " < / em > for SMTP and < em > " mail . smtps . host " < / em > for SMTPS ) < / li >
* < / ol >
* @ param host The address URL of the SMTP server to be used .
* @ param port The port of the SMTP server .
* @ param username An optional username , may be < code > null < / code > .
* @ param password An optional password , may be < code > null < / code > .
* @ return A fully configured < code > Session < / code > instance complete with transport protocol settings .
* @ see TransportStrategy # generateProperties ( )
* @ see TransportStrategy # propertyNameHost ( )
* @ see TransportStrategy # propertyNamePort ( )
* @ see TransportStrategy # propertyNameUsername ( )
* @ see TransportStrategy # propertyNameAuthenticate ( ) */
public Session createMailSession ( final String host , final int port , final String username , final String password ) { } } | Properties props = transportStrategy . generateProperties ( ) ; props . put ( transportStrategy . propertyNameHost ( ) , host ) ; props . put ( transportStrategy . propertyNamePort ( ) , String . valueOf ( port ) ) ; if ( username != null ) { props . put ( transportStrategy . propertyNameUsername ( ) , username ) ; } if ( password != null ) { props . put ( transportStrategy . propertyNameAuthenticate ( ) , "true" ) ; return Session . getInstance ( props , new Authenticator ( ) { @ Override protected PasswordAuthentication getPasswordAuthentication ( ) { return new PasswordAuthentication ( username , password ) ; } } ) ; } else { return Session . getInstance ( props ) ; } |
public class VmlGraphicsContext { /** * Draw a circle on the < code > GraphicsContext < / code > .
* @ param parent
* parent group object
* @ param name
* The circle ' s name .
* @ param position
* The center position as a coordinate .
* @ param radius
* The circle ' s radius .
* @ param style
* The styling object by which the circle should be drawn . */
public void drawCircle ( Object parent , String name , Coordinate position , double radius , ShapeStyle style ) { } } | if ( isAttached ( ) ) { Element circle = helper . createOrUpdateElement ( parent , name , "oval" , style ) ; // Real position is the upper left corner of the circle :
applyAbsolutePosition ( circle , new Coordinate ( position . getX ( ) - radius , position . getY ( ) - radius ) ) ; // width and height are both radius * 2
int size = ( int ) ( 2 * radius ) ; applyElementSize ( circle , size , size , false ) ; } |
public class DataStoreStash { /** * Lists all tables present in Stash . Note that tables that were present in EmoDB but empty during the
* Stash operation are not listed . */
public Iterable < StashTable > listStashTables ( ) throws StashNotAvailableException { } } | final StashReader stashReader = _stashReader . getLockedView ( ) ; return new Iterable < StashTable > ( ) { @ Override public Iterator < StashTable > iterator ( ) { return stashReader . listTables ( ) ; } } ; |
public class ParameterMetadataProvider { /** * Builds a new { @ link ParameterMetadata } for the given type and name .
* @ param < T >
* @ param part must not be { @ literal null } .
* @ param type parameter type , must not be { @ literal null } .
* @ param parameter
* @ return */
private < T > ParameterMetadata < T > next ( Part part , Class < T > type , Parameter parameter ) { } } | Assert . notNull ( type , "Type must not be null!" ) ; ParameterMetadata < T > value = new ParameterMetadata < T > ( type , parameter . getName ( ) . get ( ) , part . getType ( ) , bindableParameterValues == null ? ParameterMetadata . PLACEHOLDER : bindableParameterValues . next ( ) ) ; expressions . add ( value ) ; return value ; |
public class ProcessorsProcessor { /** * / * ( non - Javadoc )
* @ see org . opoo . press . Processor # postGenerate ( org . opoo . press . Site ) */
@ Override public void postGenerate ( Site site ) { } } | if ( processors != null ) { for ( Processor p : processors ) { p . postGenerate ( site ) ; } } |
public class FibonacciHeap { /** * { @ inheritDoc }
* @ throws IllegalStateException
* if the heap has already been used in the right hand side of a
* meld */
@ Override @ ConstantTime ( amortized = true ) public AddressableHeap . Handle < K , V > insert ( K key , V value ) { } } | if ( other != this ) { throw new IllegalStateException ( "A heap cannot be used after a meld" ) ; } if ( key == null ) { throw new NullPointerException ( "Null keys not permitted" ) ; } Node < K , V > n = new Node < K , V > ( this , key , value ) ; addToRootList ( n ) ; size ++ ; return n ; |
public class ClusterComputeResourceService { /** * Das method gets the current cluster configurations .
* @ param connectionResources
* @ param clusterMor
* @ param clusterName
* @ return
* @ throws RuntimeFaultFaultMsg
* @ throws InvalidPropertyFaultMsg */
private ClusterConfigInfoEx getClusterConfiguration ( ConnectionResources connectionResources , ManagedObjectReference clusterMor , String clusterName ) throws RuntimeFaultFaultMsg , InvalidPropertyFaultMsg { } } | ObjectContent [ ] objectContents = GetObjectProperties . getObjectProperties ( connectionResources , clusterMor , new String [ ] { ClusterParameter . CONFIGURATION_EX . getValue ( ) } ) ; if ( objectContents != null && objectContents . length == 1 ) { List < DynamicProperty > dynamicProperties = objectContents [ 0 ] . getPropSet ( ) ; if ( dynamicProperties != null && dynamicProperties . size ( ) == 1 && dynamicProperties . get ( 0 ) . getVal ( ) instanceof ClusterConfigInfoEx ) { return ( ClusterConfigInfoEx ) dynamicProperties . get ( 0 ) . getVal ( ) ; } } throw new RuntimeException ( String . format ( ANOTHER_FAILURE_MSG , clusterName ) ) ; |
public class RoaringBitmap { /** * In - place bitwise AND ( intersection ) operation . The current bitmap is modified .
* @ param x2 other bitmap */
public void and ( final RoaringBitmap x2 ) { } } | int pos1 = 0 , pos2 = 0 , intersectionSize = 0 ; final int length1 = highLowContainer . size ( ) , length2 = x2 . highLowContainer . size ( ) ; while ( pos1 < length1 && pos2 < length2 ) { final short s1 = highLowContainer . getKeyAtIndex ( pos1 ) ; final short s2 = x2 . highLowContainer . getKeyAtIndex ( pos2 ) ; if ( s1 == s2 ) { final Container c1 = highLowContainer . getContainerAtIndex ( pos1 ) ; final Container c2 = x2 . highLowContainer . getContainerAtIndex ( pos2 ) ; final Container c = c1 . iand ( c2 ) ; if ( ! c . isEmpty ( ) ) { highLowContainer . replaceKeyAndContainerAtIndex ( intersectionSize ++ , s1 , c ) ; } ++ pos1 ; ++ pos2 ; } else if ( Util . compareUnsigned ( s1 , s2 ) < 0 ) { // s1 < s2
pos1 = highLowContainer . advanceUntil ( s2 , pos1 ) ; } else { // s1 > s2
pos2 = x2 . highLowContainer . advanceUntil ( s1 , pos2 ) ; } } highLowContainer . resize ( intersectionSize ) ; |
public class PrePopulatedValidationSupport { /** * Add a new CodeSystem resource which will be available to the validator . Note that
* { @ link CodeSystem # getUrl ( ) the URL field ) in this resource must contain a value as this
* value will be used as the logical URL . */
public void addCodeSystem ( CodeSystem theCodeSystem ) { } } | Validate . notBlank ( theCodeSystem . getUrl ( ) , "theCodeSystem.getUrl() must not return a value" ) ; myCodeSystems . put ( theCodeSystem . getUrl ( ) , theCodeSystem ) ; |
public class DynamicCounter { /** * Increment a counter specified by a name , and a sequence of ( key , value ) pairs . */
public static void increment ( String name , String ... tags ) { } } | final MonitorConfig . Builder configBuilder = MonitorConfig . builder ( name ) ; Preconditions . checkArgument ( tags . length % 2 == 0 , "The sequence of (key, value) pairs must have even size: one key, one value" ) ; try { for ( int i = 0 ; i < tags . length ; i += 2 ) { configBuilder . withTag ( tags [ i ] , tags [ i + 1 ] ) ; } increment ( configBuilder . build ( ) ) ; } catch ( IllegalArgumentException e ) { LOGGER . warn ( "Failed to get a counter to increment: {}" , e . getMessage ( ) ) ; } |
public class ProcessingContext { /** * Log a info message . */
void logNote ( String msg , Object ... args ) { } } | messager . printMessage ( Diagnostic . Kind . NOTE , String . format ( msg , args ) ) ; |
public class SideBarUtils { /** * Gets all side bar sections for the specified UI class .
* @ param uiClass the UI class , must not be { @ code null } .
* @ return a collection of side bar section descriptors , never { @ code null } .
* @ see SideBarSection # ui ( ) */
public Collection < SideBarSectionDescriptor > getSideBarSections ( Class < ? extends UI > uiClass ) { } } | List < SideBarSectionDescriptor > supportedSections = new ArrayList < SideBarSectionDescriptor > ( ) ; for ( SideBarSectionDescriptor section : sections ) { if ( section . isAvailableFor ( uiClass ) ) { supportedSections . add ( section ) ; } } return supportedSections ; |
public class LongTupleNeighborhoodIterables { /** * Creates an iterable that provides iterators for iterating over the
* Moore neighborhood of the given center and the given radius . < br >
* < br >
* If the given minimum - or maximum are non - < code > null < / code > ,
* they will be used for clamping the neighborhood . < br >
* < br >
* Copies of the given tuples will be stored internally . < br >
* < br >
* Note : The result of this method will be equivalent to creating
* an ( unbounded ) iterable of the specified neighborhood , and then
* clamping the resulting iterable to the given bounds :
* < pre > < code >
* Iterable & lt ; MutableLongTuple & gt ; iterable =
* LongTupleNeighborhoodIterators . mooreNeighborhoodIterable (
* center , radius ) ;
* Iterable & lt ; MutableLongTuple & gt ; result =
* LongTupleIterables . clampingIterable ( min , max ) ;
* < / code > < / pre >
* But as the bounds may be adjusted here at creation time , this
* method may be achieve a higher performance . < br >
* < br >
* Also see < a href = " . . / . . / package - summary . html # Neighborhoods " >
* Neighborhoods < / a >
* @ param center The center of the Moore neighborhood
* @ param radius The radius of the Moore neighborhood
* @ param min The ( optional ) minimum , inclusive
* @ param max The ( optional ) maximum , exclusive
* @ param order The iteration { @ link Order }
* @ return The iterable
* @ throws IllegalArgumentException If the given ( non - < code > null < / code > )
* tuples do not have the same { @ link Tuple # getSize ( ) size } */
public static Iterable < MutableLongTuple > mooreNeighborhoodIterable ( LongTuple center , final int radius , LongTuple min , LongTuple max , Order order ) { } } | Objects . requireNonNull ( order , "The order is null" ) ; if ( min != null ) { Utils . checkForEqualSize ( center , min ) ; } if ( max != null ) { Utils . checkForEqualSize ( center , max ) ; } final LongTuple localCenter = LongTuples . copy ( center ) ; final LongTuple localMin = min == null ? null : LongTuples . copy ( min ) ; final LongTuple localMax = max == null ? null : LongTuples . copy ( max ) ; return ( ) -> LongTupleNeighborhoodIterators . mooreNeighborhoodIterator ( localCenter , radius , localMin , localMax , order ) ; |
public class JvmWildcardTypeReferenceImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // EMF-generated mapping: translates a structural-feature ID declared on a
    // mixed-in base class into this class's own derived feature ID. Left
    // byte-identical because @generated methods are overwritten by the EMF
    // code generator on regeneration.
    @Override
    public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
        if (baseClass == JvmConstraintOwner.class) {
            switch (baseFeatureID) {
                // CONSTRAINTS is the only feature this type derives from JvmConstraintOwner.
                case TypesPackage.JVM_CONSTRAINT_OWNER__CONSTRAINTS:
                    return TypesPackage.JVM_WILDCARD_TYPE_REFERENCE__CONSTRAINTS;
                default:
                    return -1; // no derived counterpart for this base feature
            }
        }
        // Not a JvmConstraintOwner feature: delegate to the superclass mapping.
        return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
    }
}
public class StandardRoadConnection { /** * Notify the iterators about changes . */
protected void fireIteratorUpdate ( ) { } } | if ( this . listeningIterators != null ) { for ( final IClockwiseIterator iterator : this . listeningIterators ) { if ( iterator != null ) { iterator . dataStructureUpdated ( ) ; } } } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.