signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Director { /** * Creates array of productIds and calls method below * @ param featureNames Collection of features names to uninstall * @ param allowUninstallAll If false , will fail if no user features are installed * @ param force If uninstallation should be forced * @ throws InstallException */ public void uninstallFeaturesPrereqChecking ( Collection < String > featureNames , boolean allowUninstallAll , boolean force ) throws InstallException { } }
getUninstallDirector ( ) . uninstallFeaturesPrereqChecking ( featureNames , allowUninstallAll , force ) ;
public class PicketBoxSecurityIntegration { /** * { @ inheritDoc } */ public org . ironjacamar . core . spi . security . SecurityContext getSecurityContext ( ) { } }
org . jboss . security . SecurityContext sc = SecurityContextAssociation . getSecurityContext ( ) ; if ( sc == null ) return null ; return new PicketBoxSecurityContext ( sc ) ;
public class RestoreAgent { /** * Creates a ZooKeeper directory if it doesn ' t exist . Crashes VoltDB if the * creation fails for any reason other then the path already existing . * @ param path */ void createZKDirectory ( String path ) { } }
try { try { m_zk . create ( path , new byte [ 0 ] , Ids . OPEN_ACL_UNSAFE , CreateMode . PERSISTENT ) ; } catch ( KeeperException e ) { if ( e . code ( ) != Code . NODEEXISTS ) { throw e ; } } } catch ( Exception e ) { VoltDB . crashGlobalVoltDB ( "Failed to create Zookeeper node: " + e . getMessage ( ) , false , e ) ; }
public class RecyclerFactory { /** * Creates a new { @ link Recycler } . * @ param newObjectCreator the { @ link Function } to create a new object * @ param < T > the type being recycled . * @ return the { @ link Recycler } * @ throws NullPointerException if { @ code newObjectCreator } is { @ code null } */ public static < T > Recycler < T > createRecycler ( Function < Handle < T > , T > newObjectCreator ) { } }
Objects . requireNonNull ( newObjectCreator , "newObjectCreator must not be null" ) ; return new Recycler < T > ( ) { @ Override protected T newObject ( Handle < T > handle ) { return newObjectCreator . apply ( handle ) ; } } ;
public class DefaultConsumerBootstrap { /** * 取消订阅服务列表 */ public void unSubscribe ( ) { } }
if ( StringUtils . isEmpty ( consumerConfig . getDirectUrl ( ) ) && consumerConfig . isSubscribe ( ) ) { List < RegistryConfig > registryConfigs = consumerConfig . getRegistry ( ) ; if ( registryConfigs != null ) { for ( RegistryConfig registryConfig : registryConfigs ) { Registry registry = RegistryFactory . getRegistry ( registryConfig ) ; try { registry . unSubscribe ( consumerConfig ) ; } catch ( Exception e ) { String appName = consumerConfig . getAppName ( ) ; if ( LOGGER . isWarnEnabled ( appName ) ) { LOGGER . warnWithApp ( appName , "Catch exception when unSubscribe from registry: " + registryConfig . getId ( ) + ", but you can ignore if it's called by JVM shutdown hook" , e ) ; } } } } }
public class rewritepolicy_binding { /** * Use this API to fetch rewritepolicy _ binding resource of given name . */ public static rewritepolicy_binding get ( nitro_service service , String name ) throws Exception { } }
rewritepolicy_binding obj = new rewritepolicy_binding ( ) ; obj . set_name ( name ) ; rewritepolicy_binding response = ( rewritepolicy_binding ) obj . get_resource ( service ) ; return response ;
public class DAOValidatorHelper { /** * Methode permettant de verifier si un chemin contient des variables d ' environnement * @ param expressionChaine a controler * @ returnResultat de la verification */ public static boolean isExpressionContainsENV ( String expression ) { } }
// Si la chaine est vide : false if ( expression == null || expression . trim ( ) . length ( ) == 0 ) { // On retourne false return false ; } // On split return isExpressionContainPattern ( expression , ENV_CHAIN_PATTERN ) ;
public class DatastreamResource { /** * < p > Invoke API - A . getDatastreamDissemination ( context , pid , dsID , asOfDateTime ) * < / p > < p > * GET / objects / { pid } / datastreams / { dsID } / content ? asOfDateTime < / p > */ @ Path ( "/{dsID}/content" ) @ GET public Response getDatastream ( @ PathParam ( RestParam . PID ) String pid , @ PathParam ( RestParam . DSID ) String dsID , @ QueryParam ( RestParam . AS_OF_DATE_TIME ) String dateTime , @ QueryParam ( RestParam . DOWNLOAD ) String download , @ QueryParam ( RestParam . FLASH ) @ DefaultValue ( "false" ) boolean flash ) { } }
Context context = getContext ( ) ; try { Date asOfDateTime = DateUtility . parseDateOrNull ( dateTime ) ; MIMETypedStream stream = m_access . getDatastreamDissemination ( context , pid , dsID , asOfDateTime ) ; if ( m_datastreamFilenameHelper != null ) { m_datastreamFilenameHelper . addContentDispositionHeader ( context , pid , dsID , download , asOfDateTime , stream ) ; } return buildResponse ( stream ) ; } catch ( Exception ex ) { return handleException ( ex , flash ) ; }
public class MiniTemplatorParser {

    /**
     * Scans templateText and dispatches every template command found.
     * If shortFormEnabled is true, the short form commands in the format "<$...>" are also recognized.
     * Unrecognized commands fall back to being treated as plain template text.
     */
    private void parseTemplateCommands() throws MiniTemplator.TemplateSyntaxException {
        int p = 0; // p is the current position within templateText
        while (true) {
            int p0 = templateText.indexOf(cmdStartStr, p); // p0 is the start of the current command
            boolean shortForm = false;
            // Look for a short-form start marker before (or instead of) the long-form one.
            if (shortFormEnabled && p0 != p) {
                if (p0 == -1) {
                    p0 = templateText.indexOf(cmdStartStrShort, p);
                    shortForm = true;
                } else {
                    // A short-form marker earlier than the long-form one takes precedence.
                    int p2 = templateText.substring(p, p0).indexOf(cmdStartStrShort);
                    if (p2 != -1) {
                        p0 = p + p2;
                        shortForm = true;
                    }
                }
            }
            if (p0 == -1) { // no more commands
                break;
            }
            conditionalExclude(p, p0); // process text up to the start of the current command
            if (shortForm) { // short form command
                p = templateText.indexOf(cmdEndStrShort, p0 + cmdStartStrShort.length());
                if (p == -1) { // if no terminating ">" is found, we process it as normal text
                    p = p0 + cmdStartStrShort.length();
                    conditionalExclude(p0, p);
                    continue;
                }
                p += cmdEndStrShort.length();
                String cmdLine = templateText.substring(p0 + cmdStartStrShort.length(),
                                                        p - cmdEndStrShort.length());
                if (!processShortFormTemplateCommand(cmdLine, p0, p)) {
                    // If a short form command is not recognized, we process the whole
                    // command structure as normal text.
                    conditionalExclude(p0, p);
                }
            } else { // normal (long) form command
                p = templateText.indexOf(cmdEndStr, p0 + cmdStartStr.length());
                if (p == -1) {
                    throw new MiniTemplator.TemplateSyntaxException(
                            "Invalid HTML comment in template at offset " + p0 + ".");
                }
                p += cmdEndStr.length();
                String cmdLine = templateText.substring(p0 + cmdStartStr.length(),
                                                        p - cmdEndStr.length());
                resumeCmdParsingFromStart = false;
                if (!processTemplateCommand(cmdLine, p0, p)) {
                    conditionalExclude(p0, p); // process as normal template text
                }
                if (resumeCmdParsingFromStart) { // (if a subtemplate has been included)
                    p = p0; // rescan from the command start so included text is parsed too
                }
            }
        }
    }
}
public class AWSBackupClient { /** * Backup plans are documents that contain information that AWS Backup uses to schedule tasks that create recovery * points of resources . * If you call < code > CreateBackupPlan < / code > with a plan that already exists , the existing < code > backupPlanId < / code > * is returned . * @ param createBackupPlanRequest * @ return Result of the CreateBackupPlan operation returned by the service . * @ throws LimitExceededException * A limit in the request has been exceeded ; for example , a maximum number of items allowed in a request . * @ throws AlreadyExistsException * The required resource already exists . * @ throws InvalidParameterValueException * Indicates that something is wrong with a parameter ' s value . For example , the value is out of range . * @ throws MissingParameterValueException * Indicates that a required parameter is missing . * @ throws ServiceUnavailableException * The request failed due to a temporary failure of the server . * @ sample AWSBackup . CreateBackupPlan * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / backup - 2018-11-15 / CreateBackupPlan " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateBackupPlanResult createBackupPlan ( CreateBackupPlanRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateBackupPlan ( request ) ;
public class AvroUtils { /** * A helper method to extract avro serialization configurations from the topology configuration and register * specific kryo serializers as necessary . A default serializer will be provided if none is specified in the * configuration . " avro . serializer " should specify the complete class name of the serializer , e . g . * " org . apache . stgorm . hdfs . avro . GenericAvroSerializer " * @ param conf The topology configuration * @ throws ClassNotFoundException If the specified serializer cannot be located . */ public static void addAvroKryoSerializations ( Config conf ) throws ClassNotFoundException { } }
final Class serializerClass ; if ( conf . containsKey ( "avro.serializer" ) ) { serializerClass = Class . forName ( ( String ) conf . get ( "avro.serializer" ) ) ; } else { serializerClass = GenericAvroSerializer . class ; } conf . registerSerialization ( GenericData . Record . class , serializerClass ) ; conf . setSkipMissingKryoRegistrations ( false ) ;
public class DescribeRaidArraysResult { /** * A < code > RaidArrays < / code > object that describes the specified RAID arrays . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setRaidArrays ( java . util . Collection ) } or { @ link # withRaidArrays ( java . util . Collection ) } if you want to * override the existing values . * @ param raidArrays * A < code > RaidArrays < / code > object that describes the specified RAID arrays . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeRaidArraysResult withRaidArrays ( RaidArray ... raidArrays ) { } }
if ( this . raidArrays == null ) { setRaidArrays ( new com . amazonaws . internal . SdkInternalList < RaidArray > ( raidArrays . length ) ) ; } for ( RaidArray ele : raidArrays ) { this . raidArrays . add ( ele ) ; } return this ;
public class MultiUserChatLightManager { /** * Returns a collection with the XMPP addresses of the MUC Light services . * @ return a collection with the XMPP addresses of the MUC Light services . * @ throws XMPPErrorException * @ throws NoResponseException * @ throws NotConnectedException * @ throws InterruptedException */ public List < DomainBareJid > getLocalServices ( ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
ServiceDiscoveryManager sdm = ServiceDiscoveryManager . getInstanceFor ( connection ( ) ) ; return sdm . findServices ( MultiUserChatLight . NAMESPACE , false , false ) ;
public class EsaResourceImpl { /** * { @ inheritDoc } */ @ Override public Map < String , Collection < String > > getRequireFeatureWithTolerates ( ) { } }
// The feature may be an older feature which never had the tolerates information // stored , in which case , look in the older requireFeature field and massage // that info into the required format . // Or there may just not be any required features at all . Collection < RequireFeatureWithTolerates > rfwt = _asset . getWlpInformation ( ) . getRequireFeatureWithTolerates ( ) ; if ( rfwt != null ) { Map < String , Collection < String > > rv = new HashMap < String , Collection < String > > ( ) ; for ( RequireFeatureWithTolerates feature : rfwt ) { rv . put ( feature . getFeature ( ) , feature . getTolerates ( ) ) ; } return rv ; } // Newer field not present , check the older field Collection < String > rf = _asset . getWlpInformation ( ) . getRequireFeature ( ) ; if ( rf != null ) { Map < String , Collection < String > > rv = new HashMap < String , Collection < String > > ( ) ; for ( String feature : rf ) { rv . put ( feature , Collections . < String > emptyList ( ) ) ; } return rv ; } // No required features at all return null ;
public class JcrTools { /** * Execute the supplied operation on each node in the workspace accessible by the supplied session . * @ param session the session * @ param includeSystemNodes true if all nodes under " / jcr : system " should be included , or false if the system nodes should be * excluded * @ param operation the operation * @ throws Exception the exception thrown by the repository or the operation */ public void onEachNode ( Session session , boolean includeSystemNodes , NodeOperation operation ) throws Exception { } }
Node node = session . getRootNode ( ) ; operation . run ( node ) ; NodeIterator iter = node . getNodes ( ) ; while ( iter . hasNext ( ) ) { Node child = iter . nextNode ( ) ; if ( ! includeSystemNodes && child . getName ( ) . equals ( "jcr:system" ) ) continue ; operation . run ( child ) ; onEachNodeBelow ( child , operation ) ; }
public class GeometryEngine { /** * See OperatorDisjoint . */ public static boolean disjoint ( Geometry geometry1 , Geometry geometry2 , SpatialReference spatialReference ) { } }
OperatorDisjoint op = ( OperatorDisjoint ) factory . getOperator ( Operator . Type . Disjoint ) ; boolean result = op . execute ( geometry1 , geometry2 , spatialReference , null ) ; return result ;
public class NodeTypes {

    /**
     * Validates that the given property definition is valid under the ModeShape and JCR type
     * rules within the given context. ModeShape considers a property definition valid if it
     * meets these criteria:
     * <ol>
     * <li>Residual properties cannot be mandatory</li>
     * <li>If the property is auto-created, it must specify a default value</li>
     * <li>If the property is single-valued, it can only specify a single default value</li>
     * <li>If the property overrides an existing property definition from a supertype, the new
     * definition must be mandatory if the old definition was mandatory</li>
     * <li>The property cannot override an existing property definition from a supertype if the
     * ancestor definition is protected</li>
     * <li>If the property overrides an existing property definition from a supertype, the new
     * definition must have the same required type as the old definition or a required type that
     * can ALWAYS be cast to the required type of the ancestor (see section 3.6.4 of the JCR 2.0
     * specification)</li>
     * </ol>
     * Note that an empty set of properties would meet the criteria above.
     *
     * @param propertyDefinition the property definition to be validated
     * @param supertypes the names of the supertypes of the node type to which this property belongs
     * @param pendingTypes the list of types previously registered in this batch but not yet
     *        committed to the repository
     * @throws RepositoryException if the property definition is not valid
     */
    private void validatePropertyDefinition(JcrPropertyDefinition propertyDefinition,
                                            List<Name> supertypes,
                                            List<JcrNodeType> pendingTypes) throws RepositoryException {
        assert propertyDefinition != null;
        assert supertypes != null;
        assert pendingTypes != null;
        // A "residual" definition is the wildcard item name "*".
        boolean residual = JcrNodeType.RESIDUAL_ITEM_NAME.equals(propertyDefinition.getName());
        // Criterion 1: residual properties cannot be mandatory (protected ones are exempt).
        if (propertyDefinition.isMandatory() && !propertyDefinition.isProtected() && residual) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.residualPropertyDefinitionsCannotBeMandatory.text(propertyDefinition.getName()));
        }
        // Residual properties cannot be auto-created either.
        if (propertyDefinition.isAutoCreated() && residual) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.residualPropertyDefinitionsCannotBeAutoCreated.text(propertyDefinition.getName()));
        }
        Value[] defaultValues = propertyDefinition.getDefaultValues();
        // Criterion 2: auto-created (non-protected) properties need at least one default.
        if (propertyDefinition.isAutoCreated() && !propertyDefinition.isProtected()
            && (defaultValues == null || defaultValues.length == 0)) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.autocreatedPropertyNeedsDefault.text(propertyDefinition.getName(),
                                                                 propertyDefinition.getDeclaringNodeType().getName()));
        }
        // Criterion 3: single-valued properties may carry at most one default value.
        if (!propertyDefinition.isMultiple() && (defaultValues != null && defaultValues.length > 1)) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.singleValuedPropertyNeedsSingleValuedDefault.text(propertyDefinition.getName(),
                                                                              propertyDefinition.getDeclaringNodeType().getName()));
        }
        // Resolve the property name; an unresolvable name is treated as the residual name.
        Name propName = context.getValueFactories().getNameFactory().create(propertyDefinition.getName());
        propName = propName == null ? JcrNodeType.RESIDUAL_NAME : propName;
        // Find ancestor definitions with the same name and cardinality that this one may override.
        List<JcrPropertyDefinition> propertyDefinitionsFromAncestors =
                findPropertyDefinitions(supertypes,
                                        propName,
                                        propertyDefinition.isMultiple() ? PropertyCardinality.MULTI_VALUED_ONLY
                                                                        : PropertyCardinality.SINGLE_VALUED_ONLY,
                                        pendingTypes);
        for (JcrPropertyDefinition propertyDefinitionFromAncestor : propertyDefinitionsFromAncestors) {
            // Criterion 5: protected ancestor definitions cannot be overridden.
            if (propertyDefinitionFromAncestor.isProtected()) {
                throw new InvalidNodeTypeDefinitionException(
                        JcrI18n.cannotOverrideProtectedDefinition.text(propertyDefinitionFromAncestor.getDeclaringNodeType().getName(),
                                                                       "property"));
            }
            // Criterion 4: a mandatory ancestor definition must stay mandatory.
            if (propertyDefinitionFromAncestor.isMandatory() && !propertyDefinition.isMandatory()) {
                throw new InvalidNodeTypeDefinitionException(
                        JcrI18n.cannotMakeMandatoryDefinitionOptional.text(propertyDefinitionFromAncestor.getDeclaringNodeType().getName(),
                                                                           "property"));
            }
            // Overriding definitions must be at least as constrained as the ancestor.
            if (!propertyDefinition.isAsOrMoreConstrainedThan(propertyDefinitionFromAncestor, context)) {
                throw new InvalidNodeTypeDefinitionException(
                        JcrI18n.constraintsChangedInSubtype.text(propName,
                                                                 propertyDefinitionFromAncestor.getDeclaringNodeType().getName()));
            }
            // Criterion 6: the new required type must always be safely castable to the ancestor's.
            if (!isAlwaysSafeConversion(propertyDefinition.getRequiredType(),
                                        propertyDefinitionFromAncestor.getRequiredType())) {
                throw new InvalidNodeTypeDefinitionException(
                        JcrI18n.cannotRedefineProperty.text(propName,
                                                            org.modeshape.jcr.api.PropertyType.nameFromValue(propertyDefinition.getRequiredType()),
                                                            propertyDefinitionFromAncestor.getDeclaringNodeType().getName(),
                                                            org.modeshape.jcr.api.PropertyType.nameFromValue(propertyDefinitionFromAncestor.getRequiredType())));
            }
        }
    }
}
public class JsonPathUtils { /** * Evaluate JsonPath expression on given payload string and return result as string . * @ param payload * @ param jsonPathExpression * @ return */ public static String evaluateAsString ( String payload , String jsonPathExpression ) { } }
try { JSONParser parser = new JSONParser ( JSONParser . MODE_JSON_SIMPLE ) ; Object receivedJson = parser . parse ( payload ) ; ReadContext readerContext = JsonPath . parse ( receivedJson ) ; return evaluateAsString ( readerContext , jsonPathExpression ) ; } catch ( ParseException e ) { throw new CitrusRuntimeException ( "Failed to parse JSON text" , e ) ; }
public class StandaloneConfiguration { /** * copy another configuration ' s values into this one if they are set . */ public void merge ( StandaloneConfiguration other ) { } }
if ( other == null ) { return ; } if ( isMergeAble ( Integer . class , other . browserTimeout , browserTimeout ) ) { browserTimeout = other . browserTimeout ; } if ( isMergeAble ( Integer . class , other . jettyMaxThreads , jettyMaxThreads ) ) { jettyMaxThreads = other . jettyMaxThreads ; } if ( isMergeAble ( Integer . class , other . timeout , timeout ) ) { timeout = other . timeout ; } // role , host , port , log , debug , version , enablePassThrough , and help are not merged , // they are only consumed by the immediately running process and should never affect a remote
public class RendererBuilder { /** * Configure prototypes used as Renderer instances . * @ param prototypes to use by the builder in order to create Renderer instances . */ public final void setPrototypes ( Collection < ? extends Renderer < ? extends T > > prototypes ) { } }
if ( prototypes == null ) { throw new NeedsPrototypesException ( "RendererBuilder has to be created with a non null collection of" + "Collection<Renderer<T> to provide new or recycled Renderer instances" ) ; } this . prototypes = new LinkedList < > ( prototypes ) ;
public class RaftSessionRegistry { /** * Removes all sessions registered for the given service . * @ param primitiveId the service identifier */ public void removeSessions ( PrimitiveId primitiveId ) { } }
sessions . entrySet ( ) . removeIf ( e -> e . getValue ( ) . getService ( ) . serviceId ( ) . equals ( primitiveId ) ) ;
public class FunctionKeyReader { /** * Creates new extractor capable of extracting a saga instance key from a message . */ public static < MESSAGE > FunctionKeyReader < MESSAGE , String > create ( final Class < MESSAGE > messageClazz , final KeyReadFunction < MESSAGE > readFunction ) { } }
return new FunctionKeyReader < > ( messageClazz , ExtractFunctionReader . encapsulate ( readFunction ) ) ;
public class SshX509RsaPublicKey { /** * Encode the public key into a blob of binary data , the encoded result will * be passed into init to recreate the key . * @ return an encoded byte array * @ throws SshException * @ todo Implement this com . sshtools . ssh . SshPublicKey method */ public byte [ ] getEncoded ( ) throws SshException { } }
try { return cert . getEncoded ( ) ; } catch ( Throwable ex ) { throw new SshException ( "Failed to encoded key data" , SshException . INTERNAL_ERROR , ex ) ; }
public class Polygon { /** * Merge equals points . * Creates constraints and populates the context with points . */ public void prepareTriangulation ( TriangulationContext < ? > tcx ) { } }
int hint = _points . size ( ) ; if ( _steinerPoints != null ) { hint += _steinerPoints . size ( ) ; } if ( _holes != null ) { for ( Polygon p : _holes ) { hint += p . pointCount ( ) ; } } HashMap < TriangulationPoint , TriangulationPoint > uniquePts = new HashMap < TriangulationPoint , TriangulationPoint > ( hint ) ; TriangulationPoint . mergeInstances ( uniquePts , _points ) ; if ( _steinerPoints != null ) { TriangulationPoint . mergeInstances ( uniquePts , _steinerPoints ) ; } if ( _holes != null ) { for ( Polygon p : _holes ) { TriangulationPoint . mergeInstances ( uniquePts , p . _points ) ; } } if ( m_triangles == null ) { m_triangles = new ArrayList < DelaunayTriangle > ( _points . size ( ) ) ; } else { m_triangles . clear ( ) ; } // Outer constraints for ( int i = 0 ; i < _points . size ( ) - 1 ; i ++ ) { tcx . newConstraint ( _points . get ( i ) , _points . get ( i + 1 ) ) ; } tcx . newConstraint ( _points . get ( 0 ) , _points . get ( _points . size ( ) - 1 ) ) ; // Hole constraints if ( _holes != null ) { for ( Polygon p : _holes ) { for ( int i = 0 ; i < p . _points . size ( ) - 1 ; i ++ ) { tcx . newConstraint ( p . _points . get ( i ) , p . _points . get ( i + 1 ) ) ; } tcx . newConstraint ( p . _points . get ( 0 ) , p . _points . get ( p . _points . size ( ) - 1 ) ) ; } } tcx . addPoints ( uniquePts . keySet ( ) ) ;
public class XSplitter {

    /**
     * Performs a minimum overlap split. The {@link #chooseMinimumOverlapSplit} calculates the
     * partition for the split dimension determined by {@link #chooseSplitAxis}
     * <code>(common split history, minFanout, maxEntries - minFanout + 1)</code> with the minimum
     * overlap. This range may have been tested before (by the {@link #topologicalSplit}), but for
     * the minimum overlap test we need to test that anew. Note that this method returns
     * <code>null</code> if the minimum overlap split has a volume which is larger than the allowed
     * <code>maxOverlap</code> ratio or if the tree's minimum fanout is not larger than the minimum
     * directory size.
     *
     * @return distribution resulting from the minimum overlap split, or {@code null} when no
     *         valid split exists
     */
    public SplitSorting minimumOverlapSplit() {
        // This split strategy is defined for directory nodes only.
        if (node.getEntry(0) instanceof LeafEntry) {
            throw new IllegalArgumentException("The minimum overlap split will only be performed on directory nodes");
        }
        if (node.getNumEntries() < 2) {
            throw new IllegalArgumentException("Splitting less than two entries is pointless.");
        }
        int maxEntries = tree.getDirCapacity() - 1;
        int minFanout = tree.get_min_fanout();
        // Splits only make sense once the node is full.
        if (node.getNumEntries() < maxEntries) {
            throw new IllegalArgumentException("This entry list has not yet reached the maximum limit: "
                    + node.getNumEntries() + "<=" + maxEntries);
        }
        assert !(node.getEntry(0) instanceof LeafEntry);
        if (minFanout >= tree.getDirMinimum()) {
            // minFanout not set for allowing underflowing nodes
            return null;
        }
        IntIterator dimensionListing;
        if (node.getEntry(0) instanceof XTreeDirectoryEntry) {
            // filter common split dimensions
            dimensionListing = getCommonSplitDimensions(node);
            if (!dimensionListing.hasNext()) {
                // no common dimensions
                return null;
            }
        } else {
            // test all dimensions
            dimensionListing = new IntegerRangeIterator(0, node.getEntry(0).getDimensionality());
        }
        int formerSplitAxis = this.splitAxis;
        maxEntries = maxEntries + 1 - minFanout; // = maximum left-hand size
        chooseSplitAxis(dimensionListing, minFanout, maxEntries);
        // find the best split point
        if (formerSplitAxis == this.splitAxis && tree.getDirMinimum() > minFanout) {
            // remember: this follows an unsuccessful topological split
            // avoid duplicate computations of {minEntries, ..., maxEntries}
            double minOverlap = pastOverlap;
            // test {minFanout, ..., minEntries - 1}
            SplitSorting ret1 = chooseMinimumOverlapSplit(this.splitAxis, minFanout,
                                                          tree.getDirMinimum() - 1, false);
            if (ret1 != null && pastOverlap < minOverlap) {
                minOverlap = pastOverlap; // this is a valid choice
            }
            // test {maxEntries - minEntries + 2, ..., maxEntries - minFanout + 1}
            SplitSorting ret2 = chooseMinimumOverlapSplit(this.splitAxis, minFanout,
                                                          tree.getDirMinimum() - 1, true);
            if (ret2 == null) {
                // accept first range regardless of whether or not there is one
                pastOverlap = minOverlap;
                return ret1;
            }
            if (pastOverlap < minOverlap) {
                // the second range is better
                return ret2;
            }
            pastOverlap = minOverlap;
            // the first range is better
            return ret1;
        } else {
            return chooseMinimumOverlapSplit(this.splitAxis, minFanout, maxEntries, false);
        }
    }
}
public class ClassFeatureSet { /** * Determine if given method overrides a superclass or superinterface * method . * @ param javaClass * class defining the method * @ param method * the method * @ return true if the method overrides a superclass / superinterface method , * false if not */ private boolean overridesSuperclassMethod ( JavaClass javaClass , Method method ) { } }
if ( method . isStatic ( ) ) { return false ; } try { JavaClass [ ] superclassList = javaClass . getSuperClasses ( ) ; if ( superclassList != null ) { JavaClassAndMethod match = Hierarchy . findMethod ( superclassList , method . getName ( ) , method . getSignature ( ) , Hierarchy . INSTANCE_METHOD ) ; if ( match != null ) { return true ; } } JavaClass [ ] interfaceList = javaClass . getAllInterfaces ( ) ; if ( interfaceList != null ) { JavaClassAndMethod match = Hierarchy . findMethod ( interfaceList , method . getName ( ) , method . getSignature ( ) , Hierarchy . INSTANCE_METHOD ) ; if ( match != null ) { return true ; } } return false ; } catch ( ClassNotFoundException e ) { return true ; }
public class DHistogram { /** * The initial histogram bins are setup from the Vec rollups . */ static public DHistogram [ ] initialHist ( Frame fr , int ncols , int nbins , DHistogram hs [ ] , int min_rows , boolean doGrpSplit , boolean isBinom ) { } }
Vec vecs [ ] = fr . vecs ( ) ; for ( int c = 0 ; c < ncols ; c ++ ) { Vec v = vecs [ c ] ; final float minIn = ( float ) Math . max ( v . min ( ) , - Float . MAX_VALUE ) ; // inclusive vector min final float maxIn = ( float ) Math . min ( v . max ( ) , Float . MAX_VALUE ) ; // inclusive vector max final float maxEx = find_maxEx ( maxIn , v . isInt ( ) ? 1 : 0 ) ; // smallest exclusive max final long vlen = v . length ( ) ; hs [ c ] = v . naCnt ( ) == vlen || v . min ( ) == v . max ( ) ? null : make ( fr . _names [ c ] , nbins , ( byte ) ( v . isEnum ( ) ? 2 : ( v . isInt ( ) ? 1 : 0 ) ) , minIn , maxEx , vlen , min_rows , doGrpSplit , isBinom ) ; } return hs ;
public class AnnualTimeZoneRule { /** * { @ inheritDoc } */ @ Override public boolean isEquivalentTo ( TimeZoneRule other ) { } }
if ( ! ( other instanceof AnnualTimeZoneRule ) ) { return false ; } AnnualTimeZoneRule otherRule = ( AnnualTimeZoneRule ) other ; if ( startYear == otherRule . startYear && endYear == otherRule . endYear && dateTimeRule . equals ( otherRule . dateTimeRule ) ) { return super . isEquivalentTo ( other ) ; } return false ;
public class CustomBuiltXML {

    /**
     * Overrides the visitor to find String concatenations that include XML strings.
     *
     * @param seen the opcode that is being visited
     */
    @Override
    public void sawOpcode(int seen) {
        String strCon = null;
        try {
            stack.precomputation(this);
            if (seen == Const.INVOKESPECIAL) {
                // Constructor call on a String-convertible class, e.g. new StringBuilder("<tag>").
                String clsName = getClassConstantOperand();
                if (SignatureUtils.isPlainStringConvertableClass(clsName)) {
                    String methodName = getNameConstantOperand();
                    String methodSig = getSigConstantOperand();
                    if (Values.CONSTRUCTOR.equals(methodName)
                            && XML_SIG_BUILDER.withReturnType("V").toString().equals(methodSig)
                            && (stack.getStackDepth() > 0)) {
                        OpcodeStack.Item itm = stack.getStackItem(0);
                        strCon = (String) itm.getConstant();
                    }
                }
            } else if (seen == Const.INVOKEVIRTUAL) {
                // append(...) call on a String-convertible class, e.g. sb.append("<tag>").
                String clsName = getClassConstantOperand();
                if (SignatureUtils.isPlainStringConvertableClass(clsName)) {
                    String methodName = getNameConstantOperand();
                    String methodSig = getSigConstantOperand();
                    if ("append".equals(methodName)
                            && XML_SIG_BUILDER.withReturnType(clsName).toString().equals(methodSig)
                            && (stack.getStackDepth() > 0)) {
                        OpcodeStack.Item itm = stack.getStackItem(0);
                        strCon = (String) itm.getConstant();
                    }
                }
            }
            // strCon is non-null only when a constant string operand was captured above.
            if (strCon != null) {
                strCon = strCon.trim();
                if (strCon.length() == 0) {
                    return;
                }
                // Tally the first XML-looking pattern that matches this fragment.
                for (XMLPattern pattern : xmlPatterns) {
                    Matcher m = pattern.getPattern().matcher(strCon);
                    if (m.matches()) {
                        xmlItemCount++;
                        if (pattern.isConfident()) {
                            xmlConfidentCount++;
                        }
                        // Remember the bytecode offset of the first confident match.
                        if ((firstPC < 0) && (xmlConfidentCount > 0)) {
                            firstPC = getPC();
                        }
                        break;
                    }
                }
            }
        } finally {
            // Keep the opcode stack in sync regardless of early returns above.
            stack.sawOpcode(this, seen);
        }
    }
}
public class SingleFilterAdapter { /** * Adapt the untyped hbaseFilter instance into a RowFilter . * @ param context a { @ link com . google . cloud . bigtable . hbase . adapters . filters . FilterAdapterContext } object . * @ param hbaseFilter a { @ link org . apache . hadoop . hbase . filter . Filter } object . * @ return a { @ link com . google . bigtable . v2 . RowFilter } object . * @ throws java . io . IOException if any . */ public Filters . Filter adapt ( FilterAdapterContext context , Filter hbaseFilter ) throws IOException { } }
T typedFilter = getTypedFilter ( hbaseFilter ) ; return adapter . adapt ( context , typedFilter ) ;
public class Transformation2D { /** * Set this transformation to be a shift . * @ param x * The X coordinate to shift to . * @ param y * The Y coordinate to shift to . */ public void setShift ( double x , double y ) { } }
xx = 1 ; xy = 0 ; xd = x ; yx = 0 ; yy = 1 ; yd = y ;
public class JsonViewSupportFactoryBean { /** * Registering custom serializer allows to the JSonView to deal with custom serializations for certains field types . < br > * This way you could register for instance a JODA serialization as a DateTimeSerializer . < br > * Thus , when JSonView find a field of that type ( DateTime ) , it will delegate the serialization to the serializer specified . < br > * Example : < br > * < code > * JsonViewSupportFactoryBean bean = new JsonViewSupportFactoryBean ( mapper ) ; * bean . registerCustomSerializer ( DateTime . class , new DateTimeSerializer ( ) ) ; * < / code > * @ param < T > Type class of the serializer * @ param cls { @ link Class } the class type you want to add a custom serializer * @ param forType { @ link JsonSerializer } the serializer you want to apply for that type */ public < T > void registerCustomSerializer ( Class < T > cls , JsonSerializer < T > forType ) { } }
// Thin delegation: registration is stored on the wrapped converter.
this . converter . registerCustomSerializer ( cls , forType ) ;
public class ForwardingTransformerHandlerBase { /** * LexicalHandler */ public void startDTD ( String name , String publicId , String systemId ) throws SAXException { } }
// Forward the SAX LexicalHandler DTD-start event to the wrapped target unchanged.
getLexicalTarget ( ) . startDTD ( name , publicId , systemId ) ;
public class AWSCognitoIdentityProviderClient { /** * Gets the header information for the . csv file to be used as input for the user import job . * @ param getCSVHeaderRequest * Represents the request to get the header information for the . csv file for the user import job . * @ return Result of the GetCSVHeader operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . GetCSVHeader * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / GetCSVHeader " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetCSVHeaderResult getCSVHeader ( GetCSVHeaderRequest request ) { } }
// Standard AWS SDK dispatch: run pre-execution request handlers, then invoke the generated executor.
request = beforeClientExecution ( request ) ; return executeGetCSVHeader ( request ) ;
public class PropertyChangeSupport { /** * Reports a bound property update to listeners * that have been registered to track updates of * all properties or a property with the specified name . * No event is fired if old and new values are equal and non - null . * This is merely a convenience wrapper around the more general * { @ link # firePropertyChange ( PropertyChangeEvent ) } method . * @ param propertyName the programmatic name of the property that was changed * @ param oldValue the old value of the property * @ param newValue the new value of the property */ public void firePropertyChange ( String propertyName , Object oldValue , Object newValue ) { } }
if ( oldValue == null || newValue == null || ! oldValue . equals ( newValue ) ) { firePropertyChange ( new PropertyChangeEvent ( this . source , propertyName , oldValue , newValue ) ) ; }
public class XMLConfigAdmin { /** * run update from cfml engine * @ throws PageException */ public void runUpdate ( Password password ) throws PageException { } }
// Verifies write access and resolves the server config with the supplied password, then performs
// the engine update while holding the factory lock so concurrent updates cannot interleave.
// cleanUp(factory) is invoked first — presumably to remove stale patch/update artifacts before
// factory.update downloads the new engine (TODO confirm against cleanUp's implementation).
checkWriteAccess ( ) ; ConfigServerImpl cs = ( ConfigServerImpl ) ConfigImpl . getConfigServer ( config , password ) ; CFMLEngineFactory factory = cs . getCFMLEngine ( ) . getCFMLEngineFactory ( ) ; synchronized ( factory ) { try { cleanUp ( factory ) ; factory . update ( cs . getPassword ( ) , cs . getIdentification ( ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } }
public class Task { /** * Update the job status record that shows this job has started . */ private void setTaskStart ( ) { } }
// Record the executing host and start time, clear any leftover finish/progress/failure state from a
// previous run (null removes the property), then persist the record with IN_PROGRESS status.
m_taskRecord . setProperty ( TaskRecord . PROP_EXECUTOR , m_hostID ) ; m_taskRecord . setProperty ( TaskRecord . PROP_START_TIME , Long . toString ( System . currentTimeMillis ( ) ) ) ; m_taskRecord . setProperty ( TaskRecord . PROP_FINISH_TIME , null ) ; m_taskRecord . setProperty ( TaskRecord . PROP_PROGRESS , null ) ; m_taskRecord . setProperty ( TaskRecord . PROP_PROGRESS_TIME , null ) ; m_taskRecord . setProperty ( TaskRecord . PROP_FAIL_REASON , null ) ; m_taskRecord . setStatus ( TaskStatus . IN_PROGRESS ) ; TaskManagerService . instance ( ) . updateTaskStatus ( m_tenant , m_taskRecord , false ) ;
public class GlobalMercator { /** * Returns bounds of the given tile in EPSG : 900913 coordinates * @ param tx * @ param ty * @ param zoom * @ return the array of [ w , s , e , n ] */ public double [ ] TileBounds ( int tx , int ty , int zoom ) { } }
double [ ] min = PixelsToMeters ( tx * tileSize , ty * tileSize , zoom ) ; double minx = min [ 0 ] , miny = min [ 1 ] ; double [ ] max = PixelsToMeters ( ( tx + 1 ) * tileSize , ( ty + 1 ) * tileSize , zoom ) ; double maxx = max [ 0 ] , maxy = max [ 1 ] ; return new double [ ] { minx , miny , maxx , maxy } ;
public class Scanner { /** * Sets this scanner ' s locale to the specified locale . * < p > A scanner ' s locale affects many elements of its default * primitive matching regular expressions ; see * < a href = " # localized - numbers " > localized numbers < / a > above . * < p > Invoking the { @ link # reset } method will set the scanner ' s locale to * the < a href = " # initial - locale " > initial locale < / a > . * @ param locale A string specifying the locale to use * @ return this scanner */ public Scanner useLocale ( Locale locale ) { } }
// Rebuilds every locale-dependent fragment of the scanner's number-matching regexes.
// Single-character separators are escaped with a backslash; multi-character, locale-specific
// strings (NaN/Infinity and non-empty prefixes/suffixes) are quoted with \Q...\E so regex
// metacharacters in them are treated literally. Finally the cached integer/float patterns are
// nulled so they are lazily rebuilt against the new locale. No-op if the locale is unchanged.
if ( locale . equals ( this . locale ) ) return this ; this . locale = locale ; DecimalFormat df = ( DecimalFormat ) NumberFormat . getNumberInstance ( locale ) ; DecimalFormatSymbols dfs = DecimalFormatSymbols . getInstance ( locale ) ; // These must be literalized to avoid collision with regex // metacharacters such as dot or parenthesis groupSeparator = "\\" + dfs . getGroupingSeparator ( ) ; decimalSeparator = "\\" + dfs . getDecimalSeparator ( ) ; // Quoting the nonzero length locale - specific things // to avoid potential conflict with metacharacters nanString = "\\Q" + dfs . getNaN ( ) + "\\E" ; infinityString = "\\Q" + dfs . getInfinity ( ) + "\\E" ; positivePrefix = df . getPositivePrefix ( ) ; if ( positivePrefix . length ( ) > 0 ) positivePrefix = "\\Q" + positivePrefix + "\\E" ; negativePrefix = df . getNegativePrefix ( ) ; if ( negativePrefix . length ( ) > 0 ) negativePrefix = "\\Q" + negativePrefix + "\\E" ; positiveSuffix = df . getPositiveSuffix ( ) ; if ( positiveSuffix . length ( ) > 0 ) positiveSuffix = "\\Q" + positiveSuffix + "\\E" ; negativeSuffix = df . getNegativeSuffix ( ) ; if ( negativeSuffix . length ( ) > 0 ) negativeSuffix = "\\Q" + negativeSuffix + "\\E" ; // Force rebuilding and recompilation of locale dependent // primitive patterns integerPattern = null ; floatPattern = null ; return this ;
public class BsnUtil { /** * Generates random number that could be a BSN . * Based on : http : / / www . testnummers . nl / bsn . js * @ return random BSN . */ public String generateBsn ( ) { } }
// Builds a 9-digit candidate BSN satisfying the Dutch "elfproef" (11-test): the last digit Nr1 is
// the weighted sum (9*d9 + 8*d8 + ... + 2*d2) mod 11. When that remainder is 10 (invalid as a
// digit), Nr2 is nudged by one so the recomputed check digit becomes 8 or 1, per the referenced
// bsn.js algorithm. The leading three digits are forced non-zero as a group by bumping Nr8.
// NOTE(review): Nr9 is drawn from [0,3) while other digits use [0,10) — presumably to keep the
// number in a realistic BSN range; confirm against randomUtil.random's contract.
String Result1 = "" ; int Nr9 = randomUtil . random ( 3 ) ; int Nr8 = randomUtil . random ( 10 ) ; int Nr7 = randomUtil . random ( 10 ) ; int Nr6 = randomUtil . random ( 10 ) ; int Nr5 = randomUtil . random ( 10 ) ; int Nr4 = randomUtil . random ( 10 ) ; int Nr3 = randomUtil . random ( 10 ) ; int Nr2 = randomUtil . random ( 10 ) ; int Nr1 = 0 ; int SofiNr = 0 ; if ( ( Nr9 == 0 ) && ( Nr8 == 0 ) && ( Nr7 == 0 ) ) { Nr8 = 1 ; } SofiNr = 9 * Nr9 + 8 * Nr8 + 7 * Nr7 + 6 * Nr6 + 5 * Nr5 + 4 * Nr4 + 3 * Nr3 + 2 * Nr2 ; Nr1 = floor ( SofiNr - ( floor ( SofiNr / 11 ) ) * 11 ) ; if ( Nr1 > 9 ) { if ( Nr2 > 0 ) { Nr2 -= 1 ; Nr1 = 8 ; } else { Nr2 += 1 ; Nr1 = 1 ; } } Result1 += Nr9 ; Result1 += Nr8 ; Result1 += Nr7 ; Result1 += Nr6 ; Result1 += Nr5 ; Result1 += Nr4 ; Result1 += Nr3 ; Result1 += Nr2 ; Result1 += Nr1 ; return Result1 ;
public class ShareActionProvider { /** * Sets an intent with information about the share action . Here is a * sample for constructing a share intent : * < pre > * < code > * Intent shareIntent = new Intent ( Intent . ACTION _ SEND ) ; * shareIntent . setType ( " image / * " ) ; * Uri uri = Uri . fromFile ( new File ( getFilesDir ( ) , " foo . jpg " ) ) ; * shareIntent . putExtra ( Intent . EXTRA _ STREAM , uri . toString ( ) ) ; * < / pre > * < / code > * @ param shareIntent The share intent . * @ see Intent # ACTION _ SEND * @ see Intent # ACTION _ SEND _ MULTIPLE */ public void setShareIntent ( Intent shareIntent ) { } }
// Store the intent on the history-backed chooser model shared for this history file name.
ActivityChooserModel dataModel = ActivityChooserModel . get ( mContext , mShareHistoryFileName ) ; dataModel . setIntent ( shareIntent ) ;
public class MPIO { /** * Send a control message to a list of MEs * @ param jsMsg The message to be sent * @ param priority The priority at which to send it * @ param fromTo The MEs to send it to */ public void sendDownTree ( SIBUuid8 [ ] targets , int priority , AbstractMessage cMsg ) { } }
// Resolves each target ME uuid to a connection and de-duplicates, so every distinct connection
// receives the message exactly once; targets whose connection cannot be found are skipped.
// cCount records how many targets map to each chosen connection (incremented but not otherwise
// read here — presumably kept for diagnostics; TODO confirm). The labelled "next" continue
// restarts the outer loop when the connection was already selected for an earlier target.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "sendDownTree" , new Object [ ] { this , cMsg , Integer . valueOf ( priority ) , targets } ) ; // Select a set of connections , then annotate the message . int length = targets . length ; // the unique list of connections MPConnection [ ] send = new MPConnection [ length ] ; int [ ] cCount = new int [ length ] ; int numSendConnections = 0 ; next : for ( int i = 0 ; i < length ; i ++ ) { SIBUuid8 targetMEUuid = targets [ i ] ; MPConnection firstChoice = findMPConnection ( targetMEUuid ) ; // Minimal comms trace of the message we ' re trying to send if ( TraceComponent . isAnyTracingEnabled ( ) ) { MECommsTrc . traceMessage ( tc , _messageProcessor , cMsg . getGuaranteedTargetMessagingEngineUUID ( ) , MECommsTrc . OP_SEND , firstChoice , cMsg ) ; } if ( firstChoice != null ) { // Keep track of the set of unique connections for sending below int j = 0 ; // loop through send until we find the next unused slot for ( j = 0 ; ( j < i ) && ( send [ j ] != null ) ; j ++ ) { // if we have seen the selected connection before , start again if ( send [ j ] . equals ( firstChoice ) ) { cCount [ j ] ++ ; continue next ; } } if ( j + 1 > numSendConnections ) numSendConnections = ( j + 1 ) ; // store the select connection in the chosen send slot send [ j ] = firstChoice ; cCount [ j ] ++ ; } } for ( int i = 0 ; i < numSendConnections ; i ++ ) { if ( send [ i ] != null ) send [ i ] . send ( cMsg , priority ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "sendDownTree" ) ;
public class PluginManager { /** * Performs the installation of the plugins . * @ param plugins The collection of plugins to install . * @ param dynamicLoad If true , the plugin will be dynamically loaded into this Jenkins . If false , * the plugin will only take effect after the reboot . * See { @ link UpdateCenter # isRestartRequiredForCompletion ( ) } * @ return The install job list . * @ since 2.0 */ @ Restricted ( NoExternalUse . class ) public List < Future < UpdateCenter . UpdateCenterJob > > install ( @ Nonnull Collection < String > plugins , boolean dynamicLoad ) { } }
// Delegate to the three-argument overload; the null argument is its optional extra
// parameter (presumably a correlation id/context — see that overload for its meaning).
return install ( plugins , dynamicLoad , null ) ;
public class IabHelper { /** * Performs Iab setup with the mService object which must * be properly connected before calling this method . */ private void startSetupIabAsync ( final String packageName , final OnIabSetupFinishedListener listener ) { } }
// Probes billing v3 support on a background thread: first for in-app products (a failure aborts
// setup and also disables subscriptions), then for subscriptions (failure only disables that
// flag). The listener is invoked exactly once — with the error result on failure/RemoteException,
// or with OK after mSetupDone is set. The listener may be null, in which case results are dropped.
new Thread ( new Runnable ( ) { @ Override public void run ( ) { try { Logger . d ( "Checking for in-app billing 3 support." ) ; // check for in - app billing v3 support int response = mService . isBillingSupported ( 3 , packageName , ITEM_TYPE_INAPP ) ; if ( response != BILLING_RESPONSE_RESULT_OK ) { if ( listener != null ) listener . onIabSetupFinished ( new IabResult ( response , "Error checking for billing v3 support." ) ) ; // if in - app purchases aren ' t supported , neither are subscriptions . mSubscriptionsSupported = false ; return ; } Logger . d ( "In-app billing version 3 supported for " , packageName ) ; // check for v3 subscriptions support response = mService . isBillingSupported ( 3 , packageName , ITEM_TYPE_SUBS ) ; if ( response == BILLING_RESPONSE_RESULT_OK ) { Logger . d ( "Subscriptions AVAILABLE." ) ; mSubscriptionsSupported = true ; } else { Logger . d ( "Subscriptions NOT AVAILABLE. Response: " , response ) ; } mSetupDone = true ; } catch ( RemoteException e ) { if ( listener != null ) { listener . onIabSetupFinished ( new IabResult ( IABHELPER_REMOTE_EXCEPTION , "RemoteException while setting up in-app billing." ) ) ; } Logger . e ( "RemoteException while setting up in-app billing" , e ) ; return ; } if ( listener != null ) { listener . onIabSetupFinished ( new IabResult ( BILLING_RESPONSE_RESULT_OK , "Setup successful." ) ) ; Logger . d ( "Setup successful." ) ; } } } ) . start ( ) ;
public class ObjectSpace { /** * Causes this ObjectSpace to stop listening to the connections for method invocation messages . */ public void close ( ) { } }
Connection [ ] connections = this . connections ; for ( int i = 0 ; i < connections . length ; i ++ ) connections [ i ] . removeListener ( invokeListener ) ; synchronized ( instancesLock ) { ArrayList < ObjectSpace > temp = new ArrayList ( Arrays . asList ( instances ) ) ; temp . remove ( this ) ; instances = temp . toArray ( new ObjectSpace [ temp . size ( ) ] ) ; } if ( TRACE ) trace ( "kryonet" , "Closed ObjectSpace." ) ;
public class LocLogger { /** * Log a localized message at the INFO level . * @ param key * the key used for localization * @ param args * optional arguments */ public void info ( Enum < ? > key , Object ... args ) { } }
// Short-circuit before paying the localization cost when INFO is disabled. Location-aware
// loggers are logged through the LAL API so the caller location is reported correctly.
// NOTE(review): the LAL branch passes the raw args while the plain branch passes the
// MessageParameterObj — confirm this asymmetry is intended.
if ( ! logger . isInfoEnabled ( ) ) { return ; } String translatedMsg = imc . getMessage ( key , args ) ; MessageParameterObj mpo = new MessageParameterObj ( key , args ) ; if ( instanceofLAL ) { ( ( LocationAwareLogger ) logger ) . log ( LOCALIZED , FQCN , LocationAwareLogger . INFO_INT , translatedMsg , args , null ) ; } else { logger . info ( LOCALIZED , translatedMsg , mpo ) ; }
public class DockerAndScriptUtils { /** * Builds a map with the variables defined by this class . * @ param instance a non - null instance * @ return a non - null map where all the properties here are mapped to their values for this instance */ public static Map < String , String > buildReferenceMap ( Instance instance ) { } }
// Compute the instance path once, then expose it raw, cleaned, and clean-reversed alongside the
// instance and component names under the well-known ROBOCONF_* keys.
Map < String , String > result = new HashMap < > ( ) ; String instancePath = InstanceHelpers . computeInstancePath ( instance ) ; result . put ( ROBOCONF_INSTANCE_NAME , instance . getName ( ) ) ; result . put ( ROBOCONF_INSTANCE_PATH , instancePath ) ; result . put ( ROBOCONF_COMPONENT_NAME , instance . getComponent ( ) . getName ( ) ) ; result . put ( ROBOCONF_CLEAN_INSTANCE_PATH , cleanInstancePath ( instancePath ) ) ; result . put ( ROBOCONF_CLEAN_REVERSED_INSTANCE_PATH , cleanReversedInstancePath ( instancePath ) ) ; return result ;
public class IsotopePatternManipulator { /** * Return the isotope pattern sorted by mass * to the highest abundance . * @ param isotopeP The IsotopePattern object to sort * @ return The IsotopePattern sorted */ public static IsotopePattern sortByMass ( IsotopePattern isotopeP ) { } }
// Works on a clone so the input pattern is never mutated; sorts the isotope containers by
// ascending mass and designates the lightest one as the monoisotopic peak.
// NOTE(review): on CloneNotSupportedException this prints the stack trace and returns null —
// callers must null-check; consider propagating instead.
try { IsotopePattern isoSort = ( IsotopePattern ) isotopeP . clone ( ) ; // Do nothing for empty isotope pattern if ( isoSort . getNumberOfIsotopes ( ) == 0 ) return isoSort ; // Sort the isotopes List < IsotopeContainer > listISO = isoSort . getIsotopes ( ) ; Collections . sort ( listISO , new Comparator < IsotopeContainer > ( ) { @ Override public int compare ( IsotopeContainer o1 , IsotopeContainer o2 ) { return Double . compare ( o1 . getMass ( ) , o2 . getMass ( ) ) ; } } ) ; // Set the monoisotopic peak to the one with lowest mass isoSort . setMonoIsotope ( listISO . get ( 0 ) ) ; return isoSort ; } catch ( CloneNotSupportedException e ) { e . printStackTrace ( ) ; } return null ;
public class Threshold { /** * Configures the activation method with the comparison operator and the * threshold . * @ param parameters is the comparison operator and threshold */ @ Override public void configure ( String parameters ) { } }
if ( parameters . isEmpty ( ) ) { return ; } List < String > values = Op . split ( parameters , " " , true ) ; final int required = 2 ; if ( values . size ( ) < required ) { throw new RuntimeException ( MessageFormat . format ( "[configuration error] activation {0} requires {1} parameters" , this . getClass ( ) . getSimpleName ( ) , required ) ) ; } setComparison ( Comparison . fromOperator ( values . get ( 0 ) ) ) ; setValue ( Op . toDouble ( values . get ( 1 ) ) ) ;
public class AbstractExtendedSet { /** * { @ inheritDoc } */ @ Override public int powerSetSize ( int min , int max ) { } }
// Counts the subsets of this set whose size lies in [min, max] by summing binomial coefficients
// C(size, i) for i in that range. After the trivial cases, b[] is filled with row "size" of
// Pascal's triangle, built in place (each pass adds the previous row's neighbour, iterating j
// downward so values are not overwritten before use). Result is an int and can overflow for
// large sets — inherited contract presumably accepts that (TODO confirm in the interface doc).
if ( min < 1 || max < min ) throw new IllegalArgumentException ( ) ; final int size = size ( ) ; // special cases if ( size < min ) return 0 ; if ( size == min ) return 1 ; /* * Compute the sum of binomial coefficients ranging from ( size choose * max ) to ( size choose min ) using dynamic programming */ // trivial cases max = Math . min ( size , max ) ; if ( max == min && ( max == 0 || max == size ) ) return 1 ; // compute all binomial coefficients for " n " int [ ] b = new int [ size + 1 ] ; for ( int i = 0 ; i <= size ; i ++ ) b [ i ] = 1 ; for ( int i = 1 ; i <= size ; i ++ ) for ( int j = i - 1 ; j > 0 ; j -- ) b [ j ] += b [ j - 1 ] ; // sum binomial coefficients int res = 0 ; for ( int i = min ; i <= max ; i ++ ) res += b [ i ] ; return res ;
public class TrConfigurator { /** * Initialize Tr ( and underlying Tr service ) . */ public static synchronized void init ( LogProviderConfig config ) { } }
// One-shot initialization guarded by a compareAndSet on the stored config: only the first caller
// initializes the TrService delegate, applies the initial trace spec, and registers the WsLogger
// class with the LogManager; later calls fall through (updates go through a separate path).
if ( config == null ) throw new NullPointerException ( "LogProviderConfig must not be null" ) ; if ( loggingConfig . get ( ) . compareAndSet ( null , config ) ) { // Only initialize Tr once - - all subsequent changes go through update // The synchronization of this method is gratuitous ( just makes us feel better ) , // it is called while the system is single threaded at startup . // config . getTrDelegate ( ) must not return null - - it should either // return a dummy / disabled delegate , or throw an exception so that startup // does not proceed . final TrService tr = config . getTrDelegate ( ) ; if ( tr == null ) throw new NullPointerException ( "LogProviderConfig must provide a TrService delegate" ) ; Callable < TrService > result = new Callable < TrService > ( ) { @ Override public TrService call ( ) throws Exception { return tr ; } } ; delegate = StaticValue . mutateStaticValue ( delegate , result ) ; delegate . get ( ) . init ( config ) ; // Validate and propagate the initial trace specification setTraceSpec ( config . getTraceString ( ) ) ; // Set class that we want the LogManager to instantiate when someone // calls getLogger : This logger interacts only with the Tr / FFDC API : // it is not dependent on the implementation of the underlying delegate WsLogManager . setWsLogger ( com . ibm . ws . logging . internal . WsLogger . class ) ; }
public class ProtoUtils { /** * Proto3 enums fields can accept and return unknown values via the get < Field > Value ( ) methods , we * use those methods instead of the methods that deal with the enum constants in order to support * unknown enum values . If we didn ' t , any field with an unknown enum value would throw an * exception when we call { @ code getNumber ( ) } on the enum . * < p > For comparison , in proto2 unknown values always get mapped to 0 , so this problem doesn ' t * exist . Also , in proto2 , the ' Value ' functions don ' t exist , so we can ' t use them . */ private static boolean isProto3EnumField ( FieldDescriptor descriptor ) { } }
// True only when the field is an enum AND its containing file uses proto3 syntax.
return descriptor . getType ( ) == Descriptors . FieldDescriptor . Type . ENUM && descriptor . getFile ( ) . getSyntax ( ) == Syntax . PROTO3 ;
public class ViewStateReader { /** * Entry point for processing saved view state . * @ param file project file * @ param varData view state var data * @ param fixedData view state fixed data * @ throws IOException */ public void process ( ProjectFile file , Var2Data varData , byte [ ] fixedData ) throws IOException { } }
// Extracts the saved view name, the list of displayed task unique IDs (4-byte ints; an ID of 0 is
// only accepted as the very first entry, and IDs without a matching task are dropped), and the
// active filter ID at fixed-data offset 128, then installs the resulting ViewState on the file.
Props props = getProps ( varData ) ; // System . out . println ( props ) ; if ( props != null ) { String viewName = MPPUtility . removeAmpersands ( props . getUnicodeString ( VIEW_NAME ) ) ; byte [ ] listData = props . getByteArray ( VIEW_CONTENTS ) ; List < Integer > uniqueIdList = new LinkedList < Integer > ( ) ; if ( listData != null ) { for ( int index = 0 ; index < listData . length ; index += 4 ) { Integer uniqueID = Integer . valueOf ( MPPUtility . getInt ( listData , index ) ) ; // Ensure that we have a valid task , and that if we have and // ID of zero , this is the first task shown . if ( file . getTaskByUniqueID ( uniqueID ) != null && ( uniqueID . intValue ( ) != 0 || index == 0 ) ) { uniqueIdList . add ( uniqueID ) ; } } } int filterID = MPPUtility . getShort ( fixedData , 128 ) ; ViewState state = new ViewState ( file , viewName , uniqueIdList , filterID ) ; file . getViews ( ) . setViewState ( state ) ; }
public class AbstractGenericHandler { /** * Publishes a response with the attached observable . * @ param response the response to publish . * @ param observable pushing into the event sink . */ protected void publishResponse ( final CouchbaseResponse response , final Subject < CouchbaseResponse , CouchbaseResponse > observable ) { } }
// RETRY responses (or a missing observable) are routed back through the ring-buffer response
// handler for re-dispatch. Otherwise the response is completed either on the environment
// scheduler (direct fast-path for CoreScheduler, worker otherwise) or inline when
// moveResponseOut is disabled.
if ( response . status ( ) != ResponseStatus . RETRY && observable != null ) { if ( moveResponseOut ) { Scheduler scheduler = env ( ) . scheduler ( ) ; if ( scheduler instanceof CoreScheduler ) { scheduleDirect ( ( CoreScheduler ) scheduler , response , observable ) ; } else { scheduleWorker ( scheduler , response , observable ) ; } } else { completeResponse ( response , observable ) ; } } else { responseBuffer . publishEvent ( ResponseHandler . RESPONSE_TRANSLATOR , response , observable ) ; }
public class ModeledUserGroup { /** * Stores all restricted ( privileged ) attributes within the given Map , * pulling the values of those attributes from the underlying user group * model . If no value is yet defined for an attribute , that attribute will * be set to null . * @ param attributes * The Map to store all restricted attributes within . */ private void putRestrictedAttributes ( Map < String , String > attributes ) { } }
// Set disabled attribute
// ("true" when the underlying model is disabled, null otherwise — null clears the attribute).
attributes . put ( DISABLED_ATTRIBUTE_NAME , getModel ( ) . isDisabled ( ) ? "true" : null ) ;
public class JobStateToJsonConverter { /** * Write a list of { @ link JobState } s to json document . * @ param jsonWriter { @ link com . google . gson . stream . JsonWriter } * @ param jobStates list of { @ link JobState } s to write to json document * @ throws IOException */ private void writeJobStates ( JsonWriter jsonWriter , List < ? extends JobState > jobStates ) throws IOException { } }
// Emit the states as a JSON array, delegating each element to writeJobState.
jsonWriter . beginArray ( ) ; for ( JobState jobState : jobStates ) { writeJobState ( jsonWriter , jobState ) ; } jsonWriter . endArray ( ) ;
public class CPDefinitionVirtualSettingLocalServiceUtil { /** * Deletes the cp definition virtual setting with the primary key from the database . Also notifies the appropriate model listeners . * @ param CPDefinitionVirtualSettingId the primary key of the cp definition virtual setting * @ return the cp definition virtual setting that was removed * @ throws PortalException if a cp definition virtual setting with the primary key could not be found */ public static com . liferay . commerce . product . type . virtual . model . CPDefinitionVirtualSetting deleteCPDefinitionVirtualSetting ( long CPDefinitionVirtualSettingId ) throws com . liferay . portal . kernel . exception . PortalException { } }
// Generated Liferay service-util facade: delegate to the looked-up local service.
return getService ( ) . deleteCPDefinitionVirtualSetting ( CPDefinitionVirtualSettingId ) ;
public class ConfidenceInterval { /** * Adapted from https : / / gist . github . com / gcardone / 5536578. * @ param alpha probability of incorrectly rejecting the null hypothesis ( 1 * - confidence _ level ) * @ param df degrees of freedom * @ param n number of observations * @ param std standard deviation * @ param mean mean * @ return array with the confidence interval : [ mean - margin of error , mean * + margin of error ] */ public static double [ ] getConfidenceInterval ( final double alpha , final int df , final int n , final double std , final double mean ) { } }
// Margin of error = t-critical * std / sqrt(n), with the critical value taken from a Student's
// t-distribution with df degrees of freedom.
// NOTE(review): the quantile used is 1 - alpha; a two-sided interval would use 1 - alpha/2.
// Confirm whether callers are expected to pass the already-halved alpha.
// Create T Distribution with df degrees of freedom TDistribution tDist = new TDistribution ( df ) ; // Calculate critical value double critVal = tDist . inverseCumulativeProbability ( 1.0 - alpha ) ; // Calculate confidence interval double ci = critVal * std / Math . sqrt ( n ) ; double lower = mean - ci ; double upper = mean + ci ; double [ ] interval = new double [ ] { lower , upper } ; return interval ;
public class HBCIUtils { /** * Gibt zu einer gegebenen Bankleitzahl zurück , welche HBCI - Version für DDV * bzw . RDH zu verwenden ist . Siehe auch { @ link # getPinTanVersionForBLZ ( String ) } . * @ param blz * @ return HBCI - Version * @ deprecated Bitte { @ link HBCIUtils # getBankInfo ( String ) } verwenden . */ public static String getHBCIVersionForBLZ ( String blz ) { } }
BankInfo info = getBankInfo ( blz ) ; if ( info == null ) return "" ; return info . getRdhVersion ( ) != null ? info . getRdhVersion ( ) . getId ( ) : "" ;
public class User { /** * Deletes this user from Hudson . */ @ RequirePOST public void doDoDelete ( StaplerRequest req , StaplerResponse rsp ) throws IOException { } }
// Requires ADMINISTER permission; refuses to delete the currently authenticated user (400),
// otherwise deletes this user and redirects two levels up (back to the user list).
checkPermission ( Jenkins . ADMINISTER ) ; if ( idStrategy ( ) . equals ( id , Jenkins . getAuthentication ( ) . getName ( ) ) ) { rsp . sendError ( HttpServletResponse . SC_BAD_REQUEST , "Cannot delete self" ) ; return ; } delete ( ) ; rsp . sendRedirect2 ( "../.." ) ;
public class CreateSSLCertificateTask { /** * { @ inheritDoc } */ @ Override public String getTaskHelp ( ) { } }
// Assembles the localized help text from the sslCert.* message keys plus the defaults defined by
// DefaultSSLCertificateCreator (password length, validity, alias, key/signature algorithms, size).
// NOTE(review): SIGALG appears twice in the argument list — confirm the final argument is not
// meant to be a different constant (e.g. a key-size or alternate algorithm default).
return getTaskHelp ( "sslCert.desc" , "sslCert.usage.options" , "sslCert.required-key." , "sslCert.required-desc." , "sslCert.option-key." , "sslCert.option-desc." , "sslCert.option.addon" , null , scriptName , DefaultSSLCertificateCreator . MINIMUM_PASSWORD_LENGTH , DefaultSSLCertificateCreator . DEFAULT_VALIDITY , DefaultSSLCertificateCreator . MINIMUM_VALIDITY , DefaultSSLCertificateCreator . ALIAS , DefaultSSLCertificateCreator . KEYALG_RSA_TYPE , DefaultSSLCertificateCreator . SIGALG , DefaultSSLCertificateCreator . DEFAULT_SIZE , DefaultSSLCertificateCreator . SIGALG ) ;
public class BitmapUtils { /** * Compress the bitmap to the byte array as the specified format and quality . * @ param bitmap to compress . * @ param format the format . * @ param quality the quality of the compressed bitmap . * @ return the compressed bitmap byte array . */ public static byte [ ] toByteArray ( Bitmap bitmap , Bitmap . CompressFormat format , int quality ) { } }
// Compress into an in-memory stream and return its contents; the finally block closes the
// stream defensively (a no-op for ByteArrayOutputStream, kept for the project's close idiom).
ByteArrayOutputStream out = null ; try { out = new ByteArrayOutputStream ( ) ; bitmap . compress ( format , quality , out ) ; return out . toByteArray ( ) ; } finally { CloseableUtils . close ( out ) ; }
public class MjdbcLogger { /** * Checks if SLF4j is available ( loaded ) in JVM * @ return true - if SLF4j is available for use */ public static boolean isSLF4jAvailable ( ) { } }
// Lazily probe the classpath for org.slf4j.Logger once and cache the result.
// NOTE(review): the lazy init is unsynchronized — a benign race, since concurrent
// callers compute and cache the same value.
if ( SLF4jAvailable == null ) { try { Class . forName ( "org.slf4j.Logger" ) ; setSLF4jAvailable ( true ) ; } catch ( ClassNotFoundException e ) { setSLF4jAvailable ( false ) ; } } return SLF4jAvailable ;
public class ExpiryPolicyBuilder { /** * Set TTL since last update . * Note : Calling this method on a builder with an existing TTL since last access will override the previous value or function . * @ param update TTL since last update * @ return a new builder with the TTL since last update */ public ExpiryPolicyBuilder < K , V > update ( Duration update ) { } }
// Only strictly negative durations are rejected; zero and null pass through.
// NOTE(review): the message says "must be positive" although zero is accepted — confirm intent.
// The fixed duration is wrapped in a constant tri-function and handed to the functional overload.
if ( update != null && update . isNegative ( ) ) { throw new IllegalArgumentException ( "Update duration must be positive" ) ; } return update ( ( a , b , c ) -> update ) ;
public class ListMath { /** * Performs a linear transformation on inverse value of each number in a list . * @ param data The list of numbers to divide the numerator by * @ param numerator The numerator for each division * @ param offset The additive constant * @ return result [ x ] = numerator / data [ x ] + offset */ public static ListDouble inverseRescale ( final ListNumber data , final double numerator , final double offset ) { } }
// Lazy view: elements are computed on access (numerator / data[i] + offset), so later changes
// to the backing list are reflected in the returned view.
return new ListDouble ( ) { @ Override public double getDouble ( int index ) { return numerator / data . getDouble ( index ) + offset ; } @ Override public int size ( ) { return data . size ( ) ; } } ;
public class PrettySharedPreferences { /** * Call to commit changes . * @ see android . content . SharedPreferences . Editor # commit ( ) */ public boolean commit ( ) { } }
if ( editing == null ) { return false ; } final boolean result = editing . commit ( ) ; editing = null ; return result ;
public class MuxInputStream { /** * Gets the raw input stream . Clients will normally not call * this . */ protected InputStream getInputStream ( ) throws IOException { } }
// Lazily open the underlying stream for this channel on first use; returns null if the
// server is absent and no stream has been established.
if ( is == null && server != null ) is = server . readChannel ( channel ) ; return is ;
public class RtpChannel { /** * Sets the connection mode of the channel . < br > * Possible modes : send _ only , recv _ only , inactive , send _ recv , conference , network _ loopback . * @ param connectionMode the new connection mode adopted by the channel */ public void updateMode ( ConnectionMode connectionMode ) { } }
// For each mode, three things are kept in sync: the RTP handler's receivable/loopable flags, the
// audio/OOB mixer component directions (readable, writable), and the handler/transmitter
// activation. SEND_RECV and CONFERENCE share the full-duplex configuration; NETWORK_LOOPBACK
// only sets loopable. Afterwards the RTP inactivity heartbeat is (re)armed only when a timeout
// is configured, a remote peer is known, the peer is not connected immediately, and the channel
// can receive — otherwise any pending heartbeat is cancelled.
switch ( connectionMode ) { case SEND_ONLY : this . rtpHandler . setReceivable ( false ) ; this . rtpHandler . setLoopable ( false ) ; audioComponent . updateMode ( false , true ) ; oobComponent . updateMode ( false , true ) ; this . rtpHandler . deactivate ( ) ; this . transmitter . activate ( ) ; break ; case RECV_ONLY : this . rtpHandler . setReceivable ( true ) ; this . rtpHandler . setLoopable ( false ) ; audioComponent . updateMode ( true , false ) ; oobComponent . updateMode ( true , false ) ; this . rtpHandler . activate ( ) ; this . transmitter . deactivate ( ) ; break ; case INACTIVE : this . rtpHandler . setReceivable ( false ) ; this . rtpHandler . setLoopable ( false ) ; audioComponent . updateMode ( false , false ) ; oobComponent . updateMode ( false , false ) ; this . rtpHandler . deactivate ( ) ; this . transmitter . deactivate ( ) ; break ; case SEND_RECV : case CONFERENCE : this . rtpHandler . setReceivable ( true ) ; this . rtpHandler . setLoopable ( false ) ; audioComponent . updateMode ( true , true ) ; oobComponent . updateMode ( true , true ) ; this . rtpHandler . activate ( ) ; this . transmitter . activate ( ) ; break ; case NETWORK_LOOPBACK : this . rtpHandler . setReceivable ( false ) ; this . rtpHandler . setLoopable ( true ) ; audioComponent . updateMode ( false , false ) ; oobComponent . updateMode ( false , false ) ; this . rtpHandler . deactivate ( ) ; this . transmitter . deactivate ( ) ; break ; default : break ; } boolean connectImmediately = false ; if ( this . remotePeer != null ) { connectImmediately = udpManager . connectImmediately ( ( InetSocketAddress ) this . remotePeer ) ; } if ( udpManager . getRtpTimeout ( ) > 0 && this . remotePeer != null && ! connectImmediately ) { if ( this . rtpHandler . isReceivable ( ) ) { this . statistics . setLastHeartbeat ( scheduler . getClock ( ) . getTime ( ) ) ; scheduler . submitHeatbeat ( heartBeat ) ; } else { heartBeat . cancel ( ) ; } }
public class FlatFileUtils {

    /**
     * Removes every occurrence of the character {@code c} from {@code string}.
     *
     * @param string the string to filter; must not be null
     * @param c      the character to remove
     * @return {@code string} with all occurrences of {@code c} removed
     */
    public static String remove ( String string , char c ) {
        // Fix: use the literal replace() rather than replaceAll(). replaceAll
        // interprets its first argument as a REGEX, so removing characters such
        // as '.', '*' or '(' corrupted the result or threw
        // PatternSyntaxException.
        return string . replace ( String . valueOf ( c ) , "" ) ;
    }
}
public class SpeechToTextWebSocketListener {
    /**
     * Streams the entire contents of {@code inputStream} over the web socket in 1 KB frames,
     * blocking until the stream is exhausted or the socket is closed. Always closes the stream.
     *
     * @param inputStream the input stream to send
     */
    private void sendInputStream(InputStream inputStream) {
        byte[] buffer = new byte[ONE_KB];
        int read;
        try {
            // This method uses a blocking while loop to receive all contents of the underlying input stream.
            // AudioInputStreams, typically used for streaming microphone inputs return 0 only when the stream has been
            // closed. Elsewise AudioInputStream.read() blocks until enough audio frames are read.
            while (((read = inputStream.read(buffer)) > 0) && socketOpen) {
                // If OkHttp's WebSocket queue gets overwhelmed, it'll abruptly close the connection
                // (see: https://github.com/square/okhttp/issues/3317). This will ensure we wait until the coast is clear.
                while (socket.queueSize() > QUEUE_SIZE_LIMIT) {
                    Thread.sleep(QUEUE_WAIT_MILLIS);
                }
                // Send the buffer directly on a full read; otherwise copy only the bytes
                // actually read so stale tail bytes from a previous iteration aren't sent.
                if (read == ONE_KB) {
                    socket.send(ByteString.of(buffer));
                } else {
                    socket.send(ByteString.of(Arrays.copyOfRange(buffer, 0, read)));
                }
            }
        } catch (IOException | InterruptedException e) {
            LOG.log(Level.SEVERE, e.getMessage(), e);
        } finally {
            try {
                inputStream.close();
            } catch (IOException e) {
                // do nothing - the InputStream may have already been closed externally.
            }
        }
    }
}
public class BaseXmlImporter {
    /**
     * Computes the same-name-sibling (SNS) index a new child named {@code name} would
     * receive under {@code parentData}, accounting for transient ADDED/DELETED item
     * states as well as already-persisted children. Throws ItemExistsException (a
     * RepositoryException) when SNS is disallowed and a same-name node already exists.
     *
     * @param parentData the parent node
     * @param name the child node name (a QPathEntry is normalized to a plain name)
     * @param skipIdentifier identifier of an item state to exclude from the ADDED scan
     * @return the 1-based SNS index for the new node
     * @throws PathNotFoundException
     * @throws IllegalPathException
     * @throws RepositoryException
     */
    public int getNodeIndex(NodeData parentData, InternalQName name, String skipIdentifier)
        throws PathNotFoundException, IllegalPathException, RepositoryException {
        // Normalize a QPathEntry down to a plain InternalQName so name comparisons
        // below ignore the index component.
        if (name instanceof QPathEntry) {
            name = new InternalQName(name.getNamespace(), name.getName());
        }
        int newIndex = 1;
        NodeDefinitionData nodedef =
            nodeTypeDataManager.getChildNodeDefinition(name, parentData.getPrimaryTypeName(), parentData.getMixinTypeNames());
        List<ItemState> transientAddChilds = getItemStatesList(parentData, name, ItemState.ADDED, skipIdentifier);
        List<ItemState> transientDeletedChilds;
        if (nodedef.isAllowsSameNameSiblings()) {
            transientDeletedChilds = getItemStatesList(parentData, name, ItemState.DELETED, null);
        } else {
            // SNS not allowed: look for an existing node with this name (index 0 entry).
            transientDeletedChilds = getItemStatesList(parentData, new QPathEntry(name, 0), ItemState.DELETED, null);
            ItemData sameNameNode = null;
            try {
                sameNameNode = dataConsumer.getItemData(parentData, new QPathEntry(name, 0), ItemType.NODE, false);
            } catch (PathNotFoundException e) {
                // Ok no same name node;
                return newIndex;
            }
            // A persisted same-name node is only acceptable if it is transiently deleted;
            // a transiently added same-name node is never acceptable.
            if (((sameNameNode != null) || (transientAddChilds.size() > 0))) {
                if ((sameNameNode != null) && (transientDeletedChilds.size() < 1)) {
                    throw new ItemExistsException("The node already exists in " + sameNameNode.getQPath().getAsString()
                        + " and same name sibling is not allowed ");
                } else if (transientAddChilds.size() > 0) {
                    throw new ItemExistsException("The node already exists in add state "
                        + " and same name sibling is not allowed ");
                }
            }
        }
        // Transiently added siblings shift the index upward.
        newIndex += transientAddChilds.size();
        List<NodeData> existedChilds = dataConsumer.getChildNodesData(parentData);
        // Calculate SNS index for dest root
        main : for (int n = 0, l = existedChilds.size(); n < l; n++) {
            NodeData child = existedChilds.get(n);
            if (child.getQPath().getName().equals(name)) {
                // skip deleted items
                if (!transientDeletedChilds.isEmpty()) {
                    for (int i = 0, length = transientDeletedChilds.size(); i < length; i++) {
                        ItemState state = transientDeletedChilds.get(i);
                        if (state.getData().equals(child)) {
                            // Each deleted state cancels exactly one persisted child.
                            transientDeletedChilds.remove(i);
                            continue main;
                        }
                    }
                }
                newIndex++; // next sibling index
            }
        } // searching
        return newIndex;
    }
}
public class XMxmlSerializer {
    /**
     * Serializes the given XES log to the legacy MXML format on the given stream.
     * (non-Javadoc)
     *
     * @see org.deckfour.xes.out.XesSerializer#serialize(org.deckfour.xes.model.XLog,
     *      java.io.OutputStream)
     */
    public void serialize(XLog log, OutputStream out) throws IOException {
        XLogging.log("start serializing log to MXML", XLogging.Importance.DEBUG);
        long start = System.currentTimeMillis();
        SXDocument doc = new SXDocument(out);
        doc.addComment("This file has been generated with the OpenXES library. It conforms");
        doc.addComment("to the legacy MXML standard for log storage and management.");
        doc.addComment("OpenXES library version: " + XRuntimeUtils.OPENXES_VERSION);
        doc.addComment("OpenXES is available from http://www.xes-standard.org/");
        SXTag root = doc.addNode("WorkflowLog");
        SXTag source = root.addChildNode("Source");
        source.addAttribute("program", "XES MXML serialization");
        source.addAttribute("openxes.version", XRuntimeUtils.OPENXES_VERSION);
        SXTag process = root.addChildNode("Process");
        String id = XConceptExtension.instance().extractName(log);
        // MXML requires a process id; fall back to "none" when the log has no concept:name.
        process.addAttribute("id", (id == null ? "none" : id));
        process.addAttribute("description", "process with id " + XConceptExtension.instance().extractName(log));
        addModelReference(log, process);
        addAttributes(process, log.getAttributes().values());
        // One ProcessInstance per trace, one AuditTrailEntry per event.
        for (XTrace trace : log) {
            SXTag instance = process.addChildNode("ProcessInstance");
            instance.addAttribute("id", XConceptExtension.instance().extractName(trace));
            instance.addAttribute("description", "instance with id " + XConceptExtension.instance().extractName(trace));
            addModelReference(trace, instance);
            addAttributes(instance, trace.getAttributes().values());
            for (XEvent event : trace) {
                SXTag ate = instance.addChildNode("AuditTrailEntry");
                addAttributes(ate, event.getAttributes().values());
                SXTag wfme = ate.addChildNode("WorkflowModelElement");
                addModelReference(event, wfme);
                wfme.addTextNode(XConceptExtension.instance().extractName(event));
                // Map the lifecycle transition onto the fixed set of known MXML event types;
                // unknown transitions are recorded as "unknown" with the raw value preserved.
                SXTag type = ate.addChildNode("EventType");
                XAttributeLiteral typeAttr = (XAttributeLiteral) event.getAttributes().get(XLifecycleExtension.KEY_TRANSITION);
                if (typeAttr != null) {
                    addModelReference(typeAttr, type);
                    String typeStr = typeAttr.getValue().trim().toLowerCase();
                    if (knownTypes.contains(typeStr)) {
                        type.addTextNode(typeStr);
                    } else {
                        type.addAttribute("unknownType", typeAttr.getValue());
                        type.addTextNode("unknown");
                    }
                } else {
                    // No lifecycle attribute present: default the event type to "complete".
                    type.addTextNode("complete");
                }
                // Originator preference order: resource, then role, then group.
                XAttributeLiteral originatorAttr = (XAttributeLiteral) event.getAttributes().get(XOrganizationalExtension.KEY_RESOURCE);
                if (originatorAttr == null) {
                    originatorAttr = (XAttributeLiteral) event.getAttributes().get(XOrganizationalExtension.KEY_ROLE);
                }
                if (originatorAttr == null) {
                    originatorAttr = (XAttributeLiteral) event.getAttributes().get(XOrganizationalExtension.KEY_GROUP);
                }
                if (originatorAttr != null) {
                    SXTag originator = ate.addChildNode("originator");
                    addModelReference(originatorAttr, originator);
                    originator.addTextNode(originatorAttr.getValue());
                }
                XAttributeTimestamp timestampAttr = (XAttributeTimestamp) event.getAttributes().get(XTimeExtension.KEY_TIMESTAMP);
                if (timestampAttr != null) {
                    SXTag timestamp = ate.addChildNode("timestamp");
                    addModelReference(timestampAttr, timestamp);
                    Date date = timestampAttr.getValue();
                    timestamp.addTextNode(xsDateTimeConversion.format(date));
                }
            }
        }
        doc.close();
        String duration = " (" + (System.currentTimeMillis() - start) + " msec.)";
        XLogging.log("finished serializing log" + duration, XLogging.Importance.DEBUG);
    }
}
public class AbstractAddStepHandler {
    /**
     * <strong>Deprecated</strong>. Subclasses wishing for custom rollback behavior should instead override
     * {@link #rollbackRuntime(OperationContext, org.jboss.dmr.ModelNode, org.jboss.as.controller.registry.Resource)}.
     * This default implementation does nothing. <strong>Subclasses that override this method should not call
     * {@code super.performRuntime(...)}.</strong>
     *
     * @param context the operation context
     * @param operation the operation being executed
     * @param model persistent configuration model node that corresponds to the address of {@code operation}
     * @param controllers will always be an empty list
     * @deprecated instead override {@link #rollbackRuntime(OperationContext, org.jboss.dmr.ModelNode, org.jboss.as.controller.registry.Resource)}
     */
    @Deprecated
    protected void rollbackRuntime(OperationContext context, final ModelNode operation, final ModelNode model, List<ServiceController<?>> controllers) {
        // no-op by design: rollback work only happens in subclasses that override this.
    }
}
public class AWSACMPCAWaiters { /** * Builds a CertificateAuthorityCSRCreated waiter by using custom parameters waiterParameters and other parameters * defined in the waiters specification , and then polls until it determines whether the resource entered the desired * state or not , where polling criteria is bound by either default polling strategy or custom polling strategy . */ public Waiter < GetCertificateAuthorityCsrRequest > certificateAuthorityCSRCreated ( ) { } }
return new WaiterBuilder < GetCertificateAuthorityCsrRequest , GetCertificateAuthorityCsrResult > ( ) . withSdkFunction ( new GetCertificateAuthorityCsrFunction ( client ) ) . withAcceptors ( new HttpSuccessStatusAcceptor ( WaiterState . SUCCESS ) , new CertificateAuthorityCSRCreated . IsRequestInProgressExceptionMatcher ( ) ) . withDefaultPollingStrategy ( new PollingStrategy ( new MaxAttemptsRetryStrategy ( 60 ) , new FixedDelayStrategy ( 3 ) ) ) . withExecutorService ( executorService ) . build ( ) ;
public class IntHashMap {
    /**
     * Returns a set view of the keys contained in this map. The set is
     * backed by the map, so changes to the map are reflected in the set, and
     * vice-versa. The set supports element removal, which removes the
     * corresponding mapping from this map, via the <tt>Iterator.remove</tt>,
     * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and
     * <tt>clear</tt> operations. It does not support the <tt>add</tt> or
     * <tt>addAll</tt> operations.
     *
     * @return a set view of the keys contained in this map.
     */
    public Set keySet() {
        // Lazily create and cache the view on first use; it is a live view
        // delegating to the enclosing map, not a snapshot.
        if (keySet == null) {
            keySet = new AbstractSet() {
                public Iterator iterator() {
                    // KEYS mode: the iterator yields keys rather than values.
                    return new IntHashIterator(KEYS);
                }
                public int size() {
                    return count;
                }
                public boolean contains(Object o) {
                    return containsKey(o);
                }
                public boolean remove(Object o) {
                    // Removing via the view removes the whole mapping from the map.
                    return IntHashMap.this.remove(o) != null;
                }
                public void clear() {
                    IntHashMap.this.clear();
                }
                // add/addAll intentionally unsupported: AbstractSet's defaults throw.
            };
        }
        return keySet;
    }
}
public class GrailsDomainBinder {
    /**
     * Creates and binds the discriminator property used in table-per-hierarchy inheritance to
     * discriminate between sub class instances.
     *
     * @param table The table to bind onto
     * @param entity The root class entity
     * @param mappings The mappings instance
     */
    protected void bindDiscriminatorProperty(Table table, RootClass entity, InFlightMetadataCollector mappings) {
        Mapping m = getMapping(entity.getMappedClass());
        SimpleValue d = new SimpleValue(metadataBuildingContext, table);
        entity.setDiscriminator(d);
        DiscriminatorConfig discriminatorConfig = m != null ? m.getDiscriminator() : null;
        boolean hasDiscriminatorConfig = discriminatorConfig != null;
        // Discriminator value defaults to the class name when not explicitly configured.
        entity.setDiscriminatorValue(hasDiscriminatorConfig ? discriminatorConfig.getValue() : entity.getClassName());
        if (hasDiscriminatorConfig) {
            if (discriminatorConfig.getInsertable() != null) {
                entity.setDiscriminatorInsertable(discriminatorConfig.getInsertable());
            }
            // The configured type may be given as a Class or as a type-name string.
            Object type = discriminatorConfig.getType();
            if (type != null) {
                if (type instanceof Class) {
                    d.setTypeName(((Class) type).getName());
                } else {
                    d.setTypeName(type.toString());
                }
            }
        }
        if (hasDiscriminatorConfig && discriminatorConfig.getFormula() != null) {
            // A formula-based discriminator is computed rather than stored in a column.
            Formula formula = new Formula();
            formula.setFormula(discriminatorConfig.getFormula());
            d.addFormula(formula);
        } else {
            // Bind a plain string column, using Hibernate's default discriminator
            // column name unless a column config overrides it below.
            bindSimpleValue(STRING_TYPE, d, false, RootClass.DEFAULT_DISCRIMINATOR_COLUMN_NAME, mappings);
            ColumnConfig cc = !hasDiscriminatorConfig ? null : discriminatorConfig.getColumn();
            if (cc != null) {
                Column c = (Column) d.getColumnIterator().next();
                if (cc.getName() != null) {
                    c.setName(cc.getName());
                }
                bindColumnConfigToColumn(null, c, cc);
            }
        }
        entity.setPolymorphic(true);
    }
}
public class SqlExecutor { /** * 执行非查询 SQL语句 * @ param sql 需要执行的非查询SQL语句对象 ( insert / update / delete ) * @ return 是否执行成功 * @ throws SQLException SQL执行异常 */ public boolean execute ( Sql sql ) throws SQLException { } }
long start = System . currentTimeMillis ( ) ; if ( sql . validate ( ) == false ) { return false ; } boolean result = false ; PreparedStatement stmt = null ; try { stmt = this . createStatment ( conn , sql ) ; stmt . execute ( ) ; result = true ; } catch ( SQLException e ) { throw e ; } finally { try { if ( stmt != null && stmt . isClosed ( ) == false ) { stmt . close ( ) ; } } catch ( SQLException e ) { logger . error ( e . getMessage ( ) , e ) ; } } logger . debug ( String . format ( "Execute %s used %d ms" , sql . getSql ( ) , System . currentTimeMillis ( ) - start ) ) ; return result ;
public class ControllableBaseInterceptor {
    /**
     * Converts an array of annotation classes into a typed list.
     * (Exists to work around generic-array headaches with varargs.)
     *
     * @param annotations The array of annotation types. (NotNull, EmptyAllowed)
     * @return The list of annotation types, in the same order. (NotNull, EmptyAllowed)
     */
    protected List<Class<? extends Annotation>> createAnnotationTypeList(Class<?>... annotations) {
        final List<Class<? extends Annotation>> typeList =
            new ArrayList<Class<? extends Annotation>>(annotations.length);
        for (Class<?> current : annotations) {
            // Callers pass annotation types; narrow the wildcard with an unchecked cast.
            @SuppressWarnings("unchecked")
            final Class<? extends Annotation> annotationType = (Class<? extends Annotation>) current;
            typeList.add(annotationType);
        }
        return typeList;
    }
}
public class Sign {
    /**
     * Generates a digital signature for the given data using the private key.
     *
     * @param data the data to sign
     * @return the signature bytes
     * @throws CryptoException if initializing, updating or signing fails
     */
    public byte[] sign(byte[] data) {
        // Serialize access to the shared Signature instance with the lock.
        lock.lock();
        try {
            signature.initSign(this.privateKey);
            signature.update(data);
            return signature.sign();
        } catch (Exception e) {
            // Wrap any checked crypto exception in the library's unchecked CryptoException.
            throw new CryptoException(e);
        } finally {
            lock.unlock();
        }
    }
}
public class Driver {
    /**
     * Creates a connection to specified |url| with given configuration |info|.
     * Convenience overload that converts the varargs properties into a
     * {@code java.util.Properties} instance via {@code props(...)} and delegates
     * to the main overload.
     *
     * @param url the JDBC URL
     * @param info the properties for the new connection
     * @return the configured connection
     * @throws SQLException if fails to connect
     * @see #connect(java.lang.String, java.util.Properties)
     */
    public acolyte.jdbc.Connection connect(final String url, final Property... info) throws SQLException {
        // All real connection logic lives in the Properties-based overload.
        return connect(url, props(info));
    }
}
public class LoggerWrapper { /** * Log a DOM node list at the FINER level * @ param msg The message to show with the list , or null if no message * needed * @ param nodeList * @ see NodeList */ public void logDomNodeList ( String msg , NodeList nodeList ) { } }
StackTraceElement caller = StackTraceUtils . getCallerStackTraceElement ( ) ; String toLog = ( msg != null ? msg + "\n" : "DOM nodelist:\n" ) ; for ( int i = 0 ; i < nodeList . getLength ( ) ; i ++ ) { toLog += domNodeDescription ( nodeList . item ( i ) , 0 ) + "\n" ; } if ( caller != null ) { logger . logp ( Level . FINER , caller . getClassName ( ) , caller . getMethodName ( ) + "():" + caller . getLineNumber ( ) , toLog ) ; } else { logger . logp ( Level . FINER , "(UnknownSourceClass)" , "(unknownSourceMethod)" , toLog ) ; }
public class ApnsPayloadBuilder { /** * < p > Sets a subtitle for the notification . Clears any previously - set localized subtitle key and arguments . < / p > * < p > By default , no subtitle is included . Requires iOS 10 or newer . < / p > * @ param alertSubtitle the subtitle for this push notification * @ return a reference to this payload builder * @ since 0.8.1 */ public ApnsPayloadBuilder setAlertSubtitle ( final String alertSubtitle ) { } }
this . alertSubtitle = alertSubtitle ; this . localizedAlertSubtitleKey = null ; this . localizedAlertSubtitleArguments = null ; return this ;
public class Peer { /** * Called each time the state of a call changes to determine the Peers * overall state . */ private void evaluateState ( ) { } }
synchronized ( this . callList ) { // Get the highest prioirty state from the set of calls . PeerState newState = PeerState . NOTSET ; for ( CallTracker call : this . callList ) { if ( call . getState ( ) . getPriority ( ) > newState . getPriority ( ) ) newState = call . getState ( ) ; } this . _state = newState ; }
public class BugPrioritySorter { /** * Sorts bug groups on severity first , then on bug pattern name . */ static int compareGroups ( BugGroup m1 , BugGroup m2 ) { } }
int result = m1 . compareTo ( m2 ) ; if ( result == 0 ) { return m1 . getShortDescription ( ) . compareToIgnoreCase ( m2 . getShortDescription ( ) ) ; } return result ;
public class PromptOptions {
    /**
     * Creates a new {@link PromptOptions} initialized with the given message
     * and the default prompt callback.
     *
     * @param message the message to display
     * @return the freshly created options object
     */
    public static final PromptOptions newOptions(final String message) {
        // GWT overlay type pattern: create a raw JavaScriptObject and cast it
        // to this overlay; there is no Java-side constructor.
        PromptOptions options = JavaScriptObject.createObject().cast();
        options.setMessage(message);
        options.setCallback(PromptCallback.DEFAULT_PROMPT_CALLBACK);
        return options;
    }
}
public class JMLambda {
    /**
     * Applies {@code biFunction} to the two targets only when {@code bool} is true.
     *
     * @param <T> the type parameter
     * @param <U> the type parameter
     * @param <R> the type parameter
     * @param bool the condition
     * @param target1 the first argument to the function
     * @param target2 the second argument to the function
     * @param biFunction the function to apply
     * @return the optional produced by {@code supplierIfTrue} — presumably holding the
     *         function result when {@code bool} is true and empty otherwise (see that method)
     */
    public static <T, U, R> Optional<R> biFunctionIfTrue(boolean bool, T target1, U target2, BiFunction<T, U, R> biFunction) {
        // The lambda defers evaluation: biFunction only runs if supplierIfTrue invokes it.
        return supplierIfTrue(bool, () -> biFunction.apply(target1, target2));
    }
}
public class AmazonRoute53Client {
    /**
     * Creates a new version of an existing traffic policy. When you create a new version of a traffic policy, you
     * specify the ID of the traffic policy that you want to update and a JSON-formatted document that describes the new
     * version. You use traffic policies to create multiple DNS resource record sets for one domain name (such as
     * example.com) or one subdomain name (such as www.example.com). You can create a maximum of 1000 versions of a
     * traffic policy. If you reach the limit and need to create another version, you'll need to start a new traffic
     * policy.
     *
     * @param createTrafficPolicyVersionRequest
     *        A complex type that contains information about the traffic policy that you want to create a new version
     *        for.
     * @return Result of the CreateTrafficPolicyVersion operation returned by the service.
     * @throws NoSuchTrafficPolicyException
     *         No traffic policy exists with the specified ID.
     * @throws InvalidInputException
     *         The input is not valid.
     * @throws TooManyTrafficPolicyVersionsForCurrentPolicyException
     *         This traffic policy version can't be created because you've reached the limit of 1000 on the number of
     *         versions that you can create for the current traffic policy.</p>
     *         <p>
     *         To create more traffic policy versions, you can use <a
     *         href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_GetTrafficPolicy.html"
     *         >GetTrafficPolicy</a> to get the traffic policy document for a specified traffic policy version, and then
     *         use <a href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_CreateTrafficPolicy.html">
     *         CreateTrafficPolicy</a> to create a new traffic policy using the traffic policy document.
     * @throws ConcurrentModificationException
     *         Another user submitted a request to create, update, or delete the object at the same time that you did.
     *         Retry the request.
     * @throws InvalidTrafficPolicyDocumentException
     *         The format of the traffic policy document that you specified in the <code>Document</code> element is
     *         invalid.
     * @sample AmazonRoute53.CreateTrafficPolicyVersion
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53-2013-04-01/CreateTrafficPolicyVersion"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public CreateTrafficPolicyVersionResult createTrafficPolicyVersion(CreateTrafficPolicyVersionRequest request) {
        // Apply registered request handlers / client-side customizations before dispatch.
        request = beforeClientExecution(request);
        return executeCreateTrafficPolicyVersion(request);
    }
}
public class SchemaUpdater { /** * Executes the given statement . */ private void execute ( String sql ) throws SQLException { } }
Statement statement = connection . createStatement ( ) ; try { statement . executeUpdate ( substitute ( sql ) ) ; } finally { DbUtils . closeQuietly ( statement ) ; }
public class NodeImpl {
    /**
     * {@inheritDoc}
     *
     * Validates this node is still usable, then asks the session's access manager
     * whether the current identity holds the requested actions on this node's ACL.
     *
     * @throws AccessControlException if the identity lacks the requested actions
     * @throws RepositoryException on other repository errors
     */
    public void checkPermission(String actions) throws AccessControlException, RepositoryException {
        // Fail fast if this node (or its session) is no longer valid.
        checkValid();
        if (!session.getAccessManager().hasPermission(getACL(), actions, session.getUserState().getIdentity())) {
            throw new AccessControlException("Permission denied " + getPath() + " : " + actions);
        }
    }
}
public class NodeEntryImpl { /** * Set the value for a specific attribute * @ param name attribute name * @ param value attribute value * @ return value */ public String setAttribute ( final String name , final String value ) { } }
if ( null != value ) { return getAttributes ( ) . put ( name , value ) ; } else { getAttributes ( ) . remove ( name ) ; return value ; }
public class AbstractCli {
    /**
     * Adds common options to {@code parser}. Always registers help/randomSeed/noPrintOptions;
     * the remaining groups are registered only when the corresponding {@code CommonOptions}
     * flag is present in {@code opts}.
     *
     * @param parser the option parser to register options on
     */
    private void initializeCommonOptions(OptionParser parser) {
        // Options available to every program.
        helpOpt = parser.acceptsAll(Arrays.asList("help", "h"), "Print this help message.");
        randomSeed = parser.accepts("randomSeed",
            "Seed to use for generating random numbers. "
                + "Program execution may still be nondeterministic, if multithreading is used.")
            .withRequiredArg().ofType(Long.class).defaultsTo(0L);
        noPrintOptions = parser.accepts("noPrintOptions",
            "Don't print out the command-line options "
                + "passed in to this program or final runtime statistics.");
        // Stochastic gradient descent options.
        if (opts.contains(CommonOptions.STOCHASTIC_GRADIENT)) {
            sgdIterations = parser.accepts("iterations",
                "Number of iterations (passes over the data) for stochastic gradient descent.")
                .withRequiredArg().ofType(Long.class).defaultsTo(10L);
            sgdBatchSize = parser.accepts("batchSize",
                "Minibatch size, i.e., the number of examples processed per gradient computation. If unspecified, defaults to using the entire data set (gradient descent).")
                .withRequiredArg().ofType(Integer.class);
            sgdInitialStep = parser.accepts("initialStepSize",
                "Initial step size for stochastic gradient descent.")
                .withRequiredArg().ofType(Double.class).defaultsTo(1.0);
            sgdNoDecayStepSize = parser.accepts("noDecayStepSize",
                "Don't use a 1/sqrt(t) step size decay during stochastic gradient descent.");
            sgdNoReturnAveragedParameters = parser.accepts("noReturnAveragedParameters",
                "Get the average of the parameter iterates of stochastic gradient descent.");
            sgdL2Regularization = parser.accepts("l2Regularization",
                "Regularization parameter for the L2 norm of the parameter vector.")
                .withRequiredArg().ofType(Double.class).defaultsTo(0.0);
            sgdRegularizationFrequency = parser.accepts("regularizationFrequency",
                "Fraction of iterations on which to apply regularization. Must be between 0 and 1")
                .withRequiredArg().ofType(Double.class).defaultsTo(1.0);
            sgdClipGradients = parser.accepts("clipGradients",
                "Clip gradients to a max l2 norm of the given value.")
                .withRequiredArg().ofType(Double.class).defaultsTo(Double.MAX_VALUE);
            sgdAdagrad = parser.accepts("adagrad",
                "Use the adagrad algorithm for stochastic gradient descent.");
        }
        // LBFGS options.
        if (opts.contains(CommonOptions.LBFGS)) {
            lbfgs = parser.accepts("lbfgs");
            lbfgsIterations = parser.accepts("lbfgsIterations",
                "Maximum number of iterations (passes over the data) for LBFGS.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(100);
            lbfgsHessianRank = parser.accepts("lbfgsHessianRank",
                "Rank (number of vectors) of LBFGS's inverse Hessian approximation.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(30);
            lbfgsL2Regularization = parser.accepts("lbfgsL2Regularization",
                "L2 regularization imposed by LBFGS")
                .withRequiredArg().ofType(Double.class).defaultsTo(0.0);
            // Providing either of these options triggers the use of minibatch LBFGS
            lbfgsMinibatchIterations = parser.accepts("lbfgsMinibatchIterations",
                "If specified, run LBFGS on minibatches of the data with the specified number of iterations per minibatch.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(-1);
            lbfgsMinibatchSize = parser.accepts("lbfgsMinibatchSize",
                "If specified, run LBFGS on minibatches of the data with the specified number of examples per minibatch.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(-1);
            lbfgsAdaptiveMinibatches = parser.accepts("lbfgsAdaptiveMinibatches",
                "If given, LBFGS is run on minibatches of exponentially increasing size.");
        }
        // Logging options shared by both optimizers.
        if (opts.contains(CommonOptions.STOCHASTIC_GRADIENT) || opts.contains(CommonOptions.LBFGS)) {
            logInterval = parser.accepts("logInterval",
                "Number of training iterations between logging outputs.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(1);
            logParametersInterval = parser.accepts("logParametersInterval",
                "Number of training iterations between serializing parameters to disk during training. "
                    + "If unspecified, model parameters are not serialized to disk during training.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(-1);
            logParametersDir = parser.accepts("logParametersDir",
                "Directory where serialized model "
                    + "parameters are stored. Must be specified if logParametersInterval is specified.")
                .withRequiredArg().ofType(String.class);
            logBrief = parser.accepts("logBrief", "Hides training output.");
        }
        // Parallel map-reduce execution options.
        if (opts.contains(CommonOptions.MAP_REDUCE)) {
            mrMaxThreads = parser.accepts("maxThreads",
                "Maximum number of threads to use during parallel execution.")
                .withRequiredArg().ofType(Integer.class)
                .defaultsTo(Runtime.getRuntime().availableProcessors());
            mrMaxBatchesPerThread = parser.accepts("maxBatchesPerThread",
                "Number of batches of items to create per thread.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(20);
        }
        // Functional gradient ascent options.
        if (opts.contains(CommonOptions.FUNCTIONAL_GRADIENT_ASCENT)) {
            fgaIterations = parser.accepts("fgaIterations",
                "Number of iterations of functional gradient ascent to perform.")
                .withRequiredArg().ofType(Integer.class).defaultsTo(10);
            fgaBatchSize = parser.accepts("fgaBatchSize",
                "Number of examples to process before each functional gradient update. If not provided, use the entire data set.")
                .withRequiredArg().ofType(Integer.class);
            fgaInitialStep = parser.accepts("fgaInitialStepSize",
                "Initial step size for functional gradient ascent.")
                .withRequiredArg().ofType(Double.class).defaultsTo(1.0);
            fgaNoDecayStepSize = parser.accepts("fgaNoDecayStepSize",
                "Don't use a 1/sqrt(t) step size decay during functional gradient ascent.");
        }
        // Regression tree options (rtreeMaxDepth is the only required option here).
        if (opts.contains(CommonOptions.REGRESSION_TREE)) {
            rtreeMaxDepth = parser.accepts("rtreeMaxDepth",
                "Maximum depth of trained regression trees")
                .withRequiredArg().ofType(Integer.class).required();
        }
    }
}
public class BplusTreeFile {
    /**
     * Reads and validates the tree metadata stored in block 0, populating this
     * tree's bookkeeping fields, clearing the read/write caches, and reloading
     * the free-block bitmap when the file was closed cleanly.
     *
     * @return true if file is clean or not (clean-shutdown marker byte was set)
     * @throws InvalidDataException if metadata is invalid
     */
    private boolean readMetaData() throws InvalidDataException {
        final ByteBuffer buf = storage.get(0);
        int magic1, magic2, t_b_order_leaf, t_b_order_internal, t_blockSize;
        // sanity
        boolean isClean = false;
        // Layout: MAGIC_1, blockSize, leaf order, internal order — all must match
        // this tree's configuration or the file is rejected.
        magic1 = buf.getInt();
        if (magic1 != MAGIC_1) {
            throw new InvalidDataException("Invalid metadata (MAGIC1)");
        }
        t_blockSize = buf.getInt();
        if (t_blockSize != blockSize) {
            throw new InvalidDataException("Invalid metadata (blockSize) " + t_blockSize + " != " + blockSize);
        }
        t_b_order_leaf = buf.getInt();
        t_b_order_internal = buf.getInt();
        if (t_b_order_leaf != b_order_leaf) {
            throw new InvalidDataException("Invalid metadata (b-order leaf) " + t_b_order_leaf + " != " + b_order_leaf);
        }
        if (t_b_order_internal != b_order_internal) {
            throw new InvalidDataException("Invalid metadata (b-order internal) " + t_b_order_internal + " != " + b_order_internal);
        }
        // Tree shape / bookkeeping fields follow the validation header, in this exact order.
        storageBlock = buf.getInt();
        rootIdx = buf.getInt();
        lowIdx = buf.getInt();
        highIdx = buf.getInt();
        elements = buf.getInt();
        height = buf.getInt();
        maxInternalNodes = buf.getInt();
        maxLeafNodes = buf.getInt();
        // 0xEA is the clean-shutdown marker byte.
        isClean = ((buf.get() == ((byte) 0xEA)) ? true : false);
        magic2 = buf.getInt();
        if (magic2 != MAGIC_2) {
            throw new InvalidDataException("Invalid metadata (MAGIC2)");
        }
        if (log.isDebugEnabled()) {
            log.debug(this.getClass().getName() + "::readMetaData() elements=" + elements + " rootIdx=" + rootIdx);
        }
        storage.release(buf);
        // Clear Caches
        clearReadCaches();
        clearWriteCaches();
        // After a clean shutdown the on-disk free-block bitmap is trustworthy;
        // a failed reload is logged and tolerated (best-effort).
        if (isClean && fileFreeBlocks.exists()) {
            try {
                final SimpleBitSet newFreeBlocks = SimpleBitSet.deserializeFromFile(fileFreeBlocks);
                freeBlocks = newFreeBlocks;
            } catch (IOException e) {
                log.error("IOException in readMetaData()", e);
            }
        }
        return isClean;
    }
}
public class MutableBigInteger { /** * Sets this MutableBigInteger ' s value array to a copy of the specified * array . The intLen is set to the length of the specified array . */ void copyValue ( int [ ] val ) { } }
int len = val . length ; if ( value . length < len ) value = new int [ len ] ; System . arraycopy ( val , 0 , value , 0 , len ) ; intLen = len ; offset = 0 ;
public class HashtableOnDisk {
    /**
     * This invokes the action's "execute" method once for every
     * object passing only the key to the method, to avoid the
     * overhead of reading the object if it is not necessary.
     * The iteration is synchronized with concurrent get and put operations
     * to avoid locking the HTOD for long periods of time. Some objects which
     * are added during iteration may not be seen by the iteration.
     *
     * @param action The object to be "invoked" for each object.
     * @param index starting position for the walk
     * @param length number of entries to walk
     * @return the result of the underlying hash walk
     */
    public int iterateKeys(HashtableAction action, int index, int length)
        throws IOException, EOFException, FileManagerException, ClassNotFoundException, HashtableOnDiskException {
        // Delegate to the shared walker in key-only mode (RETRIEVE_KEY) so the
        // stored payloads are not read from disk unnecessarily.
        return walkHash(action, RETRIEVE_KEY, index, length);
    }
}
public class ApplicationContextProvider { /** * Gets cas properties . * @ return the cas properties */ public static Optional < CasConfigurationProperties > getCasConfigurationProperties ( ) { } }
if ( CONTEXT != null ) { return Optional . of ( CONTEXT . getBean ( CasConfigurationProperties . class ) ) ; } return Optional . empty ( ) ;
public class CharInfo {
    /**
     * Map a character to a String. For example given
     * the character '&gt;' this method would return the fully decorated
     * entity name "&lt;".
     * Strings for entity references are loaded from a properties file,
     * but additional mappings defined through calls to defineChar2String()
     * are possible. Such entity reference mappings could be over-ridden.
     *
     * This is reusing a stored key object, in an effort to avoid
     * heap activity. Unfortunately, that introduces a threading risk.
     * Simplest fix for now is to make it a synchronized method, or to give
     * up the reuse; I see very little performance difference between them.
     * Long-term solution would be to replace the hashtable with a sparse array
     * keyed directly from the character's integer value; see DTM's
     * string pool for a related solution.
     *
     * @param value The character that should be resolved to
     * a String, e.g. resolve '&gt;' to "&lt;".
     * @return The String that the character is mapped to, or null if not found.
     * @xsl.usage internal
     */
    String getOutputStringForChar(char value) {
        // CharKey m_charKey = new CharKey(); // Alternative to synchronized
        // NOTE: mutating the shared m_charKey is not thread-safe (see javadoc above).
        m_charKey.setChar(value);
        return (String) m_charToString.get(m_charKey);
    }
}
public class DeploymentMetadataParse { /** * Transform a < code > & lt ; plugin . . . / & gt ; < / code > structure . */ protected void parseProcessEnginePlugin ( Element element , List < ProcessEnginePluginXml > plugins ) { } }
ProcessEnginePluginXmlImpl plugin = new ProcessEnginePluginXmlImpl ( ) ; Map < String , String > properties = new HashMap < String , String > ( ) ; for ( Element childElement : element . elements ( ) ) { if ( PLUGIN_CLASS . equals ( childElement . getTagName ( ) ) ) { plugin . setPluginClass ( childElement . getText ( ) ) ; } else if ( PROPERTIES . equals ( childElement . getTagName ( ) ) ) { parseProperties ( childElement , properties ) ; } } plugin . setProperties ( properties ) ; plugins . add ( plugin ) ;
public class DescribeTrailsResult { /** * The list of trail objects . * @ param trailList * The list of trail objects . */ public void setTrailList ( java . util . Collection < Trail > trailList ) { } }
if ( trailList == null ) { this . trailList = null ; return ; } this . trailList = new com . amazonaws . internal . SdkInternalList < Trail > ( trailList ) ;
public class SocketController { /** * creates application lifecycle and network connectivity callbacks . * @ return Application lifecycle and network connectivity callbacks . */ public LifecycleListener createLifecycleListener ( ) { } }
return new LifecycleListener ( ) { @ Override public void onForegrounded ( Context context ) { synchronized ( lock ) { if ( ! isForegrounded ) { isForegrounded = true ; connectSocket ( ) ; if ( receiver == null ) { receiver = new InternetConnectionReceiver ( socketConnection ) ; } context . registerReceiver ( receiver , new IntentFilter ( ConnectivityManager . CONNECTIVITY_ACTION ) ) ; } lock . notifyAll ( ) ; } } @ Override public void onBackgrounded ( Context context ) { synchronized ( lock ) { if ( isForegrounded ) { isForegrounded = false ; disconnectSocket ( ) ; if ( receiver != null && ! isForegrounded ) { context . unregisterReceiver ( receiver ) ; } } lock . notifyAll ( ) ; } } } ;
public class ExceptionHandling { /** * Find a location ( class and line number ) where an exception occurred * - ignore Assert and ChorusAssert if these are at the top of the exception stack , * we ' re trying to provide the user class which used the assertions */ public static String getExceptionLocation ( Throwable t ) { } }
StackTraceElement element = findStackTraceElement ( t ) ; return element != null ? "(" + getSimpleClassName ( element ) + ":" + element . getLineNumber ( ) + ")-" : "" ;