signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AdminLabeltypeAction { private HtmlResponse asListHtml ( ) { } }
return asHtml ( path_AdminLabeltype_AdminLabeltypeJsp ) . renderWith ( data -> { RenderDataUtil . register ( data , "labelTypeItems" , labelTypeService . getLabelTypeList ( labelTypePager ) ) ; // page navi } ) . useForm ( SearchForm . class , setup -> { setup . setup ( form -> { copyBeanToBean ( labelTypePager , form , op -> op . include ( "id" ) ) ; } ) ; } ) ;
public class DeleteBrokerRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteBrokerRequest deleteBrokerRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteBrokerRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteBrokerRequest . getBrokerId ( ) , BROKERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SingleFileStore {

    /**
     * Coalesces adjacent free entries to create larger free entries (so that
     * the probability of finding a free entry during allocation increases).
     *
     * @param entries the free entries to scan for adjacency
     */
    private void mergeFreeEntries(List<FileEntry> entries) {
        long startTime = 0;
        if (trace)
            startTime = timeService.wallClockTime();
        FileEntry lastEntry = null;
        FileEntry newEntry = null;
        int mergeCounter = 0;
        for (FileEntry fe : entries) {
            // Locked entries are in use; never merge them away.
            if (fe.isLocked())
                continue;

            // Merge any holes created (consecutive free entries) in the file.
            // NOTE(review): the adjacency test (lastEntry.offset == fe.offset + fe.size)
            // means the current entry sits immediately BEFORE the previous one in the
            // file, so this appears to assume 'entries' is ordered by descending
            // offset — TODO confirm with the caller.
            if ((lastEntry != null) && (lastEntry.offset == (fe.offset + fe.size))) {
                if (newEntry == null) {
                    // Start a merged entry covering fe plus the previously seen entry.
                    newEntry = new FileEntry(fe.offset, fe.size + lastEntry.size);
                    freeList.remove(lastEntry);
                    mergeCounter++;
                } else {
                    // Extend the in-progress merged entry to also cover fe.
                    newEntry = new FileEntry(fe.offset, fe.size + newEntry.size);
                }
                freeList.remove(fe);
                mergeCounter++;
            } else {
                // Adjacency chain broken: publish the accumulated merged entry, if any.
                if (newEntry != null) {
                    mergeAndLogEntry(newEntry, mergeCounter);
                    newEntry = null;
                    mergeCounter = 0;
                }
            }
            lastEntry = fe;
        }
        // Flush a merge that was still in progress when the loop ended.
        if (newEntry != null)
            mergeAndLogEntry(newEntry, mergeCounter);

        if (trace)
            log.tracef("Total time taken for mergeFreeEntries: " + (timeService.wallClockTime() - startTime) + " (ms)");
    }
}
public class CmsSystemConfiguration { /** * Adds a new instance of a request handler class . < p > * @ param clazz the class name of the request handler to instantiate and add */ public void addRequestHandler ( String clazz ) { } }
Object initClass ; try { initClass = Class . forName ( clazz ) . newInstance ( ) ; } catch ( Throwable t ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_INIT_REQUEST_HANDLER_FAILURE_1 , clazz ) , t ) ; return ; } if ( initClass instanceof I_CmsRequestHandler ) { m_requestHandlers . add ( ( I_CmsRequestHandler ) initClass ) ; if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_REQUEST_HANDLER_SUCCESS_1 , clazz ) ) ; } } else { if ( CmsLog . INIT . isErrorEnabled ( ) ) { CmsLog . INIT . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_REQUEST_HANDLER_INVALID_1 , clazz ) ) ; } }
public class ObjectMapperProvider {

    /**
     * Writes out the flow object as JSON. Field order follows the statement
     * order below; each field passes through {@code includeFilter} before it
     * is emitted. Timing fields are only emitted at detail level EVERYTHING,
     * and per-job details only when job stats are requested.
     *
     * @param jsonGenerator the generator to write to
     * @param aFlow the flow to serialize
     * @param selectedSerialization the detail level controlling optional sections
     * @param includeFilter predicate deciding which field names are emitted
     * @throws JsonGenerationException on JSON writing errors
     * @throws IOException on I/O errors
     */
    @SuppressWarnings("deprecation")
    public static void writeFlowDetails(JsonGenerator jsonGenerator, Flow aFlow, DetailLevel selectedSerialization, Predicate<String> includeFilter) throws JsonGenerationException, IOException {
        jsonGenerator.writeStartObject();
        // serialize the FlowKey object
        filteredWrite("flowKey", includeFilter, aFlow.getFlowKey(), jsonGenerator);
        // serialize individual members of this class
        filteredWrite("flowName", includeFilter, aFlow.getFlowName(), jsonGenerator);
        filteredWrite("userName", includeFilter, aFlow.getUserName(), jsonGenerator);
        filteredWrite("jobCount", includeFilter, aFlow.getJobCount(), jsonGenerator);
        filteredWrite("totalMaps", includeFilter, aFlow.getTotalMaps(), jsonGenerator);
        filteredWrite("totalReduces", includeFilter, aFlow.getTotalReduces(), jsonGenerator);
        filteredWrite("mapFileBytesRead", includeFilter, aFlow.getMapFileBytesRead(), jsonGenerator);
        filteredWrite("mapFileBytesWritten", includeFilter, aFlow.getMapFileBytesWritten(), jsonGenerator);
        filteredWrite("reduceFileBytesRead", includeFilter, aFlow.getReduceFileBytesRead(), jsonGenerator);
        filteredWrite("hdfsBytesRead", includeFilter, aFlow.getHdfsBytesRead(), jsonGenerator);
        filteredWrite("hdfsBytesWritten", includeFilter, aFlow.getHdfsBytesWritten(), jsonGenerator);
        filteredWrite("mapSlotMillis", includeFilter, aFlow.getMapSlotMillis(), jsonGenerator);
        filteredWrite("reduceSlotMillis", includeFilter, aFlow.getReduceSlotMillis(), jsonGenerator);
        filteredWrite("megabyteMillis", includeFilter, aFlow.getMegabyteMillis(), jsonGenerator);
        filteredWrite("cost", includeFilter, aFlow.getCost(), jsonGenerator);
        filteredWrite("reduceShuffleBytes", includeFilter, aFlow.getReduceShuffleBytes(), jsonGenerator);
        filteredWrite("duration", includeFilter, aFlow.getDuration(), jsonGenerator);
        filteredWrite("wallClockTime", includeFilter, aFlow.getWallClockTime(), jsonGenerator);
        filteredWrite("cluster", includeFilter, aFlow.getCluster(), jsonGenerator);
        filteredWrite("appId", includeFilter, aFlow.getAppId(), jsonGenerator);
        filteredWrite("runId", includeFilter, aFlow.getRunId(), jsonGenerator);
        filteredWrite("version", includeFilter, aFlow.getVersion(), jsonGenerator);
        filteredWrite("hadoopVersion", includeFilter, aFlow.getHadoopVersion(), jsonGenerator);
        // Timing fields are only included at the highest detail level.
        if (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING) {
            filteredWrite("submitTime", includeFilter, aFlow.getSubmitTime(), jsonGenerator);
            filteredWrite("launchTime", includeFilter, aFlow.getLaunchTime(), jsonGenerator);
            filteredWrite("finishTime", includeFilter, aFlow.getFinishTime(), jsonGenerator);
        }
        filteredWrite(Constants.HRAVEN_QUEUE, includeFilter, aFlow.getQueue(), jsonGenerator);
        filteredWrite("counters", includeFilter, aFlow.getCounters(), jsonGenerator);
        filteredWrite("mapCounters", includeFilter, aFlow.getMapCounters(), jsonGenerator);
        filteredWrite("reduceCounters", includeFilter, aFlow.getReduceCounters(), jsonGenerator);
        // if flag, include job details
        if ((selectedSerialization == SerializationContext.DetailLevel.FLOW_SUMMARY_STATS_WITH_JOB_STATS) || (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING)) {
            jsonGenerator.writeFieldName("jobs");
            jsonGenerator.writeObject(aFlow.getJobs());
        }
        jsonGenerator.writeEndObject();
    }
}
public class ImageParser { /** * Parse a string of base64 encoded image data . 2011-09-08 PwD */ public static Document parseBase64 ( String base64Data , Element instruction ) throws Exception { } }
byte [ ] imageData = Base64 . decodeBase64 ( base64Data ) ; ByteArrayInputStream bais = new ByteArrayInputStream ( imageData ) ; StringWriter swLogger = new StringWriter ( ) ; PrintWriter pwLogger = new PrintWriter ( swLogger ) ; return parse ( bais , instruction , pwLogger ) ;
public class AbstractPool { /** * { @ inheritDoc } */ public void emptyManagedConnectionPool ( ManagedConnectionPool mcp ) { } }
if ( pools . values ( ) . remove ( mcp ) ) { mcp . shutdown ( ) ; if ( Tracer . isEnabled ( ) ) Tracer . destroyManagedConnectionPool ( poolConfiguration . getId ( ) , mcp ) ; }
public class Seb {

    /**
     * Modify Seb label joining given labels.
     *
     * @param labels Seb labels
     * @deprecated As of Seb version 0.3.22, replaced by
     *             <code>Seb.withLabel(String... labels)</code>.
     */
    @Deprecated
    public void setLabel(String... labels) {
        // Join all labels with the configured delimiter into the single label field.
        this.label = utils.join(LABEL_DELIMITER, (Object[]) labels);
    }
}
public class Matrix3x2f { /** * Apply shearing to this matrix by shearing along the X axis using the Y axis factor < code > yFactor < / code > , * and store the result in < code > dest < / code > . * @ param yFactor * the factor for the Y component to shear along the X axis * @ param dest * will hold the result * @ return dest */ public Matrix3x2f shearX ( float yFactor , Matrix3x2f dest ) { } }
float nm10 = m00 * yFactor + m10 ; float nm11 = m01 * yFactor + m11 ; dest . m00 = m00 ; dest . m01 = m01 ; dest . m10 = nm10 ; dest . m11 = nm11 ; dest . m20 = m20 ; dest . m21 = m21 ; return dest ;
public class BaseStepControllerImpl { /** * The only valid states at this point are STARTED , STOPPING , or FAILED . * been able to get to STOPPED , or COMPLETED yet at this point in the code . */ private void transitionToFinalBatchStatus ( ) { } }
StopLock stopLock = getStopLock ( ) ; // Store in local variable to facilitate Ctrl + Shift + G search in Eclipse synchronized ( stopLock ) { BatchStatus currentBatchStatus = runtimeStepExecution . getBatchStatus ( ) ; if ( currentBatchStatus . equals ( BatchStatus . STARTED ) ) { updateStepBatchStatus ( BatchStatus . COMPLETED ) ; } else if ( currentBatchStatus . equals ( BatchStatus . STOPPING ) ) { updateStepBatchStatus ( BatchStatus . STOPPED ) ; } else if ( currentBatchStatus . equals ( BatchStatus . FAILED ) ) { updateStepBatchStatus ( BatchStatus . FAILED ) ; // Should have already been done but maybe better for possible code refactoring to have it here . } else { throw new IllegalStateException ( "Step batch status should not be in a " + currentBatchStatus . name ( ) + " state" ) ; } }
public class JCacheProducers { /** * Allow @ Inject of Cache identified by its unique name * @ param ip to retrieve cache name * @ param < K > Key type * @ param < V > Value type * @ return unique instance of requested named cache * @ throws IllegalStateException if cache is not found in current context */ @ Produces @ NamedJCache public < K , V > Cache < K , V > produceCache ( InjectionPoint ip ) { } }
String cacheName = ip . getAnnotated ( ) . getAnnotation ( NamedJCache . class ) . name ( ) ; Cache < K , V > cache = produceCacheManager ( ) . getCache ( cacheName ) ; if ( cache == null ) throw new IllegalStateException ( "Cannot @Produces cache : Named JCache '" + cacheName + "' does not exists. Make sure you created it at startup" ) ; return cache ;
public class ThreadLocalRandom { /** * Returns an effectively unlimited stream of pseudorandom { @ code * double } values , each conforming to the given origin ( inclusive ) and bound * ( exclusive ) . * @ implNote This method is implemented to be equivalent to { @ code * doubles ( Long . MAX _ VALUE , randomNumberOrigin , randomNumberBound ) } . * @ param randomNumberOrigin the origin ( inclusive ) of each random value * @ param randomNumberBound the bound ( exclusive ) of each random value * @ return a stream of pseudorandom { @ code double } values , * each with the given origin ( inclusive ) and bound ( exclusive ) * @ throws IllegalArgumentException if { @ code randomNumberOrigin } * is greater than or equal to { @ code randomNumberBound } * @ since 1.8 */ public DoubleStream doubles ( double randomNumberOrigin , double randomNumberBound ) { } }
if ( ! ( randomNumberOrigin < randomNumberBound ) ) throw new IllegalArgumentException ( BAD_RANGE ) ; return StreamSupport . doubleStream ( new RandomDoublesSpliterator ( 0L , Long . MAX_VALUE , randomNumberOrigin , randomNumberBound ) , false ) ;
public class CharMatcher { /** * Sets bits in { @ code table } matched by this matcher . * @ param table the new bits */ void setBits ( BitSet table ) { } }
for ( int c = Character . MAX_VALUE ; c >= Character . MIN_VALUE ; c -- ) { if ( matches ( ( char ) c ) ) { table . set ( c ) ; } }
public class Util {

    /**
     * Creates a regexp from <code>likePattern</code>.
     *
     * Translation rules:
     * <ul>
     *   <li>letters and digits pass through; when preceded by a backslash they
     *       are emitted with a literal (doubled) backslash</li>
     *   <li>a non-escaped '_' becomes '.' and a non-escaped '%' becomes '.*'</li>
     *   <li>every other character is regex-escaped with a backslash</li>
     * </ul>
     *
     * @param likePattern the pattern.
     * @return the regular expression <code>Pattern</code>.
     */
    public static Pattern createRegexp(String likePattern) {
        StringBuilder translated = new StringBuilder();
        boolean pendingEscape = false;
        for (char current : likePattern.toCharArray()) {
            if (current == '\\') {
                if (pendingEscape) {
                    // An escaped backslash becomes a literal backslash in the regex.
                    translated.append("\\\\");
                    pendingEscape = false;
                } else {
                    pendingEscape = true;
                }
            } else if (Character.isLetterOrDigit(current)) {
                if (pendingEscape) {
                    translated.append("\\\\").append(current);
                    pendingEscape = false;
                } else {
                    translated.append(current);
                }
            } else if (pendingEscape) {
                // Escaped wildcard or symbol: emit it as a literal.
                translated.append('\\').append(current);
                pendingEscape = false;
            } else if (current == '_') {
                translated.append('.');
            } else if (current == '%') {
                translated.append(".*");
            } else {
                translated.append('\\').append(current);
            }
        }
        return Pattern.compile(translated.toString(), Pattern.DOTALL);
    }
}
public class Profile { /** * Sets the background interval . If the background profile hasn ' t started , start it . */ public void setBackgroundPeriod ( long period ) { } }
if ( period < 1 ) { throw new ConfigException ( L . l ( "profile period '{0}ms' is too small. The period must be greater than 10ms." , period ) ) ; } _profilerService . setBackgroundInterval ( period , TimeUnit . MILLISECONDS ) ;
public class VertxGenerator { /** * copied from jOOQ ' s JavaGenerator * @ param table * @ param out1 */ @ Override protected void generateDao ( TableDefinition table , JavaWriter out1 ) { } }
UniqueKeyDefinition key = table . getPrimaryKey ( ) ; if ( key == null ) { logger . info ( "Skipping DAO generation" , out1 . file ( ) . getName ( ) ) ; return ; } VertxJavaWriter out = ( VertxJavaWriter ) out1 ; generateDAO ( key , table , out ) ;
public class MultiUserChat {

    /**
     * Grants voice to a visitor in the room. In a moderated room, a moderator may want to manage
     * who does and does not have "voice" in the room. To have voice means that a room occupant
     * is able to send messages to the room occupants.
     *
     * @param nickname the nickname of the visitor to grant voice in the room (e.g. "john").
     * @throws XMPPErrorException if an error occurs granting voice to a visitor. In particular, a
     *         403 error can occur if the occupant that intended to grant voice is not
     *         a moderator in this room (i.e. Forbidden error); or a
     *         400 error can occur if the provided nickname is not present in the room.
     * @throws NoResponseException if there was no response from the server.
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    public void grantVoice(Resourcepart nickname) throws XMPPErrorException, NoResponseException, NotConnectedException, InterruptedException {
        // Granting voice is a role change: promote the occupant to participant.
        changeRole(nickname, MUCRole.participant, null);
    }
}
public class ArticleFeatureExtractor { /** * Computes the markedness of the area . The markedness generally describes the visual importance of the area based on different criteria . * @ return the computed expressiveness */ public double getMarkedness ( Area node ) { } }
double fsz = node . getFontSize ( ) / avgfont ; // use relative font size , 0 is the normal font double fwt = node . getFontWeight ( ) ; double fst = node . getFontStyle ( ) ; double ind = getIndentation ( node ) ; double cen = isCentered ( node ) ? 1.0 : 0.0 ; double contrast = getContrast ( node ) ; double cp = 1.0 - ca . getColorPercentage ( node ) ; double bcp = bca . getColorPercentage ( node ) ; bcp = ( bcp < 0.0 ) ? 0.0 : ( 1.0 - bcp ) ; // weighting double exp = weights [ WFSZ ] * fsz + weights [ WFWT ] * fwt + weights [ WFST ] * fst + weights [ WIND ] * ind + weights [ WCON ] * contrast + weights [ WCEN ] * cen + weights [ WCP ] * cp + weights [ WBCP ] * bcp ; return exp ;
public class AbstractJobVertex { /** * Returns the index of this vertex ' s first free input gate . * @ return the index of the first free input gate */ protected int getFirstFreeInputGateIndex ( ) { } }
for ( int i = 0 ; i < this . backwardEdges . size ( ) ; i ++ ) { if ( this . backwardEdges . get ( i ) == null ) { return i ; } } return this . backwardEdges . size ( ) ;
public class JsonSerializerMiddlewares { /** * Middleware that serializes the result of the inner handler using the supplied * { @ link ObjectWriter } , and sets the Content - Type header to application / json . */ public static < T > Middleware < AsyncHandler < T > , AsyncHandler < Response < ByteString > > > jsonSerialize ( ObjectWriter objectWriter ) { } }
return handler -> requestContext -> handler . invoke ( requestContext ) . thenApply ( result -> Response . forPayload ( serialize ( objectWriter , result ) ) . withHeader ( CONTENT_TYPE , JSON ) ) ;
public class PharmacophoreQueryAngleBond { /** * Checks whether the query angle constraint matches a target distance . * This method checks whether a query constraint is satisfied by an observed * angle ( represented by a { @ link org . openscience . cdk . pharmacophore . PharmacophoreAngleBond } in the target molecule . * Note that angles are compared upto 2 decimal places . * @ param bond The angle relationship in a target molecule * @ return true if the target angle lies within the range of the query constraint */ @ Override public boolean matches ( IBond bond ) { } }
bond = BondRef . deref ( bond ) ; if ( bond instanceof PharmacophoreAngleBond ) { PharmacophoreAngleBond pbond = ( PharmacophoreAngleBond ) bond ; double bondLength = round ( pbond . getBondLength ( ) , 2 ) ; return bondLength >= lower && bondLength <= upper ; } else return false ;
public class SessionDialog { /** * Initialises the given panel with the current session and the corresponding UI shared context . * @ param contextPanel the context panel to initialise * @ see AbstractContextPropertiesPanel # initContextData ( Session , Context ) */ private void initContextPanel ( AbstractContextPropertiesPanel contextPanel ) { } }
Context ctx = uiContexts . get ( contextPanel . getContextIndex ( ) ) ; if ( ctx != null ) { contextPanel . initContextData ( session , ctx ) ; }
public class Agent { /** * This method reconfigures the agent . * It is invoked by iPojo when the configuration changes . * It may be invoked before the start ( ) method is . */ @ Override public void reconfigure ( ) { } }
// This method is invoked when properties change . // It is not related to life cycle ( start / stop ) . this . logger . info ( "Reconfiguration requested in agent " + getAgentId ( ) ) ; if ( this . messagingClient == null ) { this . logger . info ( "The agent has not yet been started. Configuration is dropped." ) ; return ; } // Update the messaging connection this . messagingClient . setApplicationName ( this . applicationName ) ; this . messagingClient . setScopedInstancePath ( this . scopedInstancePath ) ; this . messagingClient . setIpAddress ( this . ipAddress ) ; this . messagingClient . setNeedsModel ( needsModel ( ) ) ; this . messagingClient . setDomain ( this . domain ) ; this . messagingClient . switchMessagingType ( this . messagingType ) ; // Deal with injected configurations AgentUtils . injectConfigurations ( this . karafEtc , this . applicationName , this . scopedInstancePath , this . domain , this . ipAddress ) ; this . logger . info ( "The agent was successfully (re)configured." ) ;
public class MutableBigInteger {

    /**
     * This method is used for division. It multiplies an n word input a by one
     * word input x, and subtracts the n word product from q. This is needed
     * when subtracting qhat*divisor from dividend.
     *
     * @param q      the minuend words; overwritten in place with the difference
     * @param a      the multiplicand words
     * @param x      the single-word multiplier
     * @param len    the number of words of {@code a} to process
     * @param offset base index into {@code q}; words {@code q[offset+1 .. offset+len]}
     *               are updated (offset is advanced by len below)
     * @return the final carry/borrow word
     */
    private int mulsub(int[] q, int[] a, int x, int len, int offset) {
        // Treat x as an unsigned 32-bit value.
        long xLong = x & LONG_MASK;
        long carry = 0;
        offset += len;

        // Process from the least significant word upward: j counts down through
        // a while offset counts down through q.
        for (int j = len - 1; j >= 0; j--) {
            long product = (a[j] & LONG_MASK) * xLong + carry;
            long difference = q[offset] - product;
            q[offset--] = (int) difference;
            // Next carry = high word of the product, plus 1 when the subtraction
            // borrowed; the borrow is detected by the unsigned comparison of the
            // low 32 bits of the difference against ~(low word of product).
            carry = (product >>> 32)
                + (((difference & LONG_MASK) > (((~(int) product) & LONG_MASK))) ? 1 : 0);
        }
        return (int) carry;
    }
}
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link AbstractTopologyType}{@code >}.
     *
     * @param value Java instance representing the xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link AbstractTopologyType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "_Topology", substitutionHeadNamespace = "http://www.opengis.net/gml", substitutionHeadName = "_GML")
    public JAXBElement<AbstractTopologyType> create_Topology(AbstractTopologyType value) {
        // No scope class (null) — this element is declared at global scope.
        return new JAXBElement<AbstractTopologyType>(__Topology_QNAME, AbstractTopologyType.class, null, value);
    }
}
public class CmdLineCLA { /** * { @ inheritDoc } */ @ Override protected void exportCommandLineData ( final StringBuilder str , final int occ ) { } }
str . append ( "[" ) ; getValue ( occ ) . exportCommandLine ( str ) ; str . append ( "]" ) ;
public class PdfObject { /** * Whether this object can be contained in an object stream . * PdfObjects of type STREAM OR INDIRECT can not be contained in an * object stream . * @ return < CODE > true < / CODE > if this object can be in an object stream . * Otherwise < CODE > false < / CODE > */ public boolean canBeInObjStm ( ) { } }
switch ( type ) { case NULL : case BOOLEAN : case NUMBER : case STRING : case NAME : case ARRAY : case DICTIONARY : return true ; case STREAM : case INDIRECT : default : return false ; }
public class ReconciliationReportRow {

    /**
     * Gets the dfpRevenue value for this ReconciliationReportRow.
     *
     * @return dfpRevenue — the revenue calculated based on the {@link #costPerUnit},
     *         {@link #costType}, {@link #dfpClicks}, {@link #dfpImpressions} and
     *         {@link #dfpLineItemDays}. This attribute is calculated by Google
     *         and is read-only.
     */
    public com.google.api.ads.admanager.axis.v201808.Money getDfpRevenue() {
        return dfpRevenue;
    }
}
public class NodeTaskServiceImpl { /** * 获取匹配stage的TaskEvent对象 */ private TaskEvent getMatchStage ( NodeTask nodeTask , StageType stage ) { } }
List < StageType > stages = nodeTask . getStage ( ) ; List < TaskEvent > events = nodeTask . getEvent ( ) ; for ( int i = 0 ; i < stages . size ( ) ; i ++ ) { if ( stages . get ( i ) == stage ) { return events . get ( i ) ; } } return null ;
public class NodeTypeDataManagerImpl { /** * Registers all the remote commands */ private void initRemoteCommands ( ) { } }
this . id = UUID . randomUUID ( ) . toString ( ) ; registerNodeTypes = rpcService . registerCommand ( new RemoteCommand ( ) { public String getId ( ) { return "org.exoplatform.services.jcr.impl.core.nodetype.NodeTypeDataManagerImpl-registerNodeTypes-" + repositoryName ; } public Serializable execute ( Serializable [ ] args ) throws Throwable { if ( ! id . equals ( args [ 0 ] ) ) { try { String [ ] names = ( String [ ] ) args [ 1 ] ; final List < NodeTypeData > allNodeTypes = new ArrayList < NodeTypeData > ( ) ; for ( int i = 0 ; i < names . length ; i ++ ) { NodeTypeData nodeType = persister . getNodeType ( InternalQName . parse ( names [ i ] ) ) ; if ( nodeType != null ) allNodeTypes . add ( nodeType ) ; } // register nodetypes in runtime final Map < InternalQName , NodeTypeData > volatileNodeTypes = new HashMap < InternalQName , NodeTypeData > ( ) ; // create map from list for ( final NodeTypeData nodeTypeData : allNodeTypes ) { volatileNodeTypes . put ( nodeTypeData . getName ( ) , nodeTypeData ) ; } for ( final NodeTypeData nodeTypeData : allNodeTypes ) { nodeTypeRepository . addNodeType ( nodeTypeData , volatileNodeTypes ) ; for ( NodeTypeManagerListener listener : listeners . values ( ) ) { listener . nodeTypeRegistered ( nodeTypeData . getName ( ) ) ; } } } catch ( Exception e ) { LOG . warn ( "Could not register the node types" , e ) ; } } return true ; } } ) ; unregisterNodeType = rpcService . registerCommand ( new RemoteCommand ( ) { public String getId ( ) { return "org.exoplatform.services.jcr.impl.core.nodetype.NodeTypeDataManagerImpl-unregisterNodeType-" + repositoryName ; } public Serializable execute ( Serializable [ ] args ) throws Throwable { if ( ! id . equals ( args [ 0 ] ) ) { try { String name = ( String ) args [ 1 ] ; NodeTypeData nodeType = nodeTypeRepository . getNodeType ( InternalQName . parse ( name ) ) ; if ( nodeType != null ) { nodeTypeRepository . removeNodeType ( nodeType ) ; for ( NodeTypeManagerListener listener : listeners . 
values ( ) ) { listener . nodeTypeUnregistered ( nodeType . getName ( ) ) ; } } } catch ( Exception e ) { LOG . warn ( "Could not register the node type" , e ) ; } } return true ; } } ) ;
public class MetadataProviderImpl {

    /**
     * Creates the method metadata for methods annotated with {@link ResultOf}.
     *
     * @param annotatedMethod The annotated method.
     * @param resultOf The {@link com.buschmais.xo.api.annotation.ResultOf} annotation.
     * @return The method metadata.
     */
    private MethodMetadata<?, ?> createResultOfMetadata(AnnotatedMethod annotatedMethod, ResultOf resultOf) {
        Method method = annotatedMethod.getAnnotatedElement();
        // Determine the effective return type: unwrap Result<X> to X.
        Class<?> methodReturnType = method.getReturnType();
        Class<?> returnType;
        if (Result.class.isAssignableFrom(methodReturnType)) {
            Type genericReturnType = method.getGenericReturnType();
            ParameterizedType parameterizedType = (ParameterizedType) genericReturnType;
            returnType = (Class<?>) parameterizedType.getActualTypeArguments()[0];
        } else {
            returnType = methodReturnType;
        }
        // Determine the query definition: when none is given explicitly, use the
        // method itself (if meta-annotated with @QueryDefinition) or the return type.
        AnnotatedElement query = resultOf.query();
        if (Object.class.equals(query)) {
            if (annotatedMethod.getByMetaAnnotation(QueryDefinition.class) != null) {
                query = annotatedMethod.getAnnotatedElement();
            } else {
                query = returnType;
            }
        }
        // Determine parameter bindings: every parameter must carry @Parameter.
        Annotation[][] parameterAnnotations = method.getParameterAnnotations();
        List<Parameter> parameters = new ArrayList<>();
        for (Annotation[] parameterAnnotation : parameterAnnotations) {
            Parameter parameter = null;
            for (Annotation annotation : parameterAnnotation) {
                if (Parameter.class.equals(annotation.annotationType())) {
                    parameter = (Parameter) annotation;
                }
            }
            if (parameter == null) {
                throw new XOException("Cannot determine parameter names for '" + method.getName() + "', all parameters must be annotated with '" + Parameter.class.getName() + "'.");
            }
            parameters.add(parameter);
        }
        // A non-Iterable return type means the query yields a single result.
        boolean singleResult = !Iterable.class.isAssignableFrom(methodReturnType);
        return new ResultOfMethodMetadata<>(annotatedMethod, query, returnType, resultOf.usingThisAs(), parameters, singleResult);
    }
}
public class Bond { /** * Returns the spread value such that the sum of cash flows of the bond discounted with a given reference curve * with the additional spread coincides with a given price . * @ param bondPrice The target price as double . * @ param referenceCurve The reference curve used for discounting the coupon payments . * @ param model The model under which the product is valued . * @ return The optimal spread value . */ public double getSpread ( double bondPrice , Curve referenceCurve , AnalyticModel model ) { } }
GoldenSectionSearch search = new GoldenSectionSearch ( - 2.0 , 2.0 ) ; while ( search . getAccuracy ( ) > 1E-11 && ! search . isDone ( ) ) { double x = search . getNextPoint ( ) ; double fx = getValueWithGivenSpreadOverCurve ( 0.0 , referenceCurve , x , model ) ; double y = ( bondPrice - fx ) * ( bondPrice - fx ) ; search . setValue ( y ) ; } return search . getBestPoint ( ) ;
public class HsqlTimer { /** * Causes the specified Runnable to be executed periodically in the * background , starting after the specified delay . * @ return opaque reference to the internal task * @ param period the cycle period * @ param relative if true , fixed rate sheduling else fixed delay scheduling * @ param delay in milliseconds * @ param runnable the Runnable to execute . * @ throws IllegalArgumentException if runnable is null or period is < = 0 */ public Object schedulePeriodicallyAfter ( final long delay , final long period , final Runnable runnable , final boolean relative ) throws IllegalArgumentException { } }
if ( period <= 0 ) { throw new IllegalArgumentException ( "period <= 0" ) ; } else if ( runnable == null ) { throw new IllegalArgumentException ( "runnable == null" ) ; } return addTask ( now ( ) + delay , runnable , period , relative ) ;
public class S { /** * Format a date with specified pattern , lang , locale and timezone . The locale * comes from the engine instance specified * @ param template * @ param date * @ param pattern * @ param locale * @ param timezone * @ return format result */ public static String format ( ITemplate template , Date date , String pattern , Locale locale , String timezone ) { } }
if ( null == date ) throw new NullPointerException ( ) ; RythmEngine engine = null == template ? RythmEngine . get ( ) : template . __engine ( ) ; DateFormat df = ( null == engine ? IDateFormatFactory . DefaultDateFormatFactory . INSTANCE : engine . dateFormatFactory ( ) ) . createDateFormat ( template , pattern , locale , timezone ) ; return df . format ( date ) ;
public class ClassicLockView { /** * Triggers the back action on the form . * @ return true if it was handled , false otherwise */ public boolean onBackPressed ( ) { } }
if ( subForm != null ) { final boolean shouldDisplayPreviousForm = configuration . allowLogIn ( ) || configuration . allowSignUp ( ) ; if ( shouldDisplayPreviousForm ) { resetHeaderTitle ( ) ; showSignUpTerms ( subForm instanceof CustomFieldsFormView ) ; removeSubForm ( ) ; clearFocus ( ) ; return true ; } } return formLayout != null && formLayout . onBackPressed ( ) ;
public class BoxFactory { /** * Creates a single new box from an element . * @ param n The source DOM element * @ param display the display : property value that is used when the box style is not known ( e . g . anonymous boxes ) * @ return A new box of a subclass of { @ link ElementBox } based on the value of the ' display ' CSS property */ public ElementBox createBox ( ElementBox parent , Element n , String display ) { } }
ElementBox root = null ; // New box style NodeData style = decoder . getElementStyleInherited ( n ) ; if ( style == null ) style = createAnonymousStyle ( display ) ; // Special ( HTML ) tag names if ( config . getUseHTML ( ) && html . isTagSupported ( n ) ) { root = html . createBox ( parent , n , viewport , style ) ; } // Not created yet - - create a box according to the display value if ( root == null ) { root = createElementInstance ( parent , n , style ) ; } root . setBase ( baseurl ) ; root . setViewport ( viewport ) ; root . setParent ( parent ) ; root . setOrder ( next_order ++ ) ; return root ;
public class LoadJobConfiguration { /** * Creates a builder for a BigQuery Load Job configuration given the destination table and source * URIs . */ public static Builder newBuilder ( TableId destinationTable , List < String > sourceUris ) { } }
return new Builder ( ) . setDestinationTable ( destinationTable ) . setSourceUris ( sourceUris ) ;
public class PoolsImpl { /** * Changes the number of compute nodes that are assigned to a pool . * You can only resize a pool when its allocation state is steady . If the pool is already resizing , the request fails with status code 409 . When you resize a pool , the pool ' s allocation state changes from steady to resizing . You cannot resize pools which are configured for automatic scaling . If you try to do this , the Batch service returns an error 409 . If you resize a pool downwards , the Batch service chooses which nodes to remove . To remove specific nodes , use the pool remove nodes API instead . * @ param poolId The ID of the pool to resize . * @ param poolResizeParameter The parameters for the request . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws BatchErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void resize ( String poolId , PoolResizeParameter poolResizeParameter ) { } }
resizeWithServiceResponseAsync ( poolId , poolResizeParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class FormValidator { /** * Start live validation - whenever focus changes from view with validations upon itself , validators will run . < br / > * Don ' t forget to call { @ link # stopLiveValidation ( Object ) } once you are done . * @ param fragment fragment with views to validate , there can be only one continuous validation per target object ( fragment ) * @ param callback callback invoked whenever there is some validation fail */ public static void startLiveValidation ( final Fragment fragment , final IValidationCallback callback ) { } }
startLiveValidation ( fragment , fragment . getView ( ) , callback ) ;
public class MonitoringServiceImpl { /** * Filter events . * @ param events the events * @ return the list of filtered events */ private List < Event > filterEvents ( List < Event > events ) { } }
List < Event > filteredEvents = new ArrayList < Event > ( ) ; for ( Event event : events ) { if ( ! filter ( event ) ) { filteredEvents . add ( event ) ; } } return filteredEvents ;
public class NetworkClient { /** * Switches the network mode from auto subnet mode to custom subnet mode . * < p > Sample code : * < pre > < code > * try ( NetworkClient networkClient = NetworkClient . create ( ) ) { * ProjectGlobalNetworkName network = ProjectGlobalNetworkName . of ( " [ PROJECT ] " , " [ NETWORK ] " ) ; * Operation response = networkClient . switchToCustomModeNetwork ( network ) ; * < / code > < / pre > * @ param network Name of the network to be updated . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation switchToCustomModeNetwork ( ProjectGlobalNetworkName network ) { } }
SwitchToCustomModeNetworkHttpRequest request = SwitchToCustomModeNetworkHttpRequest . newBuilder ( ) . setNetwork ( network == null ? null : network . toString ( ) ) . build ( ) ; return switchToCustomModeNetwork ( request ) ;
public class SightResourcesImpl { /** * Creates s copy of the specified Sight . * It mirrors to the following Smartsheet REST API method : POST / sights / { sightId } / move * @ param sightId the Id of the Sight * @ param destination the destination to copy to * @ return the newly created Sight resource . * @ throws IllegalArgumentException if any argument is null or empty string * @ throws InvalidRequestException if there is any problem with the REST API request * @ throws AuthorizationException if there is any problem with the REST API authorization ( access token ) * @ throws ResourceNotFoundException if the resource cannot be found * @ throws ServiceUnavailableException if the REST API service is not available ( possibly due to rate limiting ) * @ throws SmartsheetException if there is any other error during the operation */ public Sight moveSight ( long sightId , ContainerDestination destination ) throws SmartsheetException { } }
return this . createResource ( "sights/" + sightId + "/move" , Sight . class , destination ) ;
public class TreeVectorizer { /** * Vectorizes the passed in sentences * @ param sentences the sentences to convert to trees * @ param label the label for the sentence * @ param labels all of the possible labels for the trees * @ return a list of trees pre converted with CNF and * binarized and word vectors at the leaves of the trees * @ throws Exception */ public List < Tree > getTreesWithLabels ( String sentences , String label , List < String > labels ) throws Exception { } }
List < Tree > ret = new ArrayList < > ( ) ; List < Tree > baseTrees = parser . getTreesWithLabels ( sentences , label , labels ) ; for ( Tree t : baseTrees ) { Tree binarized = treeTransformer . transform ( t ) ; binarized = cnfTransformer . transform ( binarized ) ; ret . add ( binarized ) ; } return ret ;
public class AltsHandshakerClient { /** * Sets the start server fields for the passed handshake request . */ private void setStartServerFields ( HandshakerReq . Builder req , ByteBuffer inBytes ) { } }
ServerHandshakeParameters serverParameters = ServerHandshakeParameters . newBuilder ( ) . addRecordProtocols ( RECORD_PROTOCOL ) . build ( ) ; StartServerHandshakeReq . Builder startServerReq = StartServerHandshakeReq . newBuilder ( ) . addApplicationProtocols ( APPLICATION_PROTOCOL ) . putHandshakeParameters ( HandshakeProtocol . ALTS . getNumber ( ) , serverParameters ) . setInBytes ( ByteString . copyFrom ( inBytes . duplicate ( ) ) ) ; if ( handshakerOptions . getRpcProtocolVersions ( ) != null ) { startServerReq . setRpcVersions ( handshakerOptions . getRpcProtocolVersions ( ) ) ; } req . setServerStart ( startServerReq ) ;
public class MathBindings { /** * Binding for { @ link java . lang . Math # incrementExact ( long ) } * @ param a the value to increment * @ return the result * @ throws ArithmeticException if the result overflows a long */ public static LongBinding incrementExact ( final ObservableLongValue a ) { } }
return createLongBinding ( ( ) -> Math . incrementExact ( a . get ( ) ) , a ) ;
public class NativeLibraryLoader {

    /**
     * Load the helper {@link Class} as a byte array, to be redefined in the
     * specified {@link ClassLoader}.
     *
     * @param clazz the helper {@link Class} provided by this bundle
     * @return the binary content of the helper {@link Class}
     * @throws ClassNotFoundException helper class resource not found or
     *         reading it failed (the I/O cause is preserved)
     */
    private static byte[] classToByteArray(Class<?> clazz) throws ClassNotFoundException {
        // Derive the simple resource name; getResource resolves it relative
        // to the class's own package.
        String fileName = clazz.getName();
        int lastDot = fileName.lastIndexOf('.');
        if (lastDot > 0) {
            fileName = fileName.substring(lastDot + 1);
        }
        URL classUrl = clazz.getResource(fileName + ".class");
        if (classUrl == null) {
            throw new ClassNotFoundException(clazz.getName());
        }
        // try-with-resources closes both streams on every path, replacing the
        // manual closeQuietly() plumbing of the original.
        byte[] buf = new byte[1024];
        try (InputStream in = classUrl.openStream();
                ByteArrayOutputStream out = new ByteArrayOutputStream(4096)) {
            for (int r; (r = in.read(buf)) != -1; ) {
                out.write(buf, 0, r);
            }
            return out.toByteArray();
        } catch (IOException ex) {
            // Wrap I/O failures as a loading failure, keeping the cause.
            throw new ClassNotFoundException(clazz.getName(), ex);
        }
    }
}
public class CommerceWarehouseUtil { /** * Returns a range of all the commerce warehouses where groupId = & # 63 ; and commerceCountryId = & # 63 ; and primary = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceWarehouseModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param commerceCountryId the commerce country ID * @ param primary the primary * @ param start the lower bound of the range of commerce warehouses * @ param end the upper bound of the range of commerce warehouses ( not inclusive ) * @ return the range of matching commerce warehouses */ public static List < CommerceWarehouse > findByG_C_P ( long groupId , long commerceCountryId , boolean primary , int start , int end ) { } }
return getPersistence ( ) . findByG_C_P ( groupId , commerceCountryId , primary , start , end ) ;
public class ElementBox { /** * Updates the stacking parent values and registers the z - index for this parent . */ @ Override protected void updateStackingContexts ( ) { } }
super . updateStackingContexts ( ) ; if ( stackingParent != null ) { if ( formsStackingContext ( ) ) // all the positioned boxes are considered as separate stacking contexts { stackingParent . getStackingContext ( ) . registerChildContext ( this ) ; if ( scontext != null ) // clear this context if it exists ( remove old children ) scontext . clear ( ) ; } }
public class TargetCleanupTask { /** * Performs a cleanup on all loaded targets . */ @ Scheduled ( cron = "${deployer.main.targets.cleanup.cron}" ) public void cleanupAllTargets ( ) { } }
try { logger . info ( "Starting cleanup for all targets" ) ; targetService . getAllTargets ( ) . forEach ( Target :: cleanup ) ; } catch ( TargetServiceException e ) { logger . error ( "Error getting loaded targets" , e ) ; }
public class HadoopImageDownload { /** * Download of an image by URL . * @ return The image as a BufferedImage object . * @ throws Exception */ private BufferedImage downloadImage ( ) throws Exception { } }
BufferedImage image = null ; InputStream in = null ; try { // first try reading with the default class URL url = new URL ( imageUrl ) ; HttpURLConnection conn = null ; boolean success = false ; try { conn = ( HttpURLConnection ) url . openConnection ( ) ; conn . setInstanceFollowRedirects ( followRedirects ) ; conn . setConnectTimeout ( connectionTimeout ) ; // TO DO : add retries when connections times out conn . setReadTimeout ( readTimeout ) ; conn . connect ( ) ; success = true ; } catch ( Exception e ) { // System . out . println ( " Connection related exception at url : " + imageUrl ) ; // throw e ; } finally { if ( ! success ) { conn . disconnect ( ) ; } } success = false ; try { in = conn . getInputStream ( ) ; success = true ; } catch ( Exception e ) { /* System . out . println ( " Exception when getting the input stream from the connection at url : " + imageUrl ) ; throw e ; */ } finally { if ( ! success ) { in . close ( ) ; } } image = ImageIO . read ( in ) ; } catch ( IllegalArgumentException e ) { // this exception is probably thrown because of a greyscale jpeg image System . out . println ( "Exception: " + e . getMessage ( ) + " | Image: " + imageUrl ) ; image = ImageIOGreyScale . read ( in ) ; // retry with the modified class } catch ( MalformedURLException e ) { System . out . println ( "Malformed url exception. Url: " + imageUrl ) ; // throw e ; } return image ;
public class FunctionTypeBuilder { /** * Infer the return type from JSDocInfo . * @ param fromInlineDoc Indicates whether return type is inferred from inline * doc attached to function name */ FunctionTypeBuilder inferReturnType ( @ Nullable JSDocInfo info , boolean fromInlineDoc ) { } }
if ( info != null ) { JSTypeExpression returnTypeExpr = fromInlineDoc ? info . getType ( ) : info . getReturnType ( ) ; if ( returnTypeExpr != null ) { returnType = returnTypeExpr . evaluate ( templateScope , typeRegistry ) ; returnTypeInferred = false ; } } return this ;
public class GridRowSet { /** * Give the regular grid * @ return ResultSet * @ throws SQLException */ public ResultSet getResultSet ( ) throws SQLException { } }
SimpleResultSet srs = new SimpleResultSet ( this ) ; srs . addColumn ( "THE_GEOM" , Types . JAVA_OBJECT , "GEOMETRY" , 0 , 0 ) ; srs . addColumn ( "ID" , Types . INTEGER , 10 , 0 ) ; srs . addColumn ( "ID_COL" , Types . INTEGER , 10 , 0 ) ; srs . addColumn ( "ID_ROW" , Types . INTEGER , 10 , 0 ) ; return srs ;
public class LinearSolverFactory_DDRM { /** * Linear solver which uses QR pivot decomposition . These solvers can handle singular systems * and should never fail . For singular systems , the solution might not be as accurate as a * pseudo inverse that uses SVD . * For singular systems there are multiple correct solutions . The optimal 2 - norm solution is the * solution vector with the minimal 2 - norm and is unique . If the optimal solution is not computed * then the basic solution is returned . See { @ link org . ejml . dense . row . linsol . qr . BaseLinearSolverQrp _ DDRM } * for details . There is only a runtime difference for small matrices , 2 - norm solution is slower . * Two different solvers are available . Compute Q will compute the Q matrix once then use it multiple times . * If the solution for a single vector is being found then this should be set to false . If the pseudo inverse * is being found or the solution matrix has more than one columns AND solve is being called numerous multiples * times then this should be set to true . * @ param computeNorm2 true to compute the minimum 2 - norm solution for singular systems . Try true . * @ param computeQ Should it precompute Q or use house holder . Try false ; * @ return Pseudo inverse type solver using QR with column pivots . */ public static LinearSolverDense < DMatrixRMaj > leastSquaresQrPivot ( boolean computeNorm2 , boolean computeQ ) { } }
QRColPivDecompositionHouseholderColumn_DDRM decomposition = new QRColPivDecompositionHouseholderColumn_DDRM ( ) ; if ( computeQ ) return new SolvePseudoInverseQrp_DDRM ( decomposition , computeNorm2 ) ; else return new LinearSolverQrpHouseCol_DDRM ( decomposition , computeNorm2 ) ;
public class LoganSquare { /** * Parse a parameterized object from a String . Note : parsing from an InputStream should be preferred over parsing from a String if possible . * @ param jsonString The JSON string being parsed . * @ param jsonObjectType The ParameterizedType describing the object . Ex : LoganSquare . parse ( is , new ParameterizedType & lt ; MyModel & lt ; OtherModel & gt ; & gt ; ( ) { } ) ; */ public static < E > E parse ( String jsonString , ParameterizedType < E > jsonObjectType ) throws IOException { } }
return mapperFor ( jsonObjectType ) . parse ( jsonString ) ;
public class StringParser { /** * Parse the given { @ link String } as short with the specified radix . * @ param sStr * The string to parse . May be < code > null < / code > . * @ param nRadix * The radix to use . Must be & ge ; { @ link Character # MIN _ RADIX } and & le ; * { @ link Character # MAX _ RADIX } . * @ param nDefault * The default value to be returned if the passed object could not be * converted to a valid value . * @ return The default if the string does not represent a valid value . */ public static short parseShort ( @ Nullable final String sStr , @ Nonnegative final int nRadix , final short nDefault ) { } }
if ( sStr != null && sStr . length ( ) > 0 ) try { return Short . parseShort ( sStr , nRadix ) ; } catch ( final NumberFormatException ex ) { // Fall through } return nDefault ;
public class FunctionLepKeyResolver {

    /**
     * {@inheritDoc}
     *
     * Resolves the single key segment from the method's function-key
     * parameter, translated to the LEP naming convention.
     */
    @Override
    protected String[] getAppendSegments(SeparatorSegmentedLepKey baseKey, LepMethod method, LepManagerService managerService) {
        // Read the mandatory function-key parameter and normalize it.
        String rawFunctionKey = getRequiredStrParam(method, PARAM_FUNCTION_KEY);
        String translatedFuncKey = translateToLepConvention(rawFunctionKey);
        return new String[] { translatedFuncKey };
    }
}
public class ImageFrame { /** * Displays the specified image in a new ImageFrame instance . * Creates a new ImageFrame , sets the specified image , and schedules a * call to < tt > setVisible ( true ) < / tt > on the AWT event dispatching thread . * @ param img to be displayed * @ return the frame that displays the image . * @ see # display ( Img ) * @ since 1.4 */ public static ImageFrame display ( final Image img ) { } }
ImageFrame frame = new ImageFrame ( ) . useDefaultSettings ( ) ; SwingUtilities . invokeLater ( ( ) -> { frame . setImage ( img ) ; frame . setVisible ( true ) ; } ) ; return frame ;
public class CalendarText { /** * / * [ deutsch ] * < p > Liefert einen { @ code Accessor } f & uuml ; r alle Textformen des * angegebenen chronologischen Elements . < / p > * < p > Textformen k & ouml ; nnen unter Umst & auml ; nden in verschiedenen * Varianten vorkommen . Als Variantenbezug dient bei enum - Varianten * der Name der Enum - Auspr & auml ; gung ( Beispiel & quot ; WIDE & quot ; in * der Variante { @ code TextWidth } ) , im boolean - Fall sind die Literale * & quot ; true & quot ; und & quot ; false & quot ; zu verwenden . < / p > * < p > W & auml ; hrend { @ code getStdMonths ( ) } , { @ code getWeekdays ( ) } etc . in * erster Linie auf JDK - Vorgaben beruhen , dient diese Methode dazu , * chronologiespezifische Texte zu beschaffen , die nicht im JDK enthalten * sind . Textformen werden intern im Verzeichnis & quot ; calendar & quot ; * des Klassenpfads mit Hilfe von properties - Dateien im UTF - 8 - Format * gespeichert . Der Basisname dieser Ressourcen ist der Kalendertyp . Als * Textschluuml ; ssel dient die Kombination aus Elementname , optional * Varianten in der Form & quot ; ( variant1 | variant2 | . . . | variantN ) & quot ; , * dem Unterstrich und schlie & szlig ; lich einem numerischen Suffix mit * Basis 1 ( f & uuml ; r & Auml ; ra - Elemente Basis 0 ) . Wird in den Ressourcen zum * angegebenen Schl & uuml ; ssel kein Eintrag gefunden , liefert diese Methode * einfach den Namen des mit dem Element assoziierten enum - Werts . < / p > * < p > Zum Beispiel versucht die Suche nach der abgek & uuml ; rzten Form der historischen & Auml ; ra * { @ code HistoricEra . 
AD } in der alternativen Form Schl & uuml ; ssel in dieser Reihenfolge zu finden * ( mit dem Pr & auml ; fix E , falls es einen Eintrag & quot ; useShortKeys = true & quot ; gibt ) : < / p > * < ol > * < li > value of & quot ; E ( a | alt ) _ 1 & quot ; < / li > * < li > value of & quot ; E ( a ) _ 1 & quot ; < / li > * < li > value of & quot ; E _ 1 & quot ; < / li > * < li > < i > fallback = & gt ; AD < / i > < / li > * < / ol > * @ param < V > generic type of element values based on enums * @ param element element text forms are searched for * @ param variants text form variants ( optional ) * @ return accessor for any text forms * @ throws MissingResourceException if for given calendar type there are no text resource files */ public < V extends Enum < V > > TextAccessor getTextForms ( ChronoElement < V > element , String ... variants ) { } }
return this . getTextForms ( element . name ( ) , element . getType ( ) , variants ) ;
public class CATBrowseConsumer {

    /**
     * Helper method. Wraps the action of getting the next message for a
     * browser session and deals with the exceptions which may be thrown. If an
     * exception is thrown the client is notified (if appropriate) and an
     * OperationFailedException is thrown. This allows the caller to notice
     * something bad has happened, but it need not take any further action.
     *
     * @param browserSession The browser session to get the next message from.
     * @param conversation   The conversation to notify of any exceptions.
     * @param requestNumber  The request number to use when performing
     *                       exception notification.
     * @return SIBusMessage The message (if any) returned from the browser
     *         session next call.
     * @throws OperationFailedException Thrown if something bad happens. Before
     *         this is thrown the appropriate client notification of badness
     *         will have happened.
     */
    private SIBusMessage getNextMessage(BrowserSession browserSession, Conversation conversation, short requestNumber) throws OperationFailedException {
        // Entry trace (guarded so tracing cost is only paid when enabled).
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getNextMessage", new Object[] { browserSession, conversation, "" + requestNumber });
        SIBusMessage msg = null;
        try {
            msg = browserSession.next();
        } catch (SIException e) {
            // No FFDC code needed
            // Only FFDC if we haven't received a meTerminated event.
            if (!((ConversationState) conversation.getAttachment()).hasMETerminated()) {
                FFDCFilter.processException(e, CLASS_NAME + ".getNextMessage", CommsConstants.CATBROWSECONSUMER_GETNEXTMESSAGE_01, this);
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, e.getMessage(), e);
            // Tell the client about the failure, then signal it to the caller.
            StaticCATHelper.sendExceptionToClient(e, CommsConstants.CATBROWSECONSUMER_GETNEXTMESSAGE_01, conversation, requestNumber);
            throw new OperationFailedException();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getNextMessage", msg);
        return msg;
    }
}
public class AccessControlEntryImpl { /** * Adds specified privileges to this entry . * @ param privileges privileges to add . * @ return true if at least one of privileges was added . */ protected boolean addIfNotPresent ( Privilege [ ] privileges ) { } }
ArrayList < Privilege > list = new ArrayList < Privilege > ( ) ; Collections . addAll ( list , privileges ) ; boolean res = combineRecursively ( list , privileges ) ; this . privileges . addAll ( list ) ; return res ;
public class MapsInner { /** * Gets an integration account map . * @ param resourceGroupName The resource group name . * @ param integrationAccountName The integration account name . * @ param mapName The integration account map name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < IntegrationAccountMapInner > getAsync ( String resourceGroupName , String integrationAccountName , String mapName , final ServiceCallback < IntegrationAccountMapInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , integrationAccountName , mapName ) , serviceCallback ) ;
public class AbstractPendingLinkingCandidate { /** * Returns the resolved string representation of the argument types . The simple names of * the types are used . The string representation includes the parenthesis . */ protected String getArgumentTypesAsString ( ) { } }
if ( ! getArguments ( ) . isEmpty ( ) ) { StringBuilder b = new StringBuilder ( ) ; b . append ( "(" ) ; for ( int i = 0 ; i < getArguments ( ) . size ( ) ; ++ i ) { LightweightTypeReference actualType = getActualType ( getArguments ( ) . get ( i ) ) ; if ( actualType != null ) b . append ( actualType . getHumanReadableName ( ) ) ; else b . append ( "null" ) ; if ( i < getArguments ( ) . size ( ) - 1 ) b . append ( "," ) ; } b . append ( ")" ) ; return b . toString ( ) ; } return "" ;
public class ThrottledApiHandler { /** * Retrieve a specific summoner spell * This method does not count towards the rate limit and is not affected by the throttle * @ param id The id of the spell * @ param data Additional information to retrieve * @ return The spell * @ see < a href = https : / / developer . riotgames . com / api / methods # ! / 649/2167 > Official API documentation < / a > */ public Future < SummonerSpell > getSummonerSpell ( int id , SpellData data ) { } }
return new DummyFuture < > ( handler . getSummonerSpell ( id , data ) ) ;
public class BaseHolder { /** * Find the key for the baseholder that holds this session . */ public String find ( RemoteBaseSession obj ) { } }
if ( m_mapChildHolders == null ) return null ; return m_mapChildHolders . find ( obj ) ;
public class RouteFilterRulesInner { /** * Updates a route in the specified route filter . * @ param resourceGroupName The name of the resource group . * @ param routeFilterName The name of the route filter . * @ param ruleName The name of the route filter rule . * @ param routeFilterRuleParameters Parameters supplied to the update route filter rule operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the RouteFilterRuleInner object if successful . */ public RouteFilterRuleInner update ( String resourceGroupName , String routeFilterName , String ruleName , PatchRouteFilterRule routeFilterRuleParameters ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , routeFilterName , ruleName , routeFilterRuleParameters ) . toBlocking ( ) . last ( ) . body ( ) ;
public class ChronoLocalDateTimeImpl { /** * Returns a copy of this date - time with the new date and time , checking * to see if a new object is in fact required . * @ param newDate the date of the new date - time , not null * @ param newTime the time of the new date - time , not null * @ return the date - time , not null */ private ChronoLocalDateTimeImpl < D > with ( Temporal newDate , LocalTime newTime ) { } }
if ( date == newDate && time == newTime ) { return this ; } // Validate that the new Temporal is a ChronoLocalDate ( and not something else ) D cd = ChronoLocalDateImpl . ensureValid ( date . getChronology ( ) , newDate ) ; return new ChronoLocalDateTimeImpl < > ( cd , newTime ) ;
public class SarlFormalParameterImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
switch ( featureID ) { case SarlPackage . SARL_FORMAL_PARAMETER__DEFAULT_VALUE : setDefaultValue ( ( XExpression ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class RollupUtils {

    /**
     * Returns the offset in milliseconds from the row base timestamp from a
     * data point qualifier at the given offset (for compacted columns).
     *
     * @param qualifier   The qualifier to parse
     * @param byte_offset An offset within the byte array
     * @param interval    The RollupInterval object with data about the interval
     * @return The offset in milliseconds from the base time
     */
    public static long getOffsetFromRollupQualifier(final byte[] qualifier, final int byte_offset, final RollupInterval interval) {
        long offset = 0;
        // The top bit pattern of the first qualifier byte selects the
        // encoding: millisecond-resolution (4 bytes) vs. second-resolution
        // (2 bytes). Masks/shifts come from Const; exact bit layout is
        // defined there -- NOTE(review): assumed per OpenTSDB qualifier
        // format, confirm against Const.
        if ((qualifier[byte_offset] & Const.MS_BYTE_FLAG) == Const.MS_BYTE_FLAG) {
            // Millisecond encoding: extract the offset field and convert
            // milliseconds back to interval units (divide by 1000).
            offset = ((Bytes.getUnsignedInt(qualifier, byte_offset) & 0x0FFFFFC0) >>> Const.MS_FLAG_BITS) / 1000;
        } else {
            // Second encoding: 16-bit offset with the low flag bits stripped.
            offset = (Bytes.getUnsignedShort(qualifier, byte_offset) & 0xFFFF) >>> Const.FLAG_BITS;
        }
        // Scale the interval-relative offset to milliseconds.
        return offset * interval.getIntervalSeconds() * 1000;
    }
}
public class LongIntVectorSlice { /** * Gets the idx ' th entry in the vector . * @ param idx The index of the element to get . * @ return The value of the element to get . */ public int get ( long idx ) { } }
if ( idx < 0 || idx >= size ) { return 0 ; } return elements [ SafeCast . safeLongToInt ( idx + start ) ] ;
public class Span {

    /**
     * Adds a MessageEvent to the {@code Span}.
     *
     * <p>This function can be used by higher level applications to record a
     * messaging event.
     *
     * <p>This method should always be overridden by users whose API versions
     * are larger or equal to {@code 0.12}.
     *
     * @param messageEvent the message to add.
     * @since 0.12
     */
    public void addMessageEvent(MessageEvent messageEvent) {
        Utils.checkNotNull(messageEvent, "messageEvent");
        // Default implementation bridges to addNetworkEvent() so existing
        // derived classes (including mocks) need not override this method.
        addNetworkEvent(BaseMessageEventUtils.asNetworkEvent(messageEvent));
    }
}
public class RaftImpl { /** * Finds the first index at which conflicting _ term starts , going back from start _ index towards the head of the log */ protected int getFirstIndexOfConflictingTerm ( int start_index , int conflicting_term ) { } }
Log log = raft . log_impl ; int first = Math . max ( 1 , log . firstAppended ( ) ) , last = log . lastAppended ( ) ; int retval = Math . min ( start_index , last ) ; for ( int i = retval ; i >= first ; i -- ) { LogEntry entry = log . get ( i ) ; if ( entry == null || entry . term != conflicting_term ) break ; retval = i ; } return retval ;
public class ZooClassDef {

    /**
     * Methods used for bootstrapping the schema of newly created databases.
     * Builds the meta schema definition for {@code ZooClassDef} itself, by
     * hand-registering its persistent fields with fixed field IDs.
     *
     * @return Meta schema instance
     */
    public static ZooClassDef bootstrapZooClassDef() {
        // The numeric arguments (51, 50, 51, 0) are the bootstrap OIDs/ids of
        // the meta class -- NOTE(review): exact meaning defined by the
        // ZooClassDef constructor, not visible here.
        ZooClassDef meta = new ZooClassDef(ZooClassDef.class.getName(), 51, 50, 51, 0);
        // Register each persistent field with its fixed field ID (70..76);
        // order and IDs must stay stable for on-disk compatibility.
        ArrayList<ZooFieldDef> fields = new ArrayList<ZooFieldDef>();
        fields.add(new ZooFieldDef(meta, "className", String.class.getName(), 0, JdoType.STRING, 70));
        fields.add(new ZooFieldDef(meta, "oidSuper", long.class.getName(), 0, JdoType.PRIMITIVE, 71));
        fields.add(new ZooFieldDef(meta, "schemaId", long.class.getName(), 0, JdoType.PRIMITIVE, 72));
        fields.add(new ZooFieldDef(meta, "versionId", short.class.getName(), 0, JdoType.PRIMITIVE, 73));
        fields.add(new ZooFieldDef(meta, "localFields", ArrayList.class.getName(), 0, JdoType.SCO, 74));
        fields.add(new ZooFieldDef(meta, "prevVersionOid", long.class.getName(), 0, JdoType.PRIMITIVE, 75));
        fields.add(new ZooFieldDef(meta, "evolutionOperations", ArrayList.class.getName(), 0, JdoType.SCO, 76));
        // new ZooFieldDef(this, allFields, ZooFieldDef[].class.getName(), typeOid, JdoType.ARRAY);
        meta.registerFields(fields);
        // Wire the meta definition to its own Java class and resolve types.
        meta.cls = ZooClassDef.class;
        meta.className = ZooClassDef.class.getName();
        meta.associateFields();
        meta.associateJavaTypes();
        return meta;
    }
}
public class Ledgers {

    /**
     * Fences out a Log made up of the given ledgers.
     *
     * @param ledgers       An ordered list of LedgerMetadata objects representing all the Ledgers in the log.
     * @param bookKeeper    A reference to the BookKeeper client to use.
     * @param config        Configuration to use.
     * @param traceObjectId Used for logging.
     * @return A Map of LedgerId to LastAddConfirmed for those Ledgers that
     *         were fenced out and had a different LastAddConfirmed than what
     *         their LedgerMetadata was indicating.
     * @throws DurableDataLogException If an exception occurred. The causing exception is wrapped inside it.
     */
    static Map<Long, Long> fenceOut(List<LedgerMetadata> ledgers, BookKeeper bookKeeper, BookKeeperConfig config, String traceObjectId) throws DurableDataLogException {
        // Fence out the ledgers, in descending order. During the process, we need to determine whether the ledgers we
        // fenced out actually have any data in them, and update the LedgerMetadata accordingly.
        // We need to fence out at least MIN_FENCE_LEDGER_COUNT ledgers that are not empty to properly ensure we fenced
        // the log correctly and identify any empty ledgers (Since this algorithm is executed upon every recovery, any
        // empty ledgers should be towards the end of the Log).
        int nonEmptyCount = 0;
        val result = new HashMap<Long, Long>();
        // Iterate from the tail of the log backwards.
        val iterator = ledgers.listIterator(ledgers.size());
        while (iterator.hasPrevious() && (nonEmptyCount < MIN_FENCE_LEDGER_COUNT)) {
            LedgerMetadata ledgerMetadata = iterator.previous();
            // Opening with fencing prevents further writes to this ledger.
            LedgerHandle handle = openFence(ledgerMetadata.getLedgerId(), bookKeeper, config);
            if (handle.getLastAddConfirmed() != NO_ENTRY_ID) {
                // Non-empty.
                nonEmptyCount++;
            }
            if (ledgerMetadata.getStatus() == LedgerMetadata.Status.Unknown) {
                // We did not know the status of this Ledger before, but now we do.
                result.put(ledgerMetadata.getLedgerId(), handle.getLastAddConfirmed());
            }
            close(handle);
            log.info("{}: Fenced out Ledger {}.", traceObjectId, ledgerMetadata);
        }
        return result;
    }
}
public class PTXImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // Reset the CS feature to its default (empty) state; delegate every other feature to the superclass.
        switch (featureID) {
        case AfplibPackage.PTX__CS:
            getCS().clear();
            return;
        }
        super.eUnset(featureID);
    }
}
public class CommerceShipmentUtil {
    /**
     * Returns the commerce shipments before and after the current commerce shipment
     * in the ordered set where groupId = &#63;.
     *
     * @param commerceShipmentId the primary key of the current commerce shipment
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce shipment
     * @throws NoSuchShipmentException if a commerce shipment with the primary key could not be found
     */
    public static CommerceShipment[] findByGroupId_PrevAndNext(long commerceShipmentId, long groupId, OrderByComparator<CommerceShipment> orderByComparator) throws com.liferay.commerce.exception.NoSuchShipmentException {
        // Pure delegation to the service-builder persistence layer.
        return getPersistence().findByGroupId_PrevAndNext(commerceShipmentId, groupId, orderByComparator);
    }
}
public class HostName {
    /**
     * Returns whether the given host matches this one. For hosts to match, they must represent the same
     * addresses or have the same host names. Hosts are not resolved when matching. Also, hosts must have the
     * same port and service. They must have the same masks if they are host names.
     * Even if two hosts are invalid, they match if they have the same invalid string.
     *
     * @param host the host to compare against
     * @return true if the hosts match
     */
    public boolean matches(HostName host) {
        if (this == host) {
            return true;
        }
        if (isValid()) {
            if (host.isValid()) {
                if (isAddressString()) {
                    // Both parse as addresses: compare address, port and service.
                    return host.isAddressString()
                            && asAddressString().equals(host.asAddressString())
                            && Objects.equals(getPort(), host.getPort())
                            && Objects.equals(getService(), host.getService());
                }
                if (host.isAddressString()) {
                    // One is an address, the other a host name: never equal.
                    return false;
                }
                // Both are host names: compare the name first, then prefix/mask/port/service.
                String thisHost = parsedHost.getHost();
                String otherHost = host.parsedHost.getHost();
                if (!thisHost.equals(otherHost)) {
                    return false;
                }
                return Objects.equals(parsedHost.getEquivalentPrefixLength(), host.parsedHost.getEquivalentPrefixLength())
                        && Objects.equals(parsedHost.getMask(), host.parsedHost.getMask())
                        && Objects.equals(parsedHost.getPort(), host.parsedHost.getPort())
                        && Objects.equals(parsedHost.getService(), host.parsedHost.getService());
            }
            return false;
        }
        // Both invalid: they match only on the identical invalid string.
        return !host.isValid() && toString().equals(host.toString());
    }
}
public class EnumElement { /** * Though not mentioned in the spec , enum names use C + + scoping rules , meaning that enum constants * are siblings of their declaring element , not children of it . */ static void validateValueUniquenessInScope ( String qualifiedName , List < TypeElement > nestedElements ) { } }
Set < String > names = new LinkedHashSet < > ( ) ; for ( TypeElement nestedElement : nestedElements ) { if ( nestedElement instanceof EnumElement ) { EnumElement enumElement = ( EnumElement ) nestedElement ; for ( EnumConstantElement constant : enumElement . constants ( ) ) { String name = constant . name ( ) ; if ( ! names . add ( name ) ) { throw new IllegalStateException ( "Duplicate enum constant " + name + " in scope " + qualifiedName ) ; } } } }
public class SimpleSectionSkin { /** * * * * * * Methods * * * * * */ @ Override protected void handleEvents ( final String EVENT_TYPE ) { } }
super . handleEvents ( EVENT_TYPE ) ; if ( "VISIBILITY" . equals ( EVENT_TYPE ) ) { Helper . enableNode ( valueText , gauge . isValueVisible ( ) ) ; Helper . enableNode ( titleText , ! gauge . getTitle ( ) . isEmpty ( ) ) ; Helper . enableNode ( unitText , gauge . isValueVisible ( ) && ! gauge . getUnit ( ) . isEmpty ( ) ) ; } else if ( "SECTION" . equals ( EVENT_TYPE ) ) { sections = gauge . getSections ( ) ; resize ( ) ; } else if ( "RECALC" . equals ( EVENT_TYPE ) ) { redraw ( ) ; setBar ( gauge . getCurrentValue ( ) ) ; }
public class GlobalizationPreferences {
    /**
     * Restore the object to the initial state.
     *
     * @return this, for chaining
     * @throws UnsupportedOperationException if this instance has been frozen
     * @hide draft / provisional / internal are hidden on Android
     */
    public GlobalizationPreferences reset() {
        // Frozen instances are immutable; refuse any modification.
        if (isFrozen()) {
            throw new UnsupportedOperationException("Attempt to modify immutable object");
        }
        // Null out every explicit setting so defaults are lazily recomputed.
        locales = null;
        territory = null;
        calendar = null;
        collator = null;
        breakIterators = null;
        timezone = null;
        currency = null;
        dateFormats = null;
        numberFormats = null;
        implicitLocales = null;
        return this;
    }
}
public class StrategyRunner { /** * Instantiates an strategy , according to the provided properties mapping . * @ param properties the properties to be used . * @ param trainingModel datamodel containing the training interactions to be * considered when generating the strategy . * @ param testModel datamodel containing the interactions in the test split * to be considered when generating the strategy . * @ return the strategy generated according to the provided properties * @ throws ClassNotFoundException when { @ link Class # forName ( java . lang . String ) } * fails * @ throws IllegalAccessException when { @ link java . lang . reflect . Constructor # newInstance ( java . lang . Object [ ] ) } * fails * @ throws InstantiationException when { @ link java . lang . reflect . Constructor # newInstance ( java . lang . Object [ ] ) } * fails * @ throws InvocationTargetException when { @ link java . lang . reflect . Constructor # newInstance ( java . lang . Object [ ] ) } * fails * @ throws NoSuchMethodException when { @ link Class # getConstructor ( java . lang . Class [ ] ) } * fails */ public static EvaluationStrategy < Long , Long > instantiateStrategy ( final Properties properties , final DataModelIF < Long , Long > trainingModel , final DataModelIF < Long , Long > testModel ) throws ClassNotFoundException , IllegalAccessException , InstantiationException , InvocationTargetException , NoSuchMethodException { } }
Double threshold = Double . parseDouble ( properties . getProperty ( RELEVANCE_THRESHOLD ) ) ; String strategyClassName = properties . getProperty ( STRATEGY ) ; Class < ? > strategyClass = Class . forName ( strategyClassName ) ; // get strategy EvaluationStrategy < Long , Long > strategy = null ; if ( strategyClassName . contains ( "RelPlusN" ) ) { Integer number = Integer . parseInt ( properties . getProperty ( RELPLUSN_N ) ) ; Long seed = Long . parseLong ( properties . getProperty ( RELPLUSN_SEED ) ) ; strategy = new RelPlusN ( trainingModel , testModel , number , threshold , seed ) ; } else { Object strategyObj = strategyClass . getConstructor ( DataModelIF . class , DataModelIF . class , double . class ) . newInstance ( trainingModel , testModel , threshold ) ; if ( strategyObj instanceof EvaluationStrategy ) { @ SuppressWarnings ( "unchecked" ) EvaluationStrategy < Long , Long > strategyTemp = ( EvaluationStrategy < Long , Long > ) strategyObj ; strategy = strategyTemp ; } } return strategy ;
public class N { /** * Convert the specified Map to a two columns < code > DataSet < / code > : one column is for keys and one column is for values * @ param keyColumnName * @ param valueColumnName * @ param m * @ return */ public static DataSet newDataSet ( final String keyColumnName , final String valueColumnName , final Map < ? , ? > m ) { } }
final List < Object > keyColumn = new ArrayList < > ( m . size ( ) ) ; final List < Object > valueColumn = new ArrayList < > ( m . size ( ) ) ; for ( Map . Entry < ? , ? > entry : m . entrySet ( ) ) { keyColumn . add ( entry . getKey ( ) ) ; valueColumn . add ( entry . getValue ( ) ) ; } final List < String > columnNameList = N . asList ( keyColumnName , valueColumnName ) ; final List < List < Object > > columnList = N . asList ( keyColumn , valueColumn ) ; return newDataSet ( columnNameList , columnList ) ;
public class TextUtil {
    /**
     * Compute the better metric representing the given time amount and reply a string
     * representation of the given amount with this selected unit.
     * <p>This function tries to use a greater metric unit.
     *
     * @param amount is the amount expressed in the given unit.
     * @param unit is the unit of the given amount.
     * @return a string representation of the given amount.
     */
    @Pure
    @SuppressWarnings({"checkstyle:magicnumber", "checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity"})
    public static String formatTime(double amount, TimeUnit unit) {
        double amt;
        double coef = 1.;
        // Conversion factor from the input unit to seconds.
        switch (unit) {
        case DAYS:
            coef = 86400.;
            break;
        case HOURS:
            coef = 3600.;
            break;
        case MINUTES:
            coef = 60.;
            break;
        case SECONDS:
            break;
        case MILLISECONDS:
            coef = 1e-3;
            break;
        case MICROSECONDS:
            coef = 1e-6;
            break;
        case NANOSECONDS:
            coef = 1e-9;
            break;
        default:
            throw new IllegalArgumentException();
        }
        // amount is in seconds from here on.
        amt = amount * coef;
        final StringBuilder text = new StringBuilder();
        String centuries = ""; //$NON-NLS-1$
        String years = ""; //$NON-NLS-1$
        String days = ""; //$NON-NLS-1$
        String hours = ""; //$NON-NLS-1$
        String minutes = ""; //$NON-NLS-1$
        String seconds = ""; //$NON-NLS-1$
        long ah = 0;
        long am = 0;
        long as = 0;
        // Bit mask of emitted components: 32=centuries, 16=years, 8=days, 4=hours, 2=minutes, 1=seconds.
        int idx = 0;
        if (amt >= 3153600000.) {
            final long a = (long) Math.floor(amt / 3153600000.);
            centuries = Locale.getString((a >= 2) ? "TIME_FORMAT_Cs" : "TIME_FORMAT_C", //$NON-NLS-1$ //$NON-NLS-2$
                    Long.toString(a));
            amt -= a * 3153600000.;
            text.append(centuries);
            idx |= 32;
        }
        if (amt >= 31536000.) {
            final long a = (long) Math.floor(amt / 31536000.);
            years = Locale.getString((a >= 2) ? "TIME_FORMAT_Ys" : "TIME_FORMAT_Y", //$NON-NLS-1$ //$NON-NLS-2$
                    Long.toString(a));
            amt -= a * 31536000.;
            if (text.length() > 0) {
                text.append(' ');
            }
            text.append(years);
            idx |= 16;
        }
        if (amt >= 86400.) {
            final long a = (long) Math.floor(amt / 86400.);
            days = Locale.getString((a >= 2) ? "TIME_FORMAT_Ds" : "TIME_FORMAT_D", //$NON-NLS-1$ //$NON-NLS-2$
                    Long.toString(a));
            amt -= a * 86400.;
            if (text.length() > 0) {
                text.append(' ');
            }
            text.append(days);
            idx |= 8;
        }
        if (amt >= 3600.) {
            ah = (long) Math.floor(amt / 3600.);
            hours = Long.toString(ah);
            // Zero-pad to two digits for the H:M:S rendering.
            if (ah < 10.) {
                hours = "0" + hours; //$NON-NLS-1$
            }
            amt -= ah * 3600.;
            idx |= 4;
        }
        if (amt >= 60.) {
            am = (long) Math.floor(amt / 60.);
            minutes = Long.toString(am);
            if (am < 10.) {
                minutes = "0" + minutes; //$NON-NLS-1$
            }
            amt -= am * 60.;
            idx |= 2;
        }
        if (amt >= 0. || idx == 0) {
            if (idx >= 8) {
                // Days or more were emitted: integer seconds are precise enough.
                as = (long) Math.floor(amt);
                seconds = Long.toString(as);
            } else {
                // Small amounts: keep millisecond precision.
                final NumberFormat fmt = new DecimalFormat("#0.000"); //$NON-NLS-1$
                seconds = fmt.format(amt);
            }
            idx |= 1;
        }
        if ((idx & 7) == 7) {
            // Hours, minutes and seconds are all present: use the compact H:M:S format.
            if (text.length() > 0) {
                text.append(' ');
            }
            if (idx >= 8 && as > 0) {
                if (as < 10.) {
                    seconds = "0" + seconds; //$NON-NLS-1$
                }
            } else if (idx < 8 && amt > 0. && amt < 10.) {
                seconds = "0" + seconds; //$NON-NLS-1$
            }
            text.append(Locale.getString("TIME_FORMAT_HMS", hours, minutes, seconds)); //$NON-NLS-1$
        } else {
            // Otherwise emit each present component with its own localized label.
            if (ah > 0) {
                if (text.length() > 0) {
                    text.append(' ');
                }
                text.append(Locale.getString((ah >= 2) ? "TIME_FORMAT_Hs" : "TIME_FORMAT_H", //$NON-NLS-1$ //$NON-NLS-2$
                        hours));
            }
            if (am > 0) {
                if (text.length() > 0) {
                    text.append(' ');
                }
                text.append(Locale.getString((am >= 2) ? "TIME_FORMAT_Ms" : "TIME_FORMAT_M", //$NON-NLS-1$ //$NON-NLS-2$
                        minutes));
            }
            if (idx >= 8 && as > 0) {
                if (text.length() > 0) {
                    text.append(' ');
                }
                text.append(Locale.getString((as >= 2) ? "TIME_FORMAT_Ss" : "TIME_FORMAT_S", //$NON-NLS-1$ //$NON-NLS-2$
                        seconds));
            } else if (idx < 8 && amt > 0.) {
                if (text.length() > 0) {
                    text.append(' ');
                }
                text.append(Locale.getString((amt >= 2.) ? "TIME_FORMAT_Ss" : "TIME_FORMAT_S", //$NON-NLS-1$ //$NON-NLS-2$
                        seconds));
            }
        }
        return text.toString();
    }
}
public class EvaluateSilhouette {
    /**
     * Evaluate a single clustering.
     *
     * @param db Database
     * @param rel Data relation
     * @param dq Distance query
     * @param c Clustering
     * @return Average silhouette
     */
    public double evaluateClustering(Database db, Relation<O> rel, DistanceQuery<O> dq, Clustering<?> c) {
        List<? extends Cluster<?>> clusters = c.getAllClusters();
        MeanVariance msil = new MeanVariance();
        int ignorednoise = 0;
        for (Cluster<?> cluster : clusters) {
            // Note: we treat 1-element clusters the same as noise.
            if (cluster.size() <= 1 || cluster.isNoise()) {
                switch (noiseOption) {
                case IGNORE_NOISE:
                    ignorednoise += cluster.size();
                    continue; // Ignore noise elements
                case TREAT_NOISE_AS_SINGLETONS:
                    // As suggested in Rousseeuw, we use 0 for singletons.
                    msil.put(0., cluster.size());
                    continue;
                case MERGE_NOISE:
                    break; // Treat as cluster below
                }
            }
            ArrayDBIDs ids = DBIDUtil.ensureArray(cluster.getIDs());
            double[] as = new double[ids.size()]; // temporary storage.
            DBIDArrayIter it1 = ids.iter(), it2 = ids.iter();
            for (it1.seek(0); it1.valid(); it1.advance()) {
                // a: In-cluster distances
                double a = as[it1.getOffset()]; // Already computed distances
                for (it2.seek(it1.getOffset() + 1); it2.valid(); it2.advance()) {
                    // Each pairwise distance is accumulated symmetrically into both entries.
                    final double dist = dq.distance(it1, it2);
                    a += dist;
                    as[it2.getOffset()] += dist;
                }
                a /= (ids.size() - 1);
                // b: minimum average distance to other clusters:
                double b = Double.POSITIVE_INFINITY;
                for (Cluster<?> ocluster : clusters) {
                    if (ocluster == /* yes, reference identity */ cluster) {
                        continue; // Same cluster
                    }
                    if (ocluster.size() <= 1 || ocluster.isNoise()) {
                        switch (noiseOption) {
                        case IGNORE_NOISE:
                            continue; // Ignore noise elements
                        case TREAT_NOISE_AS_SINGLETONS:
                            // Treat noise cluster as singletons:
                            for (DBIDIter it3 = ocluster.getIDs().iter(); it3.valid(); it3.advance()) {
                                final double dist = dq.distance(it1, it3);
                                b = dist < b ? dist : b; // Minimum average
                            }
                            continue;
                        case MERGE_NOISE:
                            break; // Treat as cluster below
                        }
                    }
                    final DBIDs oids = ocluster.getIDs();
                    double btmp = 0.;
                    for (DBIDIter it3 = oids.iter(); it3.valid(); it3.advance()) {
                        btmp += dq.distance(it1, it3);
                    }
                    btmp /= oids.size(); // Average
                    b = btmp < b ? btmp : b; // Minimum average
                }
                // One cluster only?
                b = b < Double.POSITIVE_INFINITY ? b : a;
                msil.put((b - a) / (b > a ? b : a));
            }
        }
        double penalty = 1.;
        // Only if {@link NoiseHandling#IGNORE_NOISE}:
        if (penalize && ignorednoise > 0) {
            penalty = (rel.size() - ignorednoise) / (double) rel.size();
        }
        final double meansil = penalty * msil.getMean();
        final double stdsil = penalty * msil.getSampleStddev();
        if (LOG.isStatistics()) {
            LOG.statistics(new StringStatistic(key + ".silhouette.noise-handling", noiseOption.toString()));
            if (ignorednoise > 0) {
                LOG.statistics(new LongStatistic(key + ".silhouette.noise", ignorednoise));
            }
            LOG.statistics(new DoubleStatistic(key + ".silhouette.mean", meansil));
            LOG.statistics(new DoubleStatistic(key + ".silhouette.stddev", stdsil));
        }
        // Publish the measurement into the result hierarchy for UIs/reports.
        EvaluationResult ev = EvaluationResult.findOrCreate(db.getHierarchy(), c, "Internal Clustering Evaluation", "internal evaluation");
        MeasurementGroup g = ev.findOrCreateGroup("Distance-based Evaluation");
        g.addMeasure("Silhouette +-" + FormatUtil.NF2.format(stdsil), meansil, -1., 1., 0., false);
        db.getHierarchy().resultChanged(ev);
        return meansil;
    }
}
public class RestService {
    /**
     * Returns the identifier of the entity addressed by the request.
     *
     * <p>This base implementation always returns 0; override it if the entity
     * has a meaningful ID.</p>
     *
     * @param path the request path
     * @param content the request body
     * @param headers the request headers
     * @return 0 in this default implementation
     */
    protected Long getEntityId(String path, Object content, Map<String, String> headers) {
        return 0L;
    }
}
public class KeyValueSource { /** * A helper method to build a KeyValueSource implementation based on the specified { @ link IList } . < br / > * The key returned by this KeyValueSource implementation is < b > ALWAYS < / b > the name of the list itself , * whereas the value are the entries of the list , one by one . This implementation behaves like a MultiMap * with a single key but multiple values . * @ param list list to build a KeyValueSource implementation * @ param < V > value type of the list * @ return KeyValueSource implementation based on the specified list */ public static < V > KeyValueSource < String , V > fromList ( IList < ? extends V > list ) { } }
return new ListKeyValueSource < V > ( list . getName ( ) ) ;
public class GVRCompressedTextureLoader { /** * Register a loader with the ' sniffer ' . * ' Factory loaders ' are pre - registered . To load a format we don ' t support , * create a { @ link GVRCompressedTextureLoader } descendant . Then , before * trying to load any files in that format , create an instance and call * { @ link # register ( ) } : * < pre > * new MyCompressedFormat ( ) . register ( ) ; * < / pre > */ public void register ( ) { } }
synchronized ( loaders ) { loaders . add ( this ) ; maximumHeaderLength = 0 ; for ( GVRCompressedTextureLoader loader : loaders ) { int headerLength = loader . headerLength ( ) ; if ( headerLength > maximumHeaderLength ) { maximumHeaderLength = headerLength ; } } }
public class DFSEvaluatorPreserver {
    /**
     * Closes the readerWriter, which in turn closes the FileSystem.
     *
     * @throws Exception if closing the underlying writer fails
     */
    @Override
    public synchronized void close() throws Exception {
        // Idempotent: close only once, and only if a writer was ever created.
        if (this.readerWriter != null && !this.writerClosed) {
            this.readerWriter.close();
            this.writerClosed = true;
        }
    }
}
public class JPAWSJarURLConnection { /** * Passthrough operations for archive referencing wsjar URL support . Synchronized because calling getInputStream ( ) * while an InputStream is still active should return the active InputStream . */ @ Override public synchronized InputStream getInputStream ( ) throws IOException { } }
if ( connected == false ) { // Implicitly open the connection if it has not yet been done so . connect ( ) ; } Object token = ThreadIdentityManager . runAsServer ( ) ; try { if ( inputStream == null ) { if ( "" . equals ( archivePath ) ) { inputStream = new FileInputStream ( urlTargetFile ) ; } else { inputStream = new FilterZipFileInputStream ( urlTargetFile , archivePath ) ; } } } finally { ThreadIdentityManager . reset ( token ) ; } return inputStream ;
public class DeleteValues {
    /**
     * {@inheritDoc}
     */
    public void twoPhaseCommit() throws IOException {
        // Nothing to do unless the prepare phase acquired locks.
        if (locks != null)
            try {
                if (bckFiles == null)
                    return;
                // Delete every backup file; anything that cannot be deleted now is
                // handed to the cleaner for deferred removal.
                for (int i = 0, length = bckFiles.length; i < length; i++) {
                    File f = bckFiles[i];
                    if (f != null && !f.delete())
                        // Possible place of error: FileNotFoundException when we delete/update an existing
                        // Value and then add/update again.
                        // After the time the Cleaner will delete the file which is mapped to the Value.
                        // Don't use cleaner! Care about transaction-style files isolation per-user etc.
                        cleaner.addFile(f);
                }
            } finally {
                // Always release every lock, even if a backup file could not be deleted.
                for (int i = 0, length = locks.length; i < length; i++) {
                    ValueFileLock fl = locks[i];
                    fl.unlock();
                }
            }
    }
}
public class StringUtils {
    /**
     * Joins the given {@code list} into a comma-separated string.
     *
     * @param list The list to join.
     * @return A comma-separated string representation of the given {@code list},
     *         or null if the list itself is null.
     */
    public static String join(List<String> list) {
        if (list == null) {
            return null;
        }
        // String.join renders null elements as "null", matching the previous
        // StringBuilder-based implementation.
        return String.join(",", list);
    }
}
public class KeystoreManager { /** * Given an object representing a keystore , returns an actual stream for that keystore . * Allows you to provide an actual keystore as an InputStream or a byte [ ] array , * or a reference to a keystore file as a File object or a String path . * @ param keystore a keystore containing your private key and the certificate signed by Apple ( File , InputStream , byte [ ] , KeyStore or String for a file path ) * @ return A stream to the keystore . * @ throws FileNotFoundException */ static InputStream streamKeystore ( Object keystore ) throws InvalidKeystoreReferenceException { } }
validateKeystoreParameter ( keystore ) ; try { if ( keystore instanceof InputStream ) return ( InputStream ) keystore ; else if ( keystore instanceof KeyStore ) return new WrappedKeystore ( ( KeyStore ) keystore ) ; else if ( keystore instanceof File ) return new BufferedInputStream ( new FileInputStream ( ( File ) keystore ) ) ; else if ( keystore instanceof String ) return new BufferedInputStream ( new FileInputStream ( ( String ) keystore ) ) ; else if ( keystore instanceof byte [ ] ) return new ByteArrayInputStream ( ( byte [ ] ) keystore ) ; else return null ; // we should not get here since validateKeystore ensures that the reference is valid } catch ( Exception e ) { throw new InvalidKeystoreReferenceException ( "Invalid keystore reference: " + e . getMessage ( ) ) ; }
public class AbsDiff {
    /**
     * Move to next following node.
     *
     * @param paramRtx the {@link INodeReadTrx} to use
     * @param paramRevision the {@link ERevision} constant
     * @return true, if cursor moved, false otherwise
     * @throws TTIOException on read errors
     */
    private boolean moveToFollowingNode(final INodeReadTrx paramRtx, final ERevision paramRevision) throws TTIOException {
        boolean moved = false;
        // Climb towards the root while the current node has no right sibling,
        // decrementing the tracked depth for the matching revision on each step.
        while (!((ITreeStructData) paramRtx.getNode()).hasRightSibling() && ((ITreeStructData) paramRtx.getNode()).hasParent() && paramRtx.getNode().getDataKey() != mRootKey) {
            moved = paramRtx.moveTo(paramRtx.getNode().getParentKey());
            if (moved) {
                switch (paramRevision) {
                case NEW:
                    mDepth.decrementNewDepth();
                    break;
                case OLD:
                    mDepth.decrementOldDepth();
                    break;
                }
            }
        }
        // If we reached the traversal root, reposition on the document root first.
        if (paramRtx.getNode().getDataKey() == mRootKey) {
            paramRtx.moveTo(ROOT_NODE);
        }
        // Finally step to the right sibling (the "following" node).
        moved = paramRtx.moveTo(((ITreeStructData) paramRtx.getNode()).getRightSiblingKey());
        return moved;
    }
}
public class SourceStreamSetControl {
    /**
     * Clears the messages held at the source side of this stream set.
     *
     * <p>indoubtAction determines how indoubt messages are handled:
     * INDOUBT_EXCEPTION causes indoubt messages to be sent to the exception destination,
     * with the possibility that the messages are duplicated.
     * INDOUBT_REALLOCATE causes the messages to be reallocated, possibly to the exception
     * destination, and possibly duplicated.
     * INDOUBT_DELETE causes indoubt messages to be discarded, risking their loss.
     * INDOUBT_LEAVE means no action is taken for indoubt messages, so that the target
     * system must recover for progress to be made.</p>
     *
     * <p>If the target system has been deleted this routine will reallocate the
     * not-indoubt messages and then deal with the indoubts according to the indoubtAction.</p>
     *
     * @param indoubtAction how to treat indoubt messages
     * @throws SIMPControllableNotFoundException if this controllable is no longer valid
     * @throws SIMPRuntimeOperationFailedException if reallocation or message removal fails
     */
    public void clearMessagesAtSource(IndoubtAction indoubtAction) throws SIMPControllableNotFoundException, SIMPRuntimeOperationFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "clearMessagesAtSource");
        // This method used to be exposed by the MBeans, but this is no longer the case.
        // Instead, the exposed methods will be delete/moveAll which implicitly use this path.
        assertValidControllable();
        synchronized (_streamSet) {
            boolean foundUncommittedMsgs = false;
            boolean reallocateAllMsgs = false;
            // Start by reallocating all messages which have not yet been sent.
            // If indoubtAction == INDOUBT_REALLOCATE then reallocate all messages,
            // including those which have already been sent.
            if (indoubtAction == IndoubtAction.INDOUBT_REALLOCATE) {
                reallocateAllMsgs = true;
                // This will be PtoP only.
                try {
                    _sourceStreamManager.reallocate(reallocateAllMsgs);
                } catch (SIException e) {
                    FFDCFilter.processException(e, "com.ibm.ws.sib.processor.runtime.SourceStreamSetControl.clearMessagesAtSource", "1:424:1.39", this);
                    SIMPRuntimeOperationFailedException finalE = new SIMPRuntimeOperationFailedException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0003", new Object[] { "SourceStreamSetControl.clearMessagesAtSource", "1:432:1.39", e }, null), e);
                    SibTr.exception(tc, finalE);
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "clearMessagesAtSource", finalE);
                    throw finalE;
                }
            }
            // Next delete all indoubt messages from the Message store, unless we have
            // been told to leave them there. Since any reallocated messages have been
            // replaced by Silence in the streams, we can assume we want to work on all
            // messages which remain in the stream.
            if (indoubtAction != IndoubtAction.INDOUBT_LEAVE) {
                boolean discard = false;
                if (indoubtAction == IndoubtAction.INDOUBT_DELETE)
                    discard = true;
                // This is an iterator over all messages in all the source streams in this
                // streamSet. There may be uncommitted messages in this list, but
                // getSIMPMessage will return null for these as they don't have a valid
                // ItemStream id, so we won't try to delete them.
                Iterator itr = this.getTransmitMessagesIterator(SIMPConstants.SIMPCONTROL_RETURN_ALL_MESSAGES);
                TransmitMessage xmitMsg = null;
                String state = null;
                while (itr.hasNext()) {
                    xmitMsg = (TransmitMessage) itr.next();
                    state = xmitMsg.getState();
                    if (state.equals(State.COMMITTING.toString())) {
                        // Ignore committing messages. There's nothing we can do with them.
                        foundUncommittedMsgs = true;
                    } else {
                        try {
                            // Try to delete it.
                            SIMPMessage msg = xmitMsg.getSIMPMessage();
                            if (msg != null) {
                                _sourceStreamManager.removeMessage(msg);
                                QueuedMessage queuedMessage = (QueuedMessage) msg.getControlAdapter();
                                // A null message implies it's already gone from the MsgStore.
                                if (queuedMessage != null)
                                    queuedMessage.moveMessage(discard);
                            }
                        } catch (SIException e) {
                            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.runtime.SourceStreamSetControl.clearMessagesAtSource", "1:496:1.39", this);
                            SIMPRuntimeOperationFailedException finalE = new SIMPRuntimeOperationFailedException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0003", new Object[] { "SourceStreamSetControl.clearMessagesAtSource", "1:504:1.39", e }, null), e);
                            SibTr.exception(tc, finalE);
                            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                                SibTr.exit(tc, "clearMessagesAtSource", finalE);
                            throw finalE;
                        }
                    }
                }
            }
            // d477883 - The streams are no longer flushed as part of a deleteAll/reallocateAll:
            // this stream may still be used afterwards, so no resources are nulled out here.
            // (The previous implementation sent a Flushed message via
            // _sourceStreamManager.forceFlush() when foundUncommittedMsgs == false and then
            // removed the source stream; that code has been retired.)
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "clearMessagesAtSource");
    }
}
public class LayoutViews { /** * Gets a List of Views matching a given class . * @ param root the root ViewGroup to traverse . * @ param classType the class to find . * @ param < T > the class to find . * @ return a List of every matching View encountered . */ @ NonNull public static < T > List < T > findByClass ( @ NonNull ViewGroup root , Class < T > classType ) { } }
FinderByClass < T > finderByClass = new FinderByClass < > ( classType ) ; LayoutTraverser . build ( finderByClass ) . traverse ( root ) ; return finderByClass . getViews ( ) ;
public class PoiUtil { /** * 用密码保护工作簿 。 只有在xlsx格式才起作用 。 * @ param workbook 工作簿 。 * @ param password 保护密码 。 */ public static void protectWorkbook ( Workbook workbook , String password ) { } }
if ( StringUtils . isEmpty ( password ) ) return ; if ( workbook instanceof XSSFWorkbook ) { val xsswb = ( XSSFWorkbook ) workbook ; for ( int i = 0 , ii = xsswb . getNumberOfSheets ( ) ; i < ii ; ++ i ) { xsswb . getSheetAt ( i ) . protectSheet ( password ) ; } }
public class BatchResult {
    /**
     * Returns a view of the portion of the responses between fromIndex
     * (inclusive) and toIndex (exclusive).
     *
     * @deprecated Use {@link #getResponses()}
     */
    @Deprecated
    public List<E> subList(int fromIndex, int toIndex) {
        return responses.subList(fromIndex, toIndex);
    }
}
public class RequestTemplate { /** * Returns an immutable copy of the Headers for this request . * @ return the currently applied headers . */ public Map < String , Collection < String > > headers ( ) { } }
Map < String , Collection < String > > headerMap = new TreeMap < > ( String . CASE_INSENSITIVE_ORDER ) ; this . headers . forEach ( ( key , headerTemplate ) -> { List < String > values = new ArrayList < > ( headerTemplate . getValues ( ) ) ; /* add the expanded collection , but only if it has values */ if ( ! values . isEmpty ( ) ) { headerMap . put ( key , Collections . unmodifiableList ( values ) ) ; } } ) ; return Collections . unmodifiableMap ( headerMap ) ;
public class CompositeReportPlugin { /** * Execute a { @ link ReportOperation } on the provided { @ link ReportPlugin } s but * assure that this happens only once . * @ param reportPlugins * The { @ link ReportPlugin } s . * @ param operation * The { @ link ReportOperation } . * @ param executedPlugins * The already executed { @ link ReportPlugin } s . * @ throws ReportException * If a problem is reported . */ private void run ( Map < String , ReportPlugin > reportPlugins , ReportOperation operation , Set < String > executedPlugins ) throws ReportException { } }
for ( Map . Entry < String , ReportPlugin > entry : reportPlugins . entrySet ( ) ) { if ( executedPlugins . add ( entry . getKey ( ) ) ) { operation . run ( entry . getValue ( ) ) ; } }
public class RuleHelper {
    /**
     * Casts the supplied object to the nominated class.
     *
     * <p>Returns null when the object is null or is not an instance of the
     * requested class, honoring the documented "null if the object cannot be
     * cast" contract. (The previous implementation threw a
     * NullPointerException when passed a null object, because it called
     * {@code obj.getClass()} unconditionally.)</p>
     *
     * @param obj The object, may be null
     * @param clz The class to cast to
     * @return The cast object, or null if the object cannot be cast
     */
    public <T> T cast(Object obj, Class<T> clz) {
        // Null guard: obj.getClass() would otherwise throw a NullPointerException.
        if (obj == null || !clz.isAssignableFrom(obj.getClass())) {
            return null;
        }
        return clz.cast(obj);
    }
}
public class ItemRule { /** * Convert the given item parameters list into an { @ code ItemRuleMap } . * @ param itemParametersList the item parameters list to convert * @ return the item rule map * @ throws IllegalRuleException if an illegal rule is found */ public static ItemRuleMap toItemRuleMap ( List < ItemParameters > itemParametersList ) throws IllegalRuleException { } }
if ( itemParametersList == null || itemParametersList . isEmpty ( ) ) { return null ; } ItemRuleMap itemRuleMap = new ItemRuleMap ( ) ; for ( ItemParameters parameters : itemParametersList ) { itemRuleMap . putItemRule ( toItemRule ( parameters ) ) ; } return itemRuleMap ;