signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class EmbeddedXMLConfigValidator {
    /**
     * {@inheritDoc}
     *
     * Rejects any configuration that enables dropins, which are not
     * supported when running embedded.
     *
     * @throws ConfigValidationException if dropins are enabled
     */
    @Override
    public void validateConfig(ServerConfiguration configuration) throws ConfigValidationException {
        // Drop-ins are enabled by default; they must be off in an embedded environment.
        final boolean dropinsActive = configuration.isDropinsEnabled();
        if (dropinsActive) {
            Tr.fatal(tc, "fatal.configValidator.dropinsEnabled");
            // Propagate a validation failure to terminate the server.
            throw new ConfigValidationException("Drop-ins enabled in embedded environment");
        }
    }
}
public class SystemUtil {
    /**
     * Gets the host name of the local machine.
     *
     * @return the local host name, or {@code null} if it cannot be resolved
     */
    public static String getHostName() {
        try {
            return java.net.InetAddress.getLocalHost().getHostName();
        } catch (java.net.UnknownHostException ignored) {
            // Resolution failed; callers treat null as "host name unknown".
            return null;
        }
    }
}
public class BlockingListMaker { /** * Specify the total time to wait for the elements of the list to become available */ public BlockingListMaker < K , E > waitFor ( long time , TimeUnit unit ) { } }
this . nanoTimeout = time == 0 ? 1 : NANOSECONDS . convert ( time , unit ) ; return this ;
public class AnnotationCollectorTransform { /** * Adds a new syntax error to the source unit and then continues . * @ param message the message * @ param node the node for the error report * @ param source the source unit for the error report */ protected void addError ( String message , ASTNode node , SourceUnit source ) { } }
source . getErrorCollector ( ) . addErrorAndContinue ( new SyntaxErrorMessage ( new SyntaxException ( message , node . getLineNumber ( ) , node . getColumnNumber ( ) , node . getLastLineNumber ( ) , node . getLastColumnNumber ( ) ) , source ) ) ;
public class DistanceTreeEvaluator { /** * Evaluate the goodness of fit of a given tree to the original distance * matrix . The returned value is the coefficient of variation , i . e . the * square root of the LS error normalized by the mean . * This measure can also give an estimate of the quality of the distance * matrix , because a bad fit may mean that the distance is non - additive . * @ param tree * Phylogenetic Distance Tree to evaluate * @ param matrix * Distance Matrix with the original distances * @ return the square root of the average tree LS error normalized by the * average tree distance ( coefficient of variation , CV ) . */ public static double evaluate ( Phylogeny tree , DistanceMatrix matrix ) { } }
int numSequences = matrix . getSize ( ) ; List < PhylogenyNode > externalNodes = tree . getExternalNodes ( ) ; HashMap < String , PhylogenyNode > externalNodesHashMap = new HashMap < String , PhylogenyNode > ( ) ; Set < PhylogenyNode > path = new HashSet < PhylogenyNode > ( ) ; for ( PhylogenyNode node : externalNodes ) { externalNodesHashMap . put ( node . getName ( ) , node ) ; } int count = 0 ; double averageMatrixDistance = 0.0 ; double averageTreeDistance = 0.0 ; double averageTreeErrorDistance = 0.0 ; for ( int row = 0 ; row < numSequences - 1 ; row ++ ) { String nodeName1 = matrix . getIdentifier ( row ) ; PhylogenyNode node1 = externalNodesHashMap . get ( nodeName1 ) ; markPathToRoot ( node1 , path ) ; for ( int col = row + 1 ; col < numSequences ; col ++ ) { count ++ ; String nodeName2 = matrix . getIdentifier ( col ) ; PhylogenyNode node2 = externalNodesHashMap . get ( nodeName2 ) ; double distance = matrix . getValue ( col , row ) ; averageMatrixDistance = averageMatrixDistance + distance ; PhylogenyNode commonParent = findCommonParent ( node2 , path ) ; if ( commonParent != null ) { double treeDistance = getNodeDistance ( commonParent , node1 ) + getNodeDistance ( commonParent , node2 ) ; averageTreeDistance += treeDistance ; averageTreeErrorDistance += ( distance - treeDistance ) * ( distance - treeDistance ) ; logger . info ( "{} {} Distance: {}Tree: {} difference: {}" , nodeName1 , nodeName2 , distance , treeDistance , Math . abs ( distance - treeDistance ) ) ; } else { logger . warn ( "Unable to find common parent with {} {}" , node1 , node2 ) ; } } path . clear ( ) ; } averageMatrixDistance /= count ; averageTreeDistance /= count ; averageTreeErrorDistance /= count ; logger . info ( "Average matrix distance: {}" , averageMatrixDistance ) ; logger . info ( "Average tree distance: {}" , averageTreeDistance ) ; logger . info ( "Average LS error: {}" , averageTreeErrorDistance ) ; return Math . sqrt ( averageTreeErrorDistance ) / averageMatrixDistance ;
public class UserApi { /** * Revokes an impersonation token . Available only for admin users . * < pre > < code > GitLab Endpoint : DELETE / users / : user _ id / impersonation _ tokens / : impersonation _ token _ id < / code > < / pre > * @ param userIdOrUsername the user in the form of an Integer ( ID ) , String ( username ) , or User instance * @ param tokenId the impersonation token ID to revoke * @ throws GitLabApiException if any exception occurs */ public void revokeImpersonationToken ( Object userIdOrUsername , Integer tokenId ) throws GitLabApiException { } }
if ( tokenId == null ) { throw new RuntimeException ( "tokenId cannot be null" ) ; } Response . Status expectedStatus = ( isApiVersion ( ApiVersion . V3 ) ? Response . Status . OK : Response . Status . NO_CONTENT ) ; delete ( expectedStatus , null , "users" , getUserIdOrUsername ( userIdOrUsername ) , "impersonation_tokens" , tokenId ) ;
public class Aligner { /** * Performs global alignment * @ param alignmentScoring scoring system * @ param seq1 first sequence * @ param seq2 second sequence * @ return array of mutations */ public static < S extends Sequence < S > > Alignment < S > alignGlobal ( AlignmentScoring < S > alignmentScoring , S seq1 , S seq2 ) { } }
if ( alignmentScoring instanceof AffineGapAlignmentScoring ) return alignGlobalAffine ( ( AffineGapAlignmentScoring < S > ) alignmentScoring , seq1 , seq2 ) ; if ( alignmentScoring instanceof LinearGapAlignmentScoring ) return alignGlobalLinear ( ( LinearGapAlignmentScoring < S > ) alignmentScoring , seq1 , seq2 ) ; throw new RuntimeException ( "Unknown scoring type." ) ;
public class References { /** * wrapper for reference replacement */ static String replace ( final String text , final boolean toReference ) { } }
return toReference ? replaceWithRef ( text ) : replaceFromRef ( text ) ;
public class Tuple { /** * Adds a binding to this tuple . */ public Tuple add ( VarBindingDef binding ) { } }
if ( binding != null ) { VarDef var = binding . getVarDef ( ) ; if ( var == null ) { throw new IllegalArgumentException ( "Invalid binding=" + binding + ": variable undefined" ) ; } VarValueDef value = binding . getValueDef ( ) ; if ( value == null ) { throw new IllegalArgumentException ( "Invalid binding=" + binding + ": value undefined" ) ; } remove ( var ) ; bindings_ . put ( var , binding ) ; properties_ . addAll ( value . getProperties ( ) . iterator ( ) ) ; } return this ;
public class SpringComponent { /** * Gets the bean invocation return entries . */ protected Object [ ] invoke ( Object [ ] arguments ) throws InvocationTargetException , IllegalAccessException { } }
Object returnValue = invoke ( method , arguments ) ; if ( ! scatterOutput ) { logger . trace ( "Using return as is" ) ; return new Object [ ] { returnValue } ; } if ( returnValue instanceof Object [ ] ) { logger . trace ( "Scatter array return" ) ; return ( Object [ ] ) returnValue ; } if ( returnValue instanceof Collection ) { logger . trace ( "Scatter collection return" ) ; return ( ( Collection ) returnValue ) . toArray ( ) ; } logger . debug ( "Scatter singleton return" ) ; return returnValue == null ? EMPTY_ARRAY : new Object [ ] { returnValue } ;
public class VariationalAutoencoder { /** * Return the reconstruction error for this variational autoencoder . < br > * < b > NOTE ( important ) : < / b > This method is used ONLY for VAEs that have a standard neural network loss function ( i . e . , * an { @ link org . nd4j . linalg . lossfunctions . ILossFunction } instance such as mean squared error ) instead of using a * probabilistic reconstruction distribution P ( x | z ) for the reconstructions ( as presented in the VAE architecture by * Kingma and Welling ) . < br > * You can check if the VAE has a loss function using { @ link # hasLossFunction ( ) } < br > * Consequently , the reconstruction error is a simple deterministic function ( no Monte - Carlo sampling is required , * unlike { @ link # reconstructionProbability ( INDArray , int ) } and { @ link # reconstructionLogProbability ( INDArray , int ) } ) * @ param data The data to calculate the reconstruction error on * @ return Column vector of reconstruction errors for each example ( shape : [ numExamples , 1 ] ) */ public INDArray reconstructionError ( INDArray data ) { } }
if ( ! hasLossFunction ( ) ) { throw new IllegalStateException ( "Cannot use reconstructionError method unless the variational autoencoder is " + "configured with a standard loss function (via LossFunctionWrapper). For VAEs utilizing a reconstruction " + "distribution, use the reconstructionProbability or reconstructionLogProbability methods " + layerId ( ) ) ; } INDArray pZXMean = activate ( data , false , LayerWorkspaceMgr . noWorkspaces ( ) ) ; INDArray reconstruction = generateAtMeanGivenZ ( pZXMean ) ; // Not probabilistic - > " mean " = = output if ( reconstructionDistribution instanceof CompositeReconstructionDistribution ) { CompositeReconstructionDistribution c = ( CompositeReconstructionDistribution ) reconstructionDistribution ; return c . computeLossFunctionScoreArray ( data , reconstruction ) ; } else { LossFunctionWrapper lfw = ( LossFunctionWrapper ) reconstructionDistribution ; ILossFunction lossFunction = lfw . getLossFunction ( ) ; // Re : the activation identity here - the reconstruction array already has the activation function applied , // so we don ' t want to apply it again . i . e . , we are passing the output , not the pre - output . return lossFunction . computeScoreArray ( data , reconstruction , new ActivationIdentity ( ) , null ) ; }
public class EventDestination { /** * The types of events that Amazon Pinpoint sends to the specified event destinations . * @ param matchingEventTypes * The types of events that Amazon Pinpoint sends to the specified event destinations . * @ return Returns a reference to this object so that method calls can be chained together . * @ see EventType */ public EventDestination withMatchingEventTypes ( EventType ... matchingEventTypes ) { } }
java . util . ArrayList < String > matchingEventTypesCopy = new java . util . ArrayList < String > ( matchingEventTypes . length ) ; for ( EventType value : matchingEventTypes ) { matchingEventTypesCopy . add ( value . toString ( ) ) ; } if ( getMatchingEventTypes ( ) == null ) { setMatchingEventTypes ( matchingEventTypesCopy ) ; } else { getMatchingEventTypes ( ) . addAll ( matchingEventTypesCopy ) ; } return this ;
public class ApiOvhTelephony { /** * Get this object properties * REST : GET / telephony / { billingAccount } / ovhPabx / { serviceName } / hunting / queue / { queueId } * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] * @ param queueId [ required ] */ public OvhOvhPabxHuntingQueue billingAccount_ovhPabx_serviceName_hunting_queue_queueId_GET ( String billingAccount , String serviceName , Long queueId ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/ovhPabx/{serviceName}/hunting/queue/{queueId}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName , queueId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOvhPabxHuntingQueue . class ) ;
public class AttrValue { /** * < pre > * This is a placeholder only used in nodes defined inside a * function . It indicates the attr value will be supplied when * the function is instantiated . For example , let us suppose a * node " N " in function " FN " . " N " has an attr " A " with value * placeholder = " foo " . When FN is instantiated with attr " foo " * set to " bar " , the instantiated node N ' s attr A will have been * given the value " bar " . * < / pre > * < code > optional string placeholder = 9 ; < / code > */ public com . google . protobuf . ByteString getPlaceholderBytes ( ) { } }
// Generated protobuf accessor: returns the placeholder oneof field (case 9) as a
// ByteString. If the stored value is still a String, it is converted with UTF-8
// and the ByteString is cached back into value_ for subsequent calls.
java . lang . Object ref = "" ; if ( valueCase_ == 9 ) { ref = value_ ; } if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; if ( valueCase_ == 9 ) { value_ = b ; } return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
public class SolarisUtils { /** * Registers a new file extension in the operating system . * @ param fileTypeName Name of the file extension . Must be atomic , e . g . * < code > foocorp . fooapp . v1 < / code > . * @ param fileTypeExtension File extension with leading dot , e . g . * < code > . bar < / code > . * @ param application Path to the application , which should open the new * file extension . * @ return < code > true < / code > if registration was successful , * < code > false < / code > otherwise . * @ throws OSException */ @ Override public boolean registerFileExtension ( String fileTypeName , String fileTypeExtension , String application ) throws OSException { } }
throw new UnsupportedOperationException ( "Not supported yet." ) ;
public class VersionUtils { /** * Determine how much of two versions match . Returns null if the versions do not match at all . * @ return null for no match or the name of the most specific field that matches . */ public static Version . Field getMostSpecificMatchingField ( Version v1 , Version v2 ) { } }
if ( v1 . getMajor ( ) != v2 . getMajor ( ) ) { return null ; } if ( v1 . getMinor ( ) != v2 . getMinor ( ) ) { return Version . Field . MAJOR ; } if ( v1 . getPatch ( ) != v2 . getPatch ( ) ) { return Version . Field . MINOR ; } final Integer l1 = v1 . getLocal ( ) ; final Integer l2 = v2 . getLocal ( ) ; if ( l1 != l2 && ( l1 == null || l2 == null || ! l1 . equals ( l2 ) ) ) { return Version . Field . PATCH ; } return Version . Field . LOCAL ;
public class InternalSimpleAntlrParser { /** * InternalSimpleAntlr . g : 73:1 : entryRuleAntlrGrammar returns [ EObject current = null ] : iv _ ruleAntlrGrammar = ruleAntlrGrammar EOF ; */ public final EObject entryRuleAntlrGrammar ( ) throws RecognitionException { } }
// Generated ANTLR entry rule: parses a complete AntlrGrammar followed by EOF.
// Node composition is skipped while backtracking (state.backtracking != 0);
// recognition errors are recovered in place rather than propagated.
EObject current = null ; EObject iv_ruleAntlrGrammar = null ; try { // InternalSimpleAntlr . g : 74:2 : ( iv _ ruleAntlrGrammar = ruleAntlrGrammar EOF ) // InternalSimpleAntlr . g : 75:2 : iv _ ruleAntlrGrammar = ruleAntlrGrammar EOF { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAntlrGrammarRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; iv_ruleAntlrGrammar = ruleAntlrGrammar ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = iv_ruleAntlrGrammar ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return current ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class BlobW { /** * Write blob data . * @ param b byte array to be written at current position . */ public void write ( byte [ ] b ) throws IOException { } }
if ( b != null && b . length > 0 ) { pos += blob . write ( b , 0 , pos , b . length ) ; }
public class HystrixObservableCollapser { /** * A lazy { @ link Observable } that will execute when subscribed to . * See https : / / github . com / Netflix / RxJava / wiki for more information . * @ param observeOn * The { @ link Scheduler } to execute callbacks on . * @ return { @ code Observable < R > } that lazily executes and calls back with the result of of { @ link HystrixCommand } { @ code < BatchReturnType > } execution after mapping the * { @ code < BatchReturnType > } into { @ code < ResponseType > } */ public Observable < ResponseType > toObservable ( Scheduler observeOn ) { } }
// Deferred pipeline: (1) serve from the request cache when enabled and present;
// (2) otherwise submit the argument to the request collapser for batching;
// (3) when caching is enabled, publish the response via putIfAbsent — the inline
// comment below documents the accepted thread race on duplicate cache puts.
// NOTE(review): the observeOn parameter is not referenced in this body — presumably
// scheduling is handled downstream; confirm against the class's other overloads.
return Observable . defer ( new Func0 < Observable < ResponseType > > ( ) { @ Override public Observable < ResponseType > call ( ) { final boolean isRequestCacheEnabled = getProperties ( ) . requestCacheEnabled ( ) . get ( ) ; /* try from cache first */ if ( isRequestCacheEnabled ) { HystrixCachedObservable < ResponseType > fromCache = requestCache . get ( getCacheKey ( ) ) ; if ( fromCache != null ) { metrics . markResponseFromCache ( ) ; return fromCache . toObservable ( ) ; } } RequestCollapser < BatchReturnType , ResponseType , RequestArgumentType > requestCollapser = collapserFactory . getRequestCollapser ( collapserInstanceWrapper ) ; Observable < ResponseType > response = requestCollapser . submitRequest ( getRequestArgument ( ) ) ; metrics . markRequestBatched ( ) ; if ( isRequestCacheEnabled ) { /* * A race can occur here with multiple threads queuing but only one will be cached . * This means we can have some duplication of requests in a thread - race but we ' re okay * with having some inefficiency in duplicate requests in the same batch * and then subsequent requests will retrieve a previously cached Observable . * If this is an issue we can make a lazy - future that gets set in the cache * then only the winning ' put ' will be invoked to actually call ' submitRequest ' */ HystrixCachedObservable < ResponseType > toCache = HystrixCachedObservable . from ( response ) ; HystrixCachedObservable < ResponseType > fromCache = requestCache . putIfAbsent ( getCacheKey ( ) , toCache ) ; if ( fromCache == null ) { return toCache . toObservable ( ) ; } else { return fromCache . toObservable ( ) ; } } return response ; } } ) ;
public class FreePool { /** * This method should only be called when a fatal error occurs or * when an attempt is made to remove all of the connections from * the pool . */ protected void incrementFatalErrorValue ( int value1 ) { } }
// Bumps the pool-wide fatal-error notification counter. On wrap-around
// (Integer.MAX_VALUE - 1) the counter restarts at zero and every non-parked
// MCWrapper's fatal error value is reset under the map write lock.
// NOTE(review): the inline comments below refer to a "value2" parameter that
// does not exist in this signature — likely stale text from an older overload;
// only value1 == 0 gates the one-time reset. Confirm against other free pools.
/* * value1 and value2 are index values for the free pools . * When value1 and value2 are 0 , we are in free pool . */ if ( fatalErrorNotificationTime == Integer . MAX_VALUE - 1 ) { /* * We need to start over at zero . All connection * fatal error values need to be reset to zero . */ fatalErrorNotificationTime = 0 ; if ( value1 == 0 ) { /* * We only want to do this once . When the values value1 and value2 are 0 * are are processing the first free pool . When we are processing all of the * rest of the free pool , this code will not be executed . */ pm . mcToMCWMapWrite . lock ( ) ; try { Collection < MCWrapper > mcWrappers = pm . mcToMCWMap . values ( ) ; Iterator < MCWrapper > mcWrapperIt = mcWrappers . iterator ( ) ; while ( mcWrapperIt . hasNext ( ) ) { MCWrapper mcw = mcWrapperIt . next ( ) ; if ( ! mcw . isParkedWrapper ( ) ) { /* * Reset the fatal error value to zero . * This connection will be cleaned up and * destroyed when returned to the free pool . */ mcw . setFatalErrorValue ( 0 ) ; } } } finally { pm . mcToMCWMapWrite . unlock ( ) ; } } } else { ++ fatalErrorNotificationTime ; }
public class HttpListenerMBean { protected void defineManagedResource ( ) { } }
super . defineManagedResource ( ) ; defineAttribute ( "defaultScheme" ) ; defineAttribute ( "lowOnResources" , false ) ; defineAttribute ( "outOfResources" , false ) ; defineAttribute ( "confidentialPort" ) ; defineAttribute ( "confidentialScheme" ) ; defineAttribute ( "integralPort" ) ; defineAttribute ( "integralScheme" ) ; defineAttribute ( "bufferSize" ) ; defineAttribute ( "bufferReserve" ) ;
public class CassandraStorage { /** * get a list of index expression */ private List < IndexExpression > getIndexExpressions ( ) throws IOException { } }
UDFContext context = UDFContext . getUDFContext ( ) ; Properties property = context . getUDFProperties ( AbstractCassandraStorage . class ) ; if ( property . getProperty ( PARTITION_FILTER_SIGNATURE ) != null ) return indexExpressionsFromString ( property . getProperty ( PARTITION_FILTER_SIGNATURE ) ) ; else return null ;
public class CellConverterFactorySupport { /** * 数式を処理する { @ link CellFormulaHandler } を作成する 。 * @ param formulaAnno 数式のアノテーション * @ param field フィールド情報 * @ param config システム情報 * @ return { @ link CellFormulaHandler } のインスタンス */ protected CellFormulaHandler createCellFormulaHandler ( final XlsFormula formulaAnno , final FieldAccessor field , final Configuration config ) { } }
if ( ! formulaAnno . value ( ) . isEmpty ( ) ) { final String formulaExpression = formulaAnno . value ( ) ; try { // EL式として正しいか検証する config . getFormulaFormatter ( ) . interpolate ( formulaExpression , Collections . emptyMap ( ) ) ; } catch ( ExpressionEvaluationException e ) { throw new AnnotationInvalidException ( formulaAnno , MessageBuilder . create ( "anno.attr.invalidEL" ) . var ( "property" , field . getNameWithClass ( ) ) . varWithAnno ( "anno" , XlsFormula . class ) . var ( "attrName" , "value" ) . var ( "attrValue" , formulaExpression ) . format ( ) ) ; } CellFormulaHandler handler = new CellFormulaHandler ( formulaExpression ) ; handler . setPrimaryFormula ( formulaAnno . primary ( ) ) ; return handler ; } else if ( ! formulaAnno . methodName ( ) . isEmpty ( ) ) { // 戻り値が文字列の数式を返すメソッドを探す final Class < ? > targetClass = field . getDeclaringClass ( ) ; Method method = null ; for ( Method m : targetClass . getDeclaredMethods ( ) ) { if ( m . getName ( ) . equals ( formulaAnno . methodName ( ) ) && m . getReturnType ( ) . equals ( String . class ) ) { method = m ; break ; } } if ( method == null ) { throw new AnnotationInvalidException ( formulaAnno , MessageBuilder . create ( "anno.attr.notFoundMethod" ) . var ( "property" , field . getNameWithClass ( ) ) . varWithAnno ( "anno" , XlsFormula . class ) . var ( "attrName" , "methodName" ) . var ( "attrValue" , formulaAnno . methodName ( ) ) . varWithClass ( "definedClass" , targetClass ) . format ( ) ) ; } method . setAccessible ( true ) ; CellFormulaHandler handler = new CellFormulaHandler ( method ) ; handler . setPrimaryFormula ( formulaAnno . primary ( ) ) ; return handler ; } else { throw new AnnotationInvalidException ( formulaAnno , MessageBuilder . create ( "anno.attr.required.any" ) . var ( "property" , field . getNameWithClass ( ) ) . varWithAnno ( "anno" , XlsFormula . class ) . varWithArrays ( "attrNames" , "value" , "methodName" ) . format ( ) ) ; }
public class Functions { /** * Returns the number of items in a collection or the number of characters in a string . * The collection can be of any type supported for the < code > items < / code > attribute of * the < code > & lt ; c : forEach & gt ; < / code > action . * @ param obj the collection or string whose length should be computed * @ return the length of the collection or string ; 0 if obj is null * @ throws JspTagException if the type is not valid */ public static int length ( Object obj ) throws JspTagException { } }
if ( obj == null ) { return 0 ; } if ( obj instanceof String ) { return ( ( String ) obj ) . length ( ) ; } if ( obj instanceof Collection ) { return ( ( Collection ) obj ) . size ( ) ; } if ( obj instanceof Map ) { return ( ( Map ) obj ) . size ( ) ; } if ( obj instanceof Iterator ) { int count = 0 ; Iterator iter = ( Iterator ) obj ; while ( iter . hasNext ( ) ) { count ++ ; iter . next ( ) ; } return count ; } if ( obj instanceof Enumeration ) { Enumeration enum_ = ( Enumeration ) obj ; int count = 0 ; while ( enum_ . hasMoreElements ( ) ) { count ++ ; enum_ . nextElement ( ) ; } return count ; } if ( obj . getClass ( ) . isArray ( ) ) { return Array . getLength ( obj ) ; } throw new JspTagException ( Resources . getMessage ( "PARAM_BAD_VALUE" ) ) ;
public class RestrictedFeatureRespository { /** * ( non - Javadoc ) * @ see com . ibm . ws . kernel . feature . resolver . FeatureResolver . Repository # getFeature ( java . lang . String ) */ @ Override public ProvisioningFeatureDefinition getFeature ( String featureName ) { } }
ProvisioningFeatureDefinition result = repo . getFeature ( featureName ) ; if ( result == null ) { return null ; } if ( restricted . contains ( result . getSymbolicName ( ) ) ) { // record the restricted attemp if ( ! restrictedAttempts . contains ( result . getSymbolicName ( ) ) ) { restrictedAttempts . add ( result . getSymbolicName ( ) ) ; } return null ; } return result ;
public class InboundHeadersBenchmark { /** * Checkstyle . */ @ Benchmark @ BenchmarkMode ( Mode . AverageTime ) @ OutputTimeUnit ( TimeUnit . NANOSECONDS ) public void defaultHeaders_serverHandler ( Blackhole bh ) { } }
serverHandler ( bh , new DefaultHttp2Headers ( true , 9 ) ) ;
public class NativeJavaObject { /** * Type - munging for field setting and method invocation . * Conforms to LC3 specification */ static Object coerceTypeImpl ( Class < ? > type , Object value ) { } }
// Coerces a JS value to the requested Java type per the LC3 rules: fast path for
// an exact class match, then a switch on the JS type tag (null, undefined,
// boolean, number, string, java class/object/array, object). Unconvertible
// combinations are funneled through reportConversionError; JS arrays are
// element-wise coerced into new Java arrays, and JS objects/functions may be
// adapted to target interfaces via createInterfaceAdapter.
if ( value != null && value . getClass ( ) == type ) { return value ; } switch ( getJSTypeCode ( value ) ) { case JSTYPE_NULL : // raise error if type . isPrimitive ( ) if ( type . isPrimitive ( ) ) { reportConversionError ( value , type ) ; } return null ; case JSTYPE_UNDEFINED : if ( type == ScriptRuntime . StringClass || type == ScriptRuntime . ObjectClass ) { return "undefined" ; } reportConversionError ( "undefined" , type ) ; break ; case JSTYPE_BOOLEAN : // Under LC3 , only JS Booleans can be coerced into a Boolean value if ( type == Boolean . TYPE || type == ScriptRuntime . BooleanClass || type == ScriptRuntime . ObjectClass ) { return value ; } else if ( type == ScriptRuntime . StringClass ) { return value . toString ( ) ; } else { reportConversionError ( value , type ) ; } break ; case JSTYPE_NUMBER : if ( type == ScriptRuntime . StringClass ) { return ScriptRuntime . toString ( value ) ; } else if ( type == ScriptRuntime . ObjectClass ) { Context context = Context . getCurrentContext ( ) ; if ( context . hasFeature ( Context . FEATURE_INTEGER_WITHOUT_DECIMAL_PLACE ) ) { // to process numbers like 2.0 as 2 without decimal place long roundedValue = Math . round ( toDouble ( value ) ) ; if ( roundedValue == toDouble ( value ) ) { return coerceToNumber ( Long . TYPE , value ) ; } } return coerceToNumber ( Double . TYPE , value ) ; } else if ( ( type . isPrimitive ( ) && type != Boolean . TYPE ) || ScriptRuntime . NumberClass . isAssignableFrom ( type ) ) { return coerceToNumber ( type , value ) ; } else { reportConversionError ( value , type ) ; } break ; case JSTYPE_STRING : if ( type == ScriptRuntime . StringClass || type . isInstance ( value ) ) { return value . toString ( ) ; } else if ( type == Character . TYPE || type == ScriptRuntime . 
CharacterClass ) { // Special case for converting a single char string to a // character // Placed here because it applies * only * to JS strings , // not other JS objects converted to strings if ( ( ( CharSequence ) value ) . length ( ) == 1 ) { return Character . valueOf ( ( ( CharSequence ) value ) . charAt ( 0 ) ) ; } return coerceToNumber ( type , value ) ; } else if ( ( type . isPrimitive ( ) && type != Boolean . TYPE ) || ScriptRuntime . NumberClass . isAssignableFrom ( type ) ) { return coerceToNumber ( type , value ) ; } else { reportConversionError ( value , type ) ; } break ; case JSTYPE_JAVA_CLASS : if ( value instanceof Wrapper ) { value = ( ( Wrapper ) value ) . unwrap ( ) ; } if ( type == ScriptRuntime . ClassClass || type == ScriptRuntime . ObjectClass ) { return value ; } else if ( type == ScriptRuntime . StringClass ) { return value . toString ( ) ; } else { reportConversionError ( value , type ) ; } break ; case JSTYPE_JAVA_OBJECT : case JSTYPE_JAVA_ARRAY : if ( value instanceof Wrapper ) { value = ( ( Wrapper ) value ) . unwrap ( ) ; } if ( type . isPrimitive ( ) ) { if ( type == Boolean . TYPE ) { reportConversionError ( value , type ) ; } return coerceToNumber ( type , value ) ; } if ( type == ScriptRuntime . StringClass ) { return value . toString ( ) ; } if ( type . isInstance ( value ) ) { return value ; } reportConversionError ( value , type ) ; break ; case JSTYPE_OBJECT : if ( type == ScriptRuntime . StringClass ) { return ScriptRuntime . toString ( value ) ; } else if ( type . isPrimitive ( ) ) { if ( type == Boolean . TYPE ) { reportConversionError ( value , type ) ; } return coerceToNumber ( type , value ) ; } else if ( type . isInstance ( value ) ) { return value ; } else if ( type == ScriptRuntime . DateClass && value instanceof NativeDate ) { double time = ( ( NativeDate ) value ) . getJSTimeValue ( ) ; // XXX : This will replace NaN by 0 return new Date ( ( long ) time ) ; } else if ( type . 
isArray ( ) && value instanceof NativeArray ) { // Make a new java array , and coerce the JS array components // to the target ( component ) type . NativeArray array = ( NativeArray ) value ; long length = array . getLength ( ) ; Class < ? > arrayType = type . getComponentType ( ) ; Object Result = Array . newInstance ( arrayType , ( int ) length ) ; for ( int i = 0 ; i < length ; ++ i ) { try { Array . set ( Result , i , coerceTypeImpl ( arrayType , array . get ( i , array ) ) ) ; } catch ( EvaluatorException ee ) { reportConversionError ( value , type ) ; } } return Result ; } else if ( value instanceof Wrapper ) { value = ( ( Wrapper ) value ) . unwrap ( ) ; if ( type . isInstance ( value ) ) return value ; reportConversionError ( value , type ) ; } else if ( type . isInterface ( ) && ( value instanceof NativeObject || value instanceof NativeFunction ) ) { // Try to use function / object as implementation of Java interface . return createInterfaceAdapter ( type , ( ScriptableObject ) value ) ; } else { reportConversionError ( value , type ) ; } break ; } return value ;
public class KafkaSimpleStreamingExtractor { /** * Get the schema ( metadata ) of the extracted data records . * @ return the schema of Kafka topic being extracted * @ throws IOException if there is problem getting the schema */ @ Override public S getSchema ( ) throws IOException { } }
try { if ( _schemaRegistry . isPresent ( ) ) { return _schemaRegistry . get ( ) . getLatestSchemaByTopic ( this . _partition . topic ( ) ) ; } } catch ( SchemaRegistryException e ) { throw new RuntimeException ( e ) ; } return ( ( S ) this . _partition . topic ( ) ) ;
public class BlockResettableMutableObjectIterator { @ Override public T next ( T target ) throws IOException { } }
// Two-phase iterator. Read phase: serve records from the current block.
// Write phase: pull from the input and spill into the write buffer; a record
// that does not fit becomes the "leftover" (copied aside, returned on the next
// call after the buffer is reset). Returns null when the buffer is full or the
// input is exhausted (noMoreBlocks).
// check for the left over element if ( this . readPhase ) { return getNextRecord ( target ) ; } else { // writing phase . check for leftover first if ( this . leftOverReturned ) { // get next record if ( ( target = this . input . next ( target ) ) != null ) { if ( writeNextRecord ( target ) ) { return target ; } else { // did not fit into memory , keep as leftover this . leftOverRecord = this . serializer . copy ( target , this . leftOverRecord ) ; this . leftOverReturned = false ; this . fullWriteBuffer = true ; return null ; } } else { this . noMoreBlocks = true ; return null ; } } else if ( this . fullWriteBuffer ) { return null ; } else { this . leftOverReturned = true ; target = this . serializer . copy ( this . leftOverRecord , target ) ; return target ; } }
public class Log4JConfigurer { /** * Called during Logging init , potentially resets log4j to follow the settings * configured by pelzer . util . */ static void configureLog4j ( ) { } }
// Resets log4j's configuration and installs a bridge Appender on the root
// logger that forwards every log4j LoggingEvent into the pelzer.util Logging
// framework, mapping log4j Levels onto the local Priority enum (unmatched
// levels fall through to Priority.OFF). All other Appender methods are no-ops.
org . apache . log4j . LogManager . resetConfiguration ( ) ; org . apache . log4j . Logger . getRootLogger ( ) . addAppender ( new org . apache . log4j . Appender ( ) { private String name ; @ Override public void setName ( String name ) { this . name = name ; } @ Override public void setLayout ( org . apache . log4j . Layout layout ) { } @ Override public void setErrorHandler ( org . apache . log4j . spi . ErrorHandler errorHandler ) { } @ Override public boolean requiresLayout ( ) { return false ; } @ Override public String getName ( ) { return name ; } @ Override public org . apache . log4j . Layout getLayout ( ) { return null ; } @ Override public org . apache . log4j . spi . Filter getFilter ( ) { return null ; } @ Override public org . apache . log4j . spi . ErrorHandler getErrorHandler ( ) { return null ; } @ Override public void doAppend ( org . apache . log4j . spi . LoggingEvent event ) { Logger logger = Logging . getLogger ( event . getLoggerName ( ) ) ; logger . genericLog ( event . getMessage ( ) + "" , event . getThrowableInformation ( ) == null ? null : event . getThrowableInformation ( ) . getThrowable ( ) , convertPriority ( event . getLevel ( ) ) ) ; } private Priority convertPriority ( org . apache . log4j . Level level ) { switch ( level . toInt ( ) ) { case org . apache . log4j . Level . ALL_INT : return Priority . ALL ; case org . apache . log4j . Level . TRACE_INT : return Priority . VERBOSE ; case org . apache . log4j . Level . DEBUG_INT : return Priority . DEBUG ; case org . apache . log4j . Level . WARN_INT : return Priority . WARN ; case org . apache . log4j . Level . INFO_INT : return Priority . INFO ; case org . apache . log4j . Level . ERROR_INT : return Priority . ERROR ; case org . apache . log4j . Level . FATAL_INT : return Priority . FATAL ; } return Priority . OFF ; } @ Override public void close ( ) { } @ Override public void clearFilters ( ) { } @ Override public void addFilter ( org . apache . log4j . spi . 
Filter newFilter ) { } } ) ;
public class AuthenticateUserHelper { /** * Validate that the input parameters are not null . * @ param authenticationService the service to authenticate a user * @ param username the user to authenticate * @ throws AuthenticationException when either input is null */ private void validateInput ( AuthenticationService authenticationService , String username ) throws AuthenticationException { } }
if ( authenticationService == null ) { throw new AuthenticationException ( "authenticationService cannot be null." ) ; } else if ( username == null ) { throw new AuthenticationException ( "username cannot be null." ) ; }
public class SniffyServlet { /** * TODO : stream JSON instead ; otherwise we are creating unnecessary garbage out of interned strings mostly */ private byte [ ] getRequestStatsJson ( String requestId ) { } }
RequestStats requestStats = cache . get ( requestId ) ; if ( null != requestStats ) { StringBuilder sb = new StringBuilder ( ) ; sb . append ( "{" ) . append ( "\"timeToFirstByte\":" ) . append ( requestStats . getTimeToFirstByte ( ) ) . append ( "," ) . append ( "\"time\":" ) . append ( requestStats . getElapsedTime ( ) ) ; if ( null != requestStats . getExecutedStatements ( ) ) { sb . append ( ",\"executedQueries\":[" ) ; Set < Map . Entry < StatementMetaData , SqlStats > > entries = requestStats . getExecutedStatements ( ) . entrySet ( ) ; Iterator < Map . Entry < StatementMetaData , SqlStats > > statementsIt = entries . iterator ( ) ; while ( statementsIt . hasNext ( ) ) { Map . Entry < StatementMetaData , SqlStats > entry = statementsIt . next ( ) ; StatementMetaData statement = entry . getKey ( ) ; SqlStats sqlStats = entry . getValue ( ) ; sb . append ( "{" ) . append ( "\"query\":" ) . append ( StringUtil . escapeJsonString ( statement . sql ) ) . append ( "," ) . append ( "\"stackTrace\":" ) . append ( StringUtil . escapeJsonString ( statement . stackTrace ) ) . append ( "," ) . append ( "\"time\":" ) . append ( sqlStats . elapsedTime . longValue ( ) ) . append ( "," ) . append ( "\"invocations\":" ) . append ( sqlStats . queries . longValue ( ) ) . append ( "," ) . append ( "\"rows\":" ) . append ( sqlStats . rows . longValue ( ) ) . append ( "," ) . append ( "\"type\":\"" ) . append ( statement . query . name ( ) ) . append ( "\"," ) . append ( "\"bytesDown\":" ) . append ( sqlStats . bytesDown . longValue ( ) ) . append ( "," ) . append ( "\"bytesUp\":" ) . append ( sqlStats . bytesUp . longValue ( ) ) . append ( "}" ) ; if ( statementsIt . hasNext ( ) ) { sb . append ( "," ) ; } } sb . append ( "]" ) ; } if ( null != requestStats . getSocketOperations ( ) ) { sb . append ( ",\"networkConnections\":[" ) ; Iterator < Map . Entry < SocketMetaData , SocketStats > > statementsIt = requestStats . getSocketOperations ( ) . entrySet ( ) . 
iterator ( ) ; while ( statementsIt . hasNext ( ) ) { Map . Entry < SocketMetaData , SocketStats > entry = statementsIt . next ( ) ; SocketMetaData socketMetaData = entry . getKey ( ) ; SocketStats socketStats = entry . getValue ( ) ; sb . append ( "{" ) . append ( "\"host\":" ) . append ( StringUtil . escapeJsonString ( socketMetaData . address . toString ( ) ) ) . append ( "," ) . append ( "\"stackTrace\":" ) . append ( StringUtil . escapeJsonString ( socketMetaData . stackTrace ) ) . append ( "," ) . append ( "\"time\":" ) . append ( socketStats . elapsedTime . longValue ( ) ) . append ( "," ) . append ( "\"bytesDown\":" ) . append ( socketStats . bytesDown . longValue ( ) ) . append ( "," ) . append ( "\"bytesUp\":" ) . append ( socketStats . bytesUp . longValue ( ) ) . append ( "}" ) ; if ( statementsIt . hasNext ( ) ) { sb . append ( "," ) ; } } sb . append ( "]" ) ; } if ( null != requestStats . getExceptions ( ) && ! requestStats . getExceptions ( ) . isEmpty ( ) ) { sb . append ( ",\"exceptions\":[" ) ; Iterator < Throwable > exceptionsIt = requestStats . getExceptions ( ) . iterator ( ) ; while ( exceptionsIt . hasNext ( ) ) { Throwable exception = exceptionsIt . next ( ) ; StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; exception . printStackTrace ( pw ) ; sb . append ( "{" ) . append ( "\"class\":" ) . append ( StringUtil . escapeJsonString ( exception . getClass ( ) . getName ( ) ) ) . append ( ",\"message\":" ) . append ( StringUtil . escapeJsonString ( exception . getMessage ( ) ) ) . append ( ",\"stackTrace\":" ) . append ( StringUtil . escapeJsonString ( sw . toString ( ) ) ) . append ( "}" ) ; if ( exceptionsIt . hasNext ( ) ) { sb . append ( "," ) ; } } sb . append ( "]" ) ; } sb . append ( "}" ) ; return sb . toString ( ) . getBytes ( ) ; } else { return null ; }
public class JCalendar { /** * Sets the foreground color . * @ param fg * the new foreground */ public void setForeground ( Color fg ) { } }
super . setForeground ( fg ) ; if ( dayChooser != null ) { dayChooser . setForeground ( fg ) ; monthChooser . setForeground ( fg ) ; yearChooser . setForeground ( fg ) ; }
public class SessionDataManager {

    /**
     * Reindexes same-name siblings of a node. Reindexing is relevant for remove and move
     * operations only; an added node always receives the last index among its siblings.
     *
     * @param cause the node that caused the reindexing, i.e. the deleted or moved node
     * @param dataManager the data source used to look up parent and sibling nodes
     * @return the list of update {@link ItemState}s produced by shifting sibling indexes down
     * @throws RepositoryException on storage access failure
     */
    protected List<ItemState> reindexSameNameSiblings(NodeData cause, ItemDataConsumer dataManager)
        throws RepositoryException {
        List<ItemState> changes = new ArrayList<ItemState>();
        NodeData parentNodeData = (NodeData) dataManager.getItemData(cause.getParentIdentifier());
        // Start from the sibling immediately after the caused node (same name, index + 1).
        NodeData nextSibling = (NodeData) dataManager.getItemData(parentNodeData,
            new QPathEntry(cause.getQPath().getName(), cause.getQPath().getIndex() + 1), ItemType.NODE);
        String reindexedId = null;
        // Repeat while a next sibling exists and it is neither the caused node (deleted or
        // moved-to) nor the sibling just reindexed (guards against re-reading our own update).
        while (nextSibling != null && !nextSibling.getIdentifier().equals(cause.getIdentifier())
            && !nextSibling.getIdentifier().equals(reindexedId)) {
            // Path the sibling currently has...
            QPath siblingOldPath = QPath.makeChildPath(nextSibling.getQPath().makeParentPath(),
                nextSibling.getQPath().getName(), nextSibling.getQPath().getIndex());
            // ...and the same path with the index shifted down by one.
            QPath siblingPath = QPath.makeChildPath(nextSibling.getQPath().makeParentPath(),
                nextSibling.getQPath().getName(), nextSibling.getQPath().getIndex() - 1);
            NodeData reindexed = new TransientNodeData(siblingPath, nextSibling.getIdentifier(),
                nextSibling.getPersistedVersion(), nextSibling.getPrimaryTypeName(),
                nextSibling.getMixinTypeNames(), nextSibling.getOrderNumber(),
                nextSibling.getParentIdentifier(), nextSibling.getACL());
            reindexedId = reindexed.getIdentifier();
            ItemState reindexedState = ItemState.createUpdatedState(reindexed);
            changes.add(reindexedState);
            // Keep the in-memory item pool and cached descendants consistent with the new path.
            itemsPool.reload(reindexed);
            reloadDescendants(siblingOldPath, siblingPath);
            // Advance to the following same-name sibling.
            nextSibling = (NodeData) dataManager.getItemData(parentNodeData,
                new QPathEntry(nextSibling.getQPath().getName(), nextSibling.getQPath().getIndex() + 1),
                ItemType.NODE);
        }
        return changes;
    }
}
public class DateTimeStaticExtensions { /** * Parse text into a { @ link java . time . LocalDateTime } using the provided pattern . * Note : the order of parameters differs from versions of this method for the legacy Date class . * @ param type placeholder variable used by Groovy categories ; ignored for default static methods * @ param text String to be parsed to create the date instance * @ param pattern pattern used to parse the text * @ return a LocalDateTime representing the parsed text * @ throws java . lang . IllegalArgumentException if the pattern is invalid * @ throws java . time . format . DateTimeParseException if the text cannot be parsed * @ see java . time . format . DateTimeFormatter * @ see java . time . LocalDateTime # parse ( java . lang . CharSequence , java . time . format . DateTimeFormatter ) * @ since 2.5.0 */ public static LocalDateTime parse ( final LocalDateTime type , CharSequence text , String pattern ) { } }
return LocalDateTime . parse ( text , DateTimeFormatter . ofPattern ( pattern ) ) ;
public class XmlSoapFaultValidator { /** * Delegates to XML message validator for validation of fault detail . */ @ Override protected void validateFaultDetailString ( String receivedDetailString , String controlDetailString , TestContext context , ValidationContext validationContext ) throws ValidationException { } }
XmlMessageValidationContext xmlMessageValidationContext ; if ( validationContext instanceof XmlMessageValidationContext ) { xmlMessageValidationContext = ( XmlMessageValidationContext ) validationContext ; } else { xmlMessageValidationContext = new XmlMessageValidationContext ( ) ; } messageValidator . validateMessage ( new DefaultMessage ( receivedDetailString ) , new DefaultMessage ( controlDetailString ) , context , xmlMessageValidationContext ) ;
public class RESTAssert { /** * assert that objects are equal . < br > * This means they are both < i > null < / i > or < code > one . equals ( two ) < / code > returns < i > true < / i > * @ param one the first object * @ param two the second object * @ param status the status code to throw * @ throws WebApplicationException with given status code */ @ SuppressWarnings ( "null" ) public static void assertEquals ( final Object one , final Object two , final StatusType status ) { } }
if ( ( one == null ) && ( two == null ) ) { return ; } RESTAssert . assertNotNull ( one , status ) ; RESTAssert . assertTrue ( one . equals ( two ) , status ) ;
public class GuaranteedTargetStream { /** * Is the stream marked as blocked . * @ return true if the stream is blocked . */ private boolean isStreamBlocked ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( this , tc , "isStreamBlocked" ) ; SibTr . exit ( tc , "isStreamBlocked" , new Object [ ] { Boolean . valueOf ( isStreamBlocked ) , Long . valueOf ( linkBlockingTick ) } ) ; } return this . isStreamBlocked ;
public class CassandraSchemaManager { /** * showSchema Alters column type of an existing column . * @ param tableInfo * the table info * @ param translator * the translator * @ param column * the column * @ throws Exception * the exception */ private void alterColumnType ( TableInfo tableInfo , CQLTranslator translator , ColumnInfo column ) throws Exception { } }
StringBuilder alterColumnTypeQuery = new StringBuilder ( "ALTER TABLE " ) ; translator . ensureCase ( alterColumnTypeQuery , tableInfo . getTableName ( ) , false ) ; alterColumnTypeQuery . append ( " ALTER " ) ; translator . ensureCase ( alterColumnTypeQuery , column . getColumnName ( ) , false ) ; alterColumnTypeQuery . append ( " TYPE " + translator . getCQLType ( CassandraValidationClassMapper . getValidationClass ( column . getType ( ) , isCql3Enabled ( tableInfo ) ) ) ) ; cassandra_client . execute_cql3_query ( ByteBuffer . wrap ( alterColumnTypeQuery . toString ( ) . getBytes ( ) ) , Compression . NONE , ConsistencyLevel . ONE ) ; KunderaCoreUtils . printQuery ( alterColumnTypeQuery . toString ( ) , showQuery ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MrowType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "mrow" ) public JAXBElement < MrowType > createMrow ( MrowType value ) { } }
return new JAXBElement < MrowType > ( _Mrow_QNAME , MrowType . class , null , value ) ;
public class FieldFacetProvider { private static void collectFields ( Class < ? > type , List < Field > fields ) { } }
final Field [ ] declaredFields = type . getDeclaredFields ( ) ; for ( Field declaredField : declaredFields ) { if ( isPersistable ( declaredField ) ) { fields . add ( declaredField ) ; } } if ( type . getSuperclass ( ) != null ) { collectFields ( type . getSuperclass ( ) , fields ) ; }
public class ActionFormMapper { protected Object prepareYourCollection ( VirtualForm virtualForm , Object bean , String name , Object value , StringBuilder pathSb , FormMappingOption option , PropertyDesc pd ) { } }
final List < FormYourCollectionResource > yourCollections = option . getYourCollections ( ) ; if ( yourCollections . isEmpty ( ) ) { return null ; // no settings of your collections } final Class < ? > propertyType = pd . getPropertyType ( ) ; for ( FormYourCollectionResource yourCollection : yourCollections ) { if ( ! propertyType . equals ( yourCollection . getYourType ( ) ) ) { // just type in form mapping ( to avoid complexity ) continue ; } final List < ? > objectList = prepareObjectList ( virtualForm , bean , name , value , pathSb , option , pd ) ; final Iterable < ? extends Object > applied = yourCollection . getYourCollectionCreator ( ) . apply ( objectList ) ; final Object mappedValue ; if ( applied instanceof List < ? > ) { mappedValue = applied ; } else { final List < Object > newList = new ArrayList < > ( ) ; for ( Object element : applied ) { newList . add ( element ) ; } mappedValue = newList ; } return mappedValue ; } return null ; // is not your collections
public class ParameterParser { /** * Parses out a token until any of the given terminators is encountered . * @ param terminators the array of terminating characters . Any of these characters when encountered signify the end * of the token * @ return the token */ private String parseToken ( final char [ ] terminators ) { } }
char ch ; i1 = pos ; i2 = pos ; while ( hasChar ( ) ) { ch = chars [ pos ] ; if ( isOneOf ( ch , terminators ) ) { break ; } i2 ++ ; pos ++ ; } return getToken ( false ) ;
public class AuthenticationAjaxFilter {

    /**
     * Lets the call through when it is the authentication method itself (anyone may
     * authenticate) or when a user is already authenticated; otherwise rejects it.
     *
     * @see org.directwebremoting.AjaxFilter#doFilter(java.lang.Object, java.lang.reflect.Method,
     *      java.lang.Object[], org.directwebremoting.AjaxFilterChain)
     */
    public Object doFilter(Object object, Method method, Object[] params, AjaxFilterChain chain)
            throws Exception {
        // getUser() is only consulted for non-authenticate calls (short-circuit).
        if (authenticateName.equals(method.getName()) || getUser() != null) {
            return chain.doFilter(object, method, params);
        }
        throw new SecurityException("Not authenticated");
    }
}
public class Task {

    /**
     * Set a start value.
     *
     * @param index start index (1-10)
     * @param value start value
     */
    public void setStart(int index, Date value) {
        // Resolve the index-th custom start field and store the supplied date against it.
        set(selectField(TaskFieldLists.CUSTOM_START, index), value);
    }
}
public class CPInstancePersistenceImpl {

    /**
     * Returns an ordered range of all the cp instances where CPDefinitionId = &#63; and
     * displayDate &lt; &#63; and status = &#63;.
     *
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are indexes in the result set, not primary keys;
     * <code>0</code> refers to the first result. Passing {@link QueryUtil#ALL_POS} for both
     * returns the full result set. Ordering follows <code>orderByComparator</code> when given,
     * otherwise {@link CPInstanceModelImpl}'s default ORDER BY for paginated queries, otherwise
     * ascending primary-key order.
     * </p>
     *
     * @param CPDefinitionId the cp definition ID
     * @param displayDate the display date
     * @param status the status
     * @param start the lower bound of the range of cp instances
     * @param end the upper bound of the range of cp instances (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching cp instances
     */
    @Override
    public List<CPInstance> findByC_LtD_S(long CPDefinitionId, Date displayDate, int status, int start,
        int end, OrderByComparator<CPInstance> orderByComparator) {
        // Delegate to the overload with retrieveFromCache = true.
        return findByC_LtD_S(CPDefinitionId, displayDate, status, start, end, orderByComparator, true);
    }
}
public class CmsHtmlWidget { /** * Returns the editor widget to use depending on the current users settings , current browser and installed editors . < p > * @ param cms the current CmsObject * @ param widgetDialog the dialog where the widget is used on * @ return the editor widget to use depending on the current users settings , current browser and installed editors */ private I_CmsWidget getEditorWidget ( CmsObject cms , I_CmsWidgetDialog widgetDialog ) { } }
if ( m_editorWidget == null ) { // get HTML widget to use from editor manager String widgetClassName = OpenCms . getWorkplaceManager ( ) . getWorkplaceEditorManager ( ) . getWidgetEditor ( cms . getRequestContext ( ) , widgetDialog . getUserAgent ( ) ) ; boolean foundWidget = true ; if ( CmsStringUtil . isEmpty ( widgetClassName ) ) { // no installed widget found , use default text area to edit HTML value widgetClassName = CmsTextareaWidget . class . getName ( ) ; foundWidget = false ; } try { if ( foundWidget ) { // get widget instance and set the widget configuration Class < ? > widgetClass = Class . forName ( widgetClassName ) ; A_CmsHtmlWidget editorWidget = ( A_CmsHtmlWidget ) widgetClass . newInstance ( ) ; editorWidget . setHtmlWidgetOption ( getHtmlWidgetOption ( ) ) ; m_editorWidget = editorWidget ; } else { // set the text area to display 15 rows for editing Class < ? > widgetClass = Class . forName ( widgetClassName ) ; I_CmsWidget editorWidget = ( I_CmsWidget ) widgetClass . newInstance ( ) ; editorWidget . setConfiguration ( "15" ) ; m_editorWidget = editorWidget ; } } catch ( Exception e ) { // failed to create widget instance LOG . error ( Messages . get ( ) . container ( Messages . LOG_CREATE_HTMLWIDGET_INSTANCE_FAILED_1 , widgetClassName ) . key ( ) ) ; } } return m_editorWidget ;
public class ContainerBase {

    /**
     * Adds the specified {@link File} resource (a nested JAR file form) to the current archive,
     * returning the archive itself. The resource is resolved against every {@link ClassLoader}
     * configured on the archive, first match wins.
     *
     * @param resource the nested JAR file to locate on the classpath
     * @param target path of the resource within the archive
     * @param base base path to resolve the target against
     * @return this archive
     * @throws IllegalArgumentException if the resource is not visible to any configured ClassLoader
     */
    private T addNestedJarFileResource(final File resource, final ArchivePath target, final ArchivePath base)
        throws IllegalArgumentException {
        final Iterable<ClassLoader> classLoaders =
            ((Configurable) this.getArchive()).getConfiguration().getClassLoaders();
        for (final ClassLoader classLoader : classLoaders) {
            final InputStream in = classLoader.getResourceAsStream(resourceAdjustedPath(resource));
            if (in != null) {
                // NOTE(review): the stream is never closed here — presumably ByteArrayAsset
                // consumes it eagerly, but confirm; otherwise this leaks a stream per call.
                final Asset asset = new ByteArrayAsset(in);
                return add(asset, base, target.get());
            }
        }
        throw new IllegalArgumentException(resource.getPath() + " was not found in any available ClassLoaders");
    }
}
public class AbstractCryptor {

    /**
     * Factory method for creating a new {@link Cipher} from the given key. Invoked from the
     * constructors of derived classes; may be overridden to supply a custom cipher.
     *
     * @param key the key
     * @return the new {@link Cipher} built from the key
     * @throws NoSuchAlgorithmException if instantiation of the SecretKeyFactory fails
     * @throws InvalidKeySpecException if generation of the SecretKey fails
     * @throws NoSuchPaddingException if instantiation of the cipher fails
     * @throws InvalidKeyException if initialization of the cipher fails
     * @throws InvalidAlgorithmParameterException if initialization of the cipher fails
     * @throws UnsupportedEncodingException if the named charset is not supported
     */
    protected C newCipher(final K key)
        throws NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException,
        InvalidKeyException, InvalidAlgorithmParameterException, UnsupportedEncodingException {
        // Delegate to the fully-parameterized overload using this cryptor's defaults
        // for algorithm, salt, iteration count and operation mode.
        return newCipher(key, newAlgorithm(), newSalt(), newIterationCount(), newOperationMode());
    }
}
public class CommerceCurrencyPersistenceImpl { /** * Returns the first commerce currency in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce currency * @ throws NoSuchCurrencyException if a matching commerce currency could not be found */ @ Override public CommerceCurrency findByUuid_First ( String uuid , OrderByComparator < CommerceCurrency > orderByComparator ) throws NoSuchCurrencyException { } }
CommerceCurrency commerceCurrency = fetchByUuid_First ( uuid , orderByComparator ) ; if ( commerceCurrency != null ) { return commerceCurrency ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCurrencyException ( msg . toString ( ) ) ;
public class Configuration { /** * This method forces specific devices to be used . All other devices present in system will be ignored . * @ param devices * @ return */ public Configuration useDevices ( @ NonNull int ... devices ) { } }
List < Integer > usableDevices = new ArrayList < > ( ) ; for ( int device : devices ) { if ( ! availableDevices . contains ( device ) ) { log . warn ( "Non-existent device [{}] requested, ignoring..." , device ) ; } else { if ( ! usableDevices . contains ( device ) ) usableDevices . add ( device ) ; } } if ( usableDevices . size ( ) > 0 ) { availableDevices . clear ( ) ; availableDevices . addAll ( usableDevices ) ; } return this ;
public class GroovyEngine { /** * Initialize the engine . */ public void initialize ( BSFManager mgr , String lang , Vector declaredBeans ) throws BSFException { } }
super . initialize ( mgr , lang , declaredBeans ) ; // create a shell shell = new GroovyShell ( mgr . getClassLoader ( ) ) ; // register the mgr with object name " bsf " shell . setVariable ( "bsf" , new BSFFunctions ( mgr , this ) ) ; int size = declaredBeans . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { declareBean ( ( BSFDeclaredBean ) declaredBeans . elementAt ( i ) ) ; }
public class CheckableGroup { /** * Set the specified { @ link Checkable } { @ link Widget } as unchecked , if it is a child of this * { @ link CheckableGroup } and not already unchecked . * @ param checkableWidget The { @ code Checkable Widget } to * { @ linkplain Checkable # setChecked ( boolean ) set unchecked } . * @ return { @ code True } if { @ code checkableWidget } is a child of this { @ code CheckableGroup } and * was not already unchecked ; { @ code false } otherwise . */ public < T extends Widget & Checkable > boolean uncheck ( T checkableWidget ) { } }
if ( hasChild ( checkableWidget ) ) { return checkInternal ( checkableWidget , false ) ; } return false ;
public class A_CmsListResourceCollector {

    /**
     * Returns a list item created from the resource information; the optional columns are
     * populated only when the corresponding {@code show*} flag is set.<p>
     *
     * @param resource the resource to create the list item from
     * @param list the list
     * @param showPermissions if to show permissions
     * @param showDateLastMod if to show the last modification date
     * @param showUserLastMod if to show the last modification user
     * @param showDateCreate if to show the creation date
     * @param showUserCreate if to show the creation user
     * @param showDateRel if to show the date released
     * @param showDateExp if to show the date expired
     * @param showState if to show the state
     * @param showLockedBy if to show the lock user
     * @param showSite if to show the site
     * @return a list item created from the resource information
     */
    protected CmsListItem createResourceListItem(CmsResource resource, CmsHtmlList list, boolean showPermissions,
        boolean showDateLastMod, boolean showUserLastMod, boolean showDateCreate, boolean showUserCreate,
        boolean showDateRel, boolean showDateExp, boolean showState, boolean showLockedBy, boolean showSite) {
        // The item is keyed by the resource's structure id.
        CmsListItem item = list.newItem(resource.getStructureId().toString());
        // Get an initialized resource utility for formatting the column values.
        CmsResourceUtil resUtil = getWp().getResourceUtil();
        resUtil.setResource(resource);
        // Columns that are always present.
        item.set(A_CmsListExplorerDialog.LIST_COLUMN_NAME, resUtil.getPath());
        item.set(A_CmsListExplorerDialog.LIST_COLUMN_ROOT_PATH, resUtil.getFullPath());
        item.set(A_CmsListExplorerDialog.LIST_COLUMN_TITLE, resUtil.getTitle());
        item.set(A_CmsListExplorerDialog.LIST_COLUMN_TYPE, resUtil.getResourceTypeName());
        item.set(A_CmsListExplorerDialog.LIST_COLUMN_SIZE, resUtil.getSizeString());
        // Optional columns, each guarded by its flag.
        if (showPermissions) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_PERMISSIONS, resUtil.getPermissionString());
        }
        if (showDateLastMod) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_DATELASTMOD, new Date(resource.getDateLastModified()));
        }
        if (showUserLastMod) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_USERLASTMOD, resUtil.getUserLastModified());
        }
        if (showDateCreate) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_DATECREATE, new Date(resource.getDateCreated()));
        }
        if (showUserCreate) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_USERCREATE, resUtil.getUserCreated());
        }
        if (showDateRel) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_DATEREL, new Date(resource.getDateReleased()));
        }
        if (showDateExp) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_DATEEXP, new Date(resource.getDateExpired()));
        }
        if (showState) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_STATE, resUtil.getStateName());
        }
        if (showLockedBy) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_LOCKEDBY, resUtil.getLockedByName());
        }
        if (showSite) {
            item.set(A_CmsListExplorerDialog.LIST_COLUMN_SITE, resUtil.getSiteTitle());
        }
        // Hook for subclasses to contribute extra columns.
        setAdditionalColumns(item, resUtil);
        return item;
    }
}
public class Application { /** * Factory method for Application list . Returns a list of Application object with page and size preferences * Allow different Client implementaitons * @ param client the client * @ param page the page * @ param size the page size * @ return the list * @ throws IOException unexpected error . */ public static ResourceList < Application > list ( final BandwidthClient client , final int page , final int size ) throws IOException { } }
final String applicationUri = client . getUserResourceUri ( BandwidthConstants . APPLICATIONS_URI_PATH ) ; final ResourceList < Application > applications = new ResourceList < Application > ( page , size , applicationUri , Application . class ) ; applications . setClient ( client ) ; applications . initialize ( ) ; return applications ;
public class AbstractOption { /** * Gets whether the name is valid or not . * @ param optionName the name of the option * @ return true if the name that not contain any illegal character */ public static boolean nameIsLegal ( String optionName ) { } }
for ( char illegalChar : illegalNameCharacters ) { if ( optionName . indexOf ( illegalChar ) >= 0 ) { return false ; } } return true ;
public class GestureController { public boolean onInterceptTouch ( @ NonNull View view , @ NonNull MotionEvent event ) { } }
isInterceptTouchCalled = true ; return onTouchInternal ( view , event ) ;
public class Paging { /** * / * package */ HttpParameter [ ] asPostParameterArray ( ) { } }
List < HttpParameter > list = asPostParameterList ( SMCP , COUNT ) ; if ( list . size ( ) == 0 ) { return NULL_PARAMETER_ARRAY ; } return list . toArray ( new HttpParameter [ list . size ( ) ] ) ;
public class FactoryMultiView {

    /**
     * Creates an estimator for the PnP problem that uses only three observations, which is the
     * minimal case and known as P3P.
     *
     * <p>NOTE: Observations are in normalized image coordinates NOT pixels.</p>
     *
     * @param which the algorithm which is to be returned
     * @param numIterations number of iterations; only used by some algorithms and the recommended
     *        number varies significantly by algorithm
     * @return an estimator which can return multiple estimates
     * @throws IllegalArgumentException if {@code which} is not a handled enum value
     */
    public static EstimateNofPnP pnp_N(EnumPNP which, int numIterations) {
        // Rigid-body (SE3) fit used to turn point correspondences into a camera motion.
        MotionTransformPoint<Se3_F64, Point3D_F64> motionFit = FitSpecialEuclideanOps_F64.fitPoints3D();
        switch (which) {
            case P3P_GRUNERT:
                // Grunert's method solves a degree-5 polynomial (Sturm-sequence root finder).
                P3PGrunert grunert = new P3PGrunert(PolynomialOps.createRootFinder(5, RootFinderType.STURM));
                return new WrapP3PLineDistance(grunert, motionFit);
            case P3P_FINSTERWALDER:
                // Finsterwalder's method solves a degree-4 polynomial.
                P3PFinsterwalder finster = new P3PFinsterwalder(PolynomialOps.createRootFinder(4, RootFinderType.STURM));
                return new WrapP3PLineDistance(finster, motionFit);
            case EPNP:
                // EPnP yields a single solution; wrap it to present the N-solution interface.
                Estimate1ofPnP epnp = pnp_1(which, numIterations, 0);
                return new Estimate1toNofPnP(epnp);
            case IPPE:
                // IPPE works from a homography estimated with total least squares.
                Estimate1ofEpipolar H = FactoryMultiView.homographyTLS();
                return new Estimate1toNofPnP(new IPPE_to_EstimatePnP(H));
        }
        throw new IllegalArgumentException("Type " + which + " not known");
    }
}
public class Chart { /** * Specify animation easing * Default value is { @ link org . dashbuilder . renderer . chartjs . lib . options . Type # EASE _ OUT _ QUART } * @ param type */ public void setAnimationType ( Type type ) { } }
if ( type == null ) options . clearProperty ( ANIMATION_EASING ) ; else options . setProperty ( ANIMATION_EASING , type . getValue ( ) ) ;
public class CacheManagerFactory { /** * Resets the cache manager for a resource type * @ param config * the jawr config * @ param resourceType * the resource type * @ return the cache manager for a resource type */ public static synchronized JawrCacheManager resetCacheManager ( JawrConfig config , String resourceType ) { } }
String cacheMgrAttributeName = CACHE_ATTR_PREFIX + resourceType . toUpperCase ( ) + CACHE_ATTR_SUFFIX ; JawrCacheManager cacheManager = ( JawrCacheManager ) config . getContext ( ) . getAttribute ( cacheMgrAttributeName ) ; if ( cacheManager != null ) { cacheManager . clear ( ) ; config . getContext ( ) . removeAttribute ( cacheMgrAttributeName ) ; } return getCacheManager ( config , resourceType ) ;
public class RelatedTablesCoreExtension { /** * Returns the relationships defined through this extension * @ return a list of ExtendedRelation objects */ public List < ExtendedRelation > getRelationships ( ) { } }
List < ExtendedRelation > result = null ; try { if ( extendedRelationsDao . isTableExists ( ) ) { result = extendedRelationsDao . queryForAll ( ) ; } else { result = new ArrayList < > ( ) ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to query for relationships " + "in " + EXTENSION_NAME , e ) ; } return result ;
public class UpdateTypedLinkFacetRequest { /** * Attributes update structure . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAttributeUpdates ( java . util . Collection ) } or { @ link # withAttributeUpdates ( java . util . Collection ) } if you * want to override the existing values . * @ param attributeUpdates * Attributes update structure . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateTypedLinkFacetRequest withAttributeUpdates ( TypedLinkFacetAttributeUpdate ... attributeUpdates ) { } }
if ( this . attributeUpdates == null ) { setAttributeUpdates ( new java . util . ArrayList < TypedLinkFacetAttributeUpdate > ( attributeUpdates . length ) ) ; } for ( TypedLinkFacetAttributeUpdate ele : attributeUpdates ) { this . attributeUpdates . add ( ele ) ; } return this ;
public class BsonObjectTraversingParser {

    /**
     * Public API, traversal.
     *
     * Advances the tree-traversal cursor and returns the next token, walking
     * the node structure depth-first. State involved: a one-token pushback
     * buffer ({@code nextToken}), a pending-descend flag ({@code startContainer})
     * set whenever a container start token was just emitted, and the current
     * cursor ({@code nodeCursor}) whose parent chain is climbed when a level
     * is exhausted. Returns {@code null} once the whole tree is consumed.
     */
    @Override
    public JsonToken nextToken() throws IOException {
        // A token was pushed back earlier — emit it before advancing.
        if (nextToken != null) {
            _currToken = nextToken;
            nextToken = null;
            return _currToken;
        }
        // are we to descend to a container child?
        if (startContainer) {
            startContainer = false;
            // minor optimization: empty containers can be skipped —
            // emit the matching end token immediately.
            if (!nodeCursor.currentHasChildren()) {
                _currToken = (_currToken == JsonToken.START_OBJECT) ? JsonToken.END_OBJECT : JsonToken.END_ARRAY;
                return _currToken;
            }
            nodeCursor = nodeCursor.iterateChildren();
            _currToken = nodeCursor.nextToken();
            // The first child may itself be a container: remember to descend next call.
            if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
                startContainer = true;
            }
            return _currToken;
        }
        // No more content?
        if (nodeCursor == null) {
            closed = true; // if not already set
            return null;
        }
        // Otherwise, next entry from current cursor
        _currToken = nodeCursor.nextToken();
        if (_currToken != null) {
            if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
                startContainer = true;
            }
            return _currToken;
        }
        // null means no more children at this level; emit the end marker
        // and pop back up to the parent cursor.
        _currToken = nodeCursor.endToken();
        nodeCursor = nodeCursor.getParent();
        return _currToken;
    }
}
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Creates a new cp definition option value rel with the primary key . Does not add the cp definition option value rel to the database . * @ param CPDefinitionOptionValueRelId the primary key for the new cp definition option value rel * @ return the new cp definition option value rel */ @ Override public CPDefinitionOptionValueRel create ( long CPDefinitionOptionValueRelId ) { } }
CPDefinitionOptionValueRel cpDefinitionOptionValueRel = new CPDefinitionOptionValueRelImpl ( ) ; cpDefinitionOptionValueRel . setNew ( true ) ; cpDefinitionOptionValueRel . setPrimaryKey ( CPDefinitionOptionValueRelId ) ; String uuid = PortalUUIDUtil . generate ( ) ; cpDefinitionOptionValueRel . setUuid ( uuid ) ; cpDefinitionOptionValueRel . setCompanyId ( companyProvider . getCompanyId ( ) ) ; return cpDefinitionOptionValueRel ;
public class DMRDriver { /** * Returns a { @ link List } of objects extracted from the given { @ code nodeList } * @ param nodeList the source list to extract the result values from * @ return a { @ link List } of objects extracted from the given { @ code nodeList } * @ throws ProtocolException */ private static List < Object > toObjectList ( List < ModelNode > nodeList ) throws ProtocolException { } }
if ( nodeList . isEmpty ( ) ) { return Collections . emptyList ( ) ; } else { ArrayList < Object > result = new ArrayList < > ( nodeList . size ( ) ) ; for ( ModelNode node : nodeList ) { if ( node . hasDefined ( JBossASClient . RESULT ) ) { result . add ( toObject ( node . get ( JBossASClient . RESULT ) ) ) ; } else { throw new IllegalStateException ( "No 'result' in a nodeList item [" + node + "]" ) ; } } return Collections . unmodifiableList ( result ) ; }
public class NameSpace { /** * Gets the package . * @ return the package */ String getPackage ( ) { } }
if ( this . packageName != null ) return this . packageName ; if ( this . parent != null ) return this . parent . getPackage ( ) ; return null ;
public class JDBCDatabaseMetaData {

    /**
     * Retrieves a description of the foreign key columns in the given foreign
     * key table that reference the primary key (or a unique constraint) of the
     * parent table. Rows are ordered by FKTABLE_CAT, FKTABLE_SCHEM,
     * FKTABLE_NAME, and KEY_SEQ.
     *
     * <p>Each row has the columns: PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME,
     * PKCOLUMN_NAME, FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, FKCOLUMN_NAME,
     * KEY_SEQ, UPDATE_RULE, DELETE_RULE, FK_NAME, PK_NAME, DEFERRABILITY —
     * as specified by {@link java.sql.DatabaseMetaData#getCrossReference}.</p>
     *
     * <p><b>HSQLDB-specific:</b> unquoted identifiers are stored in upper
     * case, quoted identifiers verbatim; all comparisons against the name
     * arguments are case sensitive, so arguments must match the stored form
     * exactly (including case).</p>
     *
     * @param parentCatalog  a catalog name; "" retrieves those without a
     *                       catalog; {@code null} drops catalog from the criteria
     * @param parentSchema   a schema name; "" retrieves those without a
     *                       schema; {@code null} drops schema from the criteria
     * @param parentTable    the name of the table that exports the key (required)
     * @param foreignCatalog a catalog name; "" / {@code null} as above
     * @param foreignSchema  a schema name; "" / {@code null} as above
     * @param foreignTable   the name of the table that imports the key (required)
     * @return a {@code ResultSet} where each row is a foreign key column description
     * @exception SQLException if a database access error occurs
     * @see #getImportedKeys
     * @see #supportsMixedCaseQuotedIdentifiers
     * @see #storesUpperCaseIdentifiers
     */
    public ResultSet getCrossReference(String parentCatalog, String parentSchema,
            String parentTable, String foreignCatalog, String foreignSchema,
            String foreignTable) throws SQLException {
        // Both table names are mandatory per the JDBC contract.
        if (parentTable == null) {
            throw Util.nullArgument("parentTable");
        }
        if (foreignTable == null) {
            throw Util.nullArgument("foreignTable");
        }
        // Map JDBC schema-name conventions onto HSQLDB's internal form.
        parentSchema = translateSchema(parentSchema);
        foreignSchema = translateSchema(foreignSchema);
        StringBuffer select = toQueryPrefix("SYSTEM_CROSSREFERENCE")
                .append(and("PKTABLE_CAT", "=", parentCatalog))
                .append(and("PKTABLE_SCHEM", "=", parentSchema))
                .append(and("PKTABLE_NAME", "=", parentTable))
                .append(and("FKTABLE_CAT", "=", foreignCatalog))
                .append(and("FKTABLE_SCHEM", "=", foreignSchema))
                .append(and("FKTABLE_NAME", "=", foreignTable));
        // by default, query already returns the table ordered by
        // FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, and KEY_SEQ.
        return execute(select.toString());
    }
}
public class AgentAttacher {

    /**
     * Guards against two different stagemonitor versions being active in the
     * same JVM (e.g. two applications on one application server).
     *
     * <p>The check works via JVM-global system properties: the first
     * stagemonitor to run records its version under
     * {@code stagemonitor.version}; later instances compare their own version
     * against that value. On a mismatch an unhealthy health check is
     * registered and {@code false} is returned, otherwise the current version
     * (and the class loader, for diagnostics) is recorded and {@code true} is
     * returned.</p>
     *
     * <p>See the class javadoc for why mixed versions are problematic
     * (bootstrap-class instrumentation and {@code Dispatcher} API drift).</p>
     *
     * @return {@code true} if no conflicting version is registered,
     *         {@code false} if a different version was already detected
     */
    private static boolean assertNoDifferentStagemonitorVersionIsDeployedOnSameJvm() {
        final String stagemonitorVersionKey = "stagemonitor.version";
        final String stagemonitorClassLoaderKey = "stagemonitor.classLoader";
        final String alreadyRegisteredVersion = System.getProperty(stagemonitorVersionKey);
        final String currentVersion = corePlugin.getVersion();
        // NOTE(review): if currentVersion is null while another version is already
        // registered, currentVersion.equals(...) throws NPE — confirm getVersion()
        // can never return null in that situation.
        if (alreadyRegisteredVersion != null && !currentVersion.equals(alreadyRegisteredVersion)) {
            final String msg = String.format("Detected a different version of stagemonitor on the same JVM:" +
                    "already registered version: %s current version: %s. " +
                    "It is not supported to have different versions of stagemonitor on the same JVM. " +
                    "For more details take a look at the javadoc.", alreadyRegisteredVersion, currentVersion);
            // Surface the conflict through the health check registry instead of failing hard.
            healthCheckRegistry.register("Agent attachment", ImmediateResult.of(HealthCheck.Result.unhealthy(msg)));
            return false;
        }
        if (currentVersion != null) {
            // Claim the JVM-wide slot for this version.
            System.setProperty(stagemonitorVersionKey, currentVersion);
            System.setProperty(stagemonitorClassLoaderKey, Stagemonitor.class.getClassLoader().toString());
        }
        return true;
    }
}
public class Matrix3 { /** * Sets all of the matrix ' s components at once . * @ return a reference to this matrix , for chaining . */ public Matrix3 set ( double m00 , double m10 , double m20 , double m01 , double m11 , double m21 , double m02 , double m12 , double m22 ) { } }
this . m00 = m00 ; this . m01 = m01 ; this . m02 = m02 ; this . m10 = m10 ; this . m11 = m11 ; this . m12 = m12 ; this . m20 = m20 ; this . m21 = m21 ; this . m22 = m22 ; return this ;
public class SQLiteQueryBuilder {

    /**
     * Perform a query by combining all current builder settings with the
     * information passed into this method.
     *
     * @param db the database to query on
     * @param projectionIn a list of which columns to return; {@code null}
     *        returns all columns (discouraged — reads unused data)
     * @param selection a filter declaring which rows to return, formatted as
     *        an SQL WHERE clause (excluding the WHERE itself); {@code null}
     *        returns all rows
     * @param selectionArgs values bound (as Strings) to {@code ?}s in
     *        {@code selection}, in order of appearance
     * @param groupBy SQL GROUP BY clause (excluding GROUP BY); {@code null}
     *        for no grouping
     * @param having SQL HAVING clause (excluding HAVING); only meaningful
     *        with {@code groupBy}
     * @param sortOrder SQL ORDER BY clause (excluding ORDER BY); {@code null}
     *        for default (possibly unordered) order
     * @param limit LIMIT clause; {@code null} for no limit
     * @param cancellationSignal a signal to cancel the operation in progress,
     *        or {@code null}; if canceled, {@link OperationCanceledException}
     *        is thrown when the query is executed
     * @return a cursor over the result set, or {@code null} if no table was set
     * @see android.content.ContentResolver#query(android.net.Uri, String[],
     *      String, String[], String)
     */
    public Cursor query(SQLiteDatabase db, String[] projectionIn, String selection,
            String[] selectionArgs, String groupBy, String having, String sortOrder,
            String limit, CancellationSignal cancellationSignal) {
        // Nothing to query against — mirror Android's contract of returning null.
        if (mTables == null) {
            return null;
        }
        if (mStrict && selection != null && selection.length() > 0) {
            // Validate the user-supplied selection to detect syntactic anomalies
            // in the selection string that could indicate a SQL injection attempt.
            // The idea is to ensure that the selection clause is a valid SQL expression
            // by compiling it twice: once wrapped in parentheses and once as
            // originally specified. An attacker cannot create an expression that
            // would escape the SQL expression while maintaining balanced parentheses
            // in both the wrapped and original forms.
            String sqlForValidation = buildQuery(projectionIn, "(" + selection + ")", groupBy, having, sortOrder, limit);
            validateQuerySql(db, sqlForValidation, cancellationSignal); // will throw if query is invalid
        }
        String sql = buildQuery(projectionIn, selection, groupBy, having, sortOrder, limit);
        DLog.d(TAG, "Performing query: " + sql);
        return db.rawQueryWithFactory(mFactory, sql, selectionArgs,
                SQLiteDatabase.findEditTable(mTables), cancellationSignal); // will throw if query is invalid
    }
}
public class ClusKernel {

    /**
     * Overwrites this cluster's statistics with those of the given cluster.
     * This is useful when the weight of an entry becomes too small and we want
     * to forget the information of the old points.
     *
     * NOTE(review): the original javadoc claims N/classCount are *added* to
     * this cluster, but the code below assigns ({@code this.N = other.N}) —
     * confirm which behavior is intended.
     *
     * @param other the cluster whose information overwrites this one
     */
    protected void overwriteOldCluster(ClusKernel other) {
        this.totalN = other.totalN;
        this.N = other.N;
        // Copy the linear-sum and squared-sum arrays in place; both clusters
        // must have been built over the same attribute dimensionality.
        assert (LS.length == other.LS.length);
        System.arraycopy(other.LS, 0, LS, 0, LS.length);
        assert (SS.length == other.SS.length);
        System.arraycopy(other.SS, 0, SS, 0, SS.length);
    }
}
public class AppVersionService { /** * Returns the list of distinct versions for the given application sorted in * reverse chronological order * @ param cluster * @ param user * @ param appId * @ return the list of versions sorted in reverse chronological order ( the * list will be empty if no versions are found ) * @ throws IOException */ public List < VersionInfo > getDistinctVersions ( String cluster , String user , String appId ) throws IOException { } }
Get get = new Get ( getRowKey ( cluster , user , appId ) ) ; List < VersionInfo > versions = Lists . newArrayList ( ) ; Long ts = 0L ; Table versionsTable = null ; try { versionsTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_APP_VERSION_TABLE ) ) ; Result r = versionsTable . get ( get ) ; if ( r != null && ! r . isEmpty ( ) ) { for ( Cell c : r . listCells ( ) ) { ts = 0L ; try { ts = Bytes . toLong ( CellUtil . cloneValue ( c ) ) ; versions . add ( new VersionInfo ( Bytes . toString ( CellUtil . cloneQualifier ( c ) ) , ts ) ) ; } catch ( IllegalArgumentException e1 ) { // Bytes . toLong may throw IllegalArgumentException , although // unlikely . LOG . error ( "Caught conversion error while converting timestamp to long value " + e1 . getMessage ( ) ) ; // rethrow the exception in order to propagate it throw e1 ; } } } if ( versions . size ( ) > 0 ) { Collections . sort ( versions ) ; } } finally { if ( versionsTable != null ) { versionsTable . close ( ) ; } } return versions ;
public class Scan { /** * Scans the named class file for uses of deprecated APIs . * @ param fileName the class file to scan * @ return true on success , false on failure */ public boolean processClassFile ( String fileName ) { } }
Path path = Paths . get ( fileName ) ; try { ClassFile cf = ClassFile . read ( path ) ; processClass ( cf ) ; return true ; } catch ( NoSuchFileException nsfe ) { errorNoFile ( fileName ) ; } catch ( IOException | ConstantPoolException ex ) { errorException ( ex ) ; } return false ;
public class N { /** * Removes the first occurrence of the specified element from the specified * array . All subsequent elements are shifted to the left ( subtracts one * from their indices ) . If the array doesn ' t contains such an element , no * elements are removed from the array . * This method returns a new array with the same elements of the input array * except the first occurrence of the specified element . The component type * of the returned array is always the same as that of the input array . * @ param a * @ param element * the element to be removed * @ return A new array containing the existing elements except the first * occurrence of the specified element . */ public static < T > T [ ] remove ( final T [ ] a , final Object element ) { } }
if ( N . isNullOrEmpty ( a ) ) { return a ; } int index = indexOf ( a , 0 , element ) ; return index == INDEX_NOT_FOUND ? a . clone ( ) : delete ( a , index ) ;
public class CollectionHelper { /** * new array list from iterable . * @ param iterables iterables to add to array list * @ return hash set */ @ SafeVarargs public static < T > ArrayList < T > newArrayList ( final Iterable < T > ... iterables ) { } }
final ArrayList < T > resultList = newArrayList ( ) ; for ( final Iterable < T > oneIterable : iterables ) { for ( final T oneElement : oneIterable ) { resultList . add ( oneElement ) ; } } return resultList ;
public class InputPanel { /** * Adds the xml description of the panels content to the StringBuilder . * Errors which occur during the xml transformation will be added to the * ConfigVerification . * @ param builder * Reference to a StringBuilder object * @ param errors * Reference to the ConfigVerification object */ @ Override public void toXML ( final StringBuilder builder , final ConfigVerification errors ) { } }
SurrogateModes surMode = controller . getSurrogates ( ) ; String wikiEncoding = encodingField . getText ( ) ; if ( wikiEncoding . length ( ) == 0 ) { errors . add ( new ConfigItem ( ConfigItemTypes . WARNING , ConfigErrorKeys . MISSING_VALUE , "The CharacterEncoding was not set." ) ) ; } builder . append ( "\t<input>\r\n" ) ; builder . append ( "\t\t<MODE_SURROGATES>" + surMode + "</MODE_SURROGATES>\r\n" ) ; builder . append ( "\t\t<WIKIPEDIA_ENCODING>" + wikiEncoding + "</WIKIPEDIA_ENCODING>\r\n" ) ; ArchiveRegistry reg = controller . getArchives ( ) ; int size = reg . getRowCount ( ) ; ArchiveDescription archive ; InputType type ; String archivePath ; long start ; if ( size == 0 ) { errors . add ( new ConfigItem ( ConfigItemTypes . WARNING , ConfigErrorKeys . MISSING_VALUE , "No source file has been set." ) ) ; } for ( int i = 0 ; i < size ; i ++ ) { archive = reg . get ( i ) ; type = archive . getType ( ) ; switch ( type ) { case XML : break ; case BZIP2 : // bzip is always enabled - nothing to check here break ; case SEVENZIP : if ( ! controller . is7ZipEnabled ( ) ) { errors . add ( new ConfigItem ( ConfigItemTypes . ERROR , ConfigErrorKeys . ILLEGAL_INPUT_FILE , "The SevenUip mode is not " + "activated" ) ) ; } break ; } archivePath = archive . getPath ( ) ; if ( archivePath . length ( ) == 0 ) { errors . add ( new ConfigItem ( ConfigItemTypes . ERROR , ConfigErrorKeys . PATH_NOT_SET , "The archive path is missing" ) ) ; } start = archive . getStartPosition ( ) ; if ( start < 0 ) { errors . add ( new ConfigItem ( ConfigItemTypes . ERROR , ConfigErrorKeys . VALUE_OUT_OF_RANGE , "The archive start value should be at least 0" ) ) ; } builder . append ( "\t\t<archive>\r\n" ) ; builder . append ( "\t\t\t<type>" + type + "</type>\r\n" ) ; builder . append ( "\t\t\t<path>\"" + archivePath + "\"</path>\r\n" ) ; builder . append ( "\t\t\t<start>" + start + "</start>\r\n" ) ; builder . append ( "\t\t</archive>\r\n" ) ; } builder . append ( "\t</input>\r\n" ) ;
public class CacheMapUtil {

    /**
     * Retrieves all values of the cached map stored under the given key.
     *
     * @param cacheConfigBean the cache configuration used to locate the cache
     * @param key the map's cache key
     * @return a {@link Maybe} emitting the whole value list when the key
     *         exists, or completing empty when it does not; errors from the
     *         cache service are propagated as exceptions
     */
    public static Maybe<List<String>> values(CacheConfigBean cacheConfigBean, String key) {
        // The anonymous JSONObject with an instance initializer (double-brace)
        // builds the request payload for the remote cache service call.
        return SingleRxXian.call(CacheService.CACHE_SERVICE, "cacheMapValues", new JSONObject() {
            {
                put("cacheConfig", cacheConfigBean);
                put("key", key);
            }
        }).flatMapMaybe(unitResponse -> {
            // Fail fast on an unsuccessful response.
            unitResponse.throwExceptionIfNotSuccess();
            if (unitResponse.getData() == null)
                // Missing key: complete without a value.
                return Maybe.empty();
            else
                return Maybe.just(Reflection.toTypedList(unitResponse.getData(), String.class));
        });
    }
}
public class CommonExprTransformer { /** * < p > transformArgs . < / p > * @ param args an array of { @ link ameba . db . dsl . QueryExprMeta . Val } objects . * @ return array { @ link java . lang . Object } object . */ public static Object [ ] transformArgs ( Val < Expression > [ ] args ) { } }
Object [ ] objects = new Object [ args . length ] ; for ( int i = 0 ; i < args . length ; i ++ ) { objects [ i ] = args [ i ] . object ( ) ; } return objects ;
public class RegistrationResource {

    /**
     * Create a new user.
     * If a {@link Person} is found, convert that object to a {@link User}
     * object. Do not auto-create a new user.
     *
     * @param securityContext the current security context
     * @param credentialKey the property key holding the credential
     * @param credentialValue the credential value to match
     * @param propertySet additional properties for the user
     * @param confKey the configuration key
     * @return the resulting user principal
     */
    public Principal createUser(final SecurityContext securityContext, final PropertyKey credentialKey,
            final String credentialValue, final Map<String, Object> propertySet, final String confKey) {
        // Delegates with autoCreate = false: existing persons are converted,
        // but no new user is created automatically.
        return createUser(securityContext, credentialKey, credentialValue, propertySet, false, confKey);
    }
}
public class ConfigurationBuilder { /** * Returns the Moore ' s law multiplier we ' re using for getDefaultGuessTypes ( ) . * We will only return a multiplier greater than 1 if it has been more than year since we ' ve updated the constants . * The date for this function is : 2018-08-01 * @ return the Moore ' s Law multiplier */ public static BigDecimal getMooresMultiplier ( ) { } }
double years = ( System . currentTimeMillis ( ) - START ) / YEAR ; // Only use the multiplier if we haven ' t updated the value date in over a year . if ( years <= 1d ) { years = 0 ; } // the multiplier for Moore ' s law is 2 to the power of ( years / 2) return BigDecimal . valueOf ( Math . pow ( 2d , years / 2d ) ) ;
public class NetscapeCertTypeExtension { /** * Get the attribute value . */ public Boolean get ( String name ) throws IOException { } }
return Boolean . valueOf ( isSet ( getPosition ( name ) ) ) ;
public class EnvironmentSettingsInner {

    /**
     * Get environment setting.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labAccountName The name of the lab Account.
     * @param labName The name of the lab.
     * @param environmentSettingName The name of the environment Setting.
     * @param expand Specify the $expand query. Example: 'properties($select=publishingState)'
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<EnvironmentSettingInner> getAsync(String resourceGroupName, String labAccountName,
            String labName, String environmentSettingName, String expand,
            final ServiceCallback<EnvironmentSettingInner> serviceCallback) {
        // Adapts the observable-based overload to the callback-style API.
        return ServiceFuture.fromResponse(
                getWithServiceResponseAsync(resourceGroupName, labAccountName, labName, environmentSettingName, expand),
                serviceCallback);
    }
}
public class LdapUtils { /** * Execute search operation . * @ param connectionFactory the connection factory * @ param baseDn the base dn * @ param filter the filter * @ param binaryAttributes the binary attributes * @ param returnAttributes the return attributes * @ return the response * @ throws LdapException the ldap exception */ public static Response < SearchResult > executeSearchOperation ( final ConnectionFactory connectionFactory , final String baseDn , final SearchFilter filter , final String [ ] binaryAttributes , final String [ ] returnAttributes ) throws LdapException { } }
try ( val connection = createConnection ( connectionFactory ) ) { val searchOperation = new SearchOperation ( connection ) ; val request = LdapUtils . newLdaptiveSearchRequest ( baseDn , filter , binaryAttributes , returnAttributes ) ; request . setReferralHandler ( new SearchReferralHandler ( ) ) ; return searchOperation . execute ( request ) ; }
public class ByteUtils {

    /**
     * Converts a hexadecimal string into a byte array.
     *
     * NOTE(review): an odd-length input is padded on the RIGHT with '0'
     * (e.g. "ABC" becomes "ABC0"), which shifts the last nibble — confirm
     * this right-padding is intentional rather than a left-pad ("0ABC").
     *
     * @param hexStr a hexadecimal string (case-insensitive; may be empty)
     * @return the decoded byte array (empty array for an empty string)
     * @throws IllegalArgumentException if the string contains non-hex characters
     */
    public static byte[] fromHexString(String hexStr) {
        if (!hexStr.matches("^[0-9A-Fa-f]*$"))
            throw new IllegalArgumentException("Invalid hexadecimal string");
        if (hexStr.isEmpty())
            return new byte[0];
        // Pad to an even number of nibbles so each byte is fully specified.
        int complementary = hexStr.length() % 2;
        if (complementary != 0)
            hexStr += "0";
        // rjust pads/aligns BigInteger.toByteArray() output to the exact byte
        // width (toByteArray may add a leading sign byte or drop leading zeros).
        return rjust(new BigInteger(hexStr, 16).toByteArray(), hexStr.length() / 2);
    }
}
public class SessionConfigurationException { /** * Converts a Throwable to a SessionConfigurationException . If the Throwable is a * SessionConfigurationException , it will be passed through unmodified ; otherwise , it will be wrapped * in a new SessionConfigurationException . * @ param cause the Throwable to convert * @ return a SessionConfigurationException */ public static SessionConfigurationException fromThrowable ( Throwable cause ) { } }
return ( cause instanceof SessionConfigurationException ) ? ( SessionConfigurationException ) cause : new SessionConfigurationException ( cause ) ;
public class AddressTemplate { /** * Appends the specified template to this template and returns a new template . If the specified template does * not start with a slash , " / " is automatically appended . * @ param template the template to append ( makes no difference whether it starts with " / " or not ) * @ return a new template */ public AddressTemplate append ( String template ) { } }
String slashTemplate = template . startsWith ( "/" ) ? template : "/" + template ; return AddressTemplate . of ( this . template + slashTemplate ) ;
public class WalkModFacade {

    /**
     * Sets a specific reader for a specific chain.
     *
     * <p>No-op when both {@code type} and {@code path} are blank. Otherwise the
     * walkmod config file is created if absent, the JVM-global
     * {@code user.dir} property is temporarily pointed at the execution
     * directory while the provider updates the configuration, and is restored
     * in the {@code finally} block (order matters: restore before reporting).</p>
     *
     * @param chain chain to apply the reader to
     * @param type reader type to set
     * @param path reader path to set
     * @param recursive whether to apply the reader to all submodules
     * @param params reader parameters
     * @throws Exception if the walkmod configuration file can't be read
     */
    public void setReader(String chain, String type, String path, boolean recursive, Map<String, String> params) throws Exception {
        // At least one of type/path must carry a non-blank value.
        if ((type != null && !"".equals(type.trim())) || (path != null && !"".equals(path.trim()))) {
            long startTime = System.currentTimeMillis();
            Exception exception = null;
            if (!cfg.exists()) {
                init();
            }
            // Swap user.dir to the execution directory for the duration of the update.
            userDir = new File(System.getProperty("user.dir")).getAbsolutePath();
            System.setProperty("user.dir", options.getExecutionDirectory().getAbsolutePath());
            try {
                ConfigurationManager manager = new ConfigurationManager(cfg, false);
                ProjectConfigurationProvider cfgProvider = manager.getProjectConfigurationProvider();
                cfgProvider.setReader(chain, type, path, recursive, params);
            } catch (Exception e) {
                // Remember the failure; it is reported via updateMsg below.
                exception = e;
            } finally {
                System.setProperty("user.dir", userDir);
                updateMsg(startTime, exception);
            }
        }
    }
}
public class WrongStepServerInterceptor {

    /**
     * {@inheritDoc}
     *
     * Increments the session step counter before delegating paint preparation
     * to the parent interceptor.
     */
    @Override
    public void preparePaint(final Request request) {
        // Bump the step counter for the current UI context, then continue the chain.
        StepCountUtil.incrementSessionStep(UIContextHolder.getCurrent());
        super.preparePaint(request);
    }
}
public class Util { /** * Creates a string of given length where each character comes from a * set of values 0-9 followed by A - Z . * @ param length returned string will be this long . Less than 1k + 1 * @ param maxVal maximum ordinal value of characters . If < than 0, * return null . If > 35 , 35 is used instead . * @ return String the random string */ public static String makeRandomString ( int length , int maxVal ) { } }
if ( length < 0 ) { return null ; } length = Math . min ( length , 1025 ) ; if ( maxVal < 0 ) { return null ; } maxVal = Math . min ( maxVal , 35 ) ; StringBuffer res = new StringBuffer ( ) ; Random rand = new Random ( ) ; for ( int i = 0 ; i <= length ; i ++ ) { res . append ( randChars [ rand . nextInt ( maxVal + 1 ) ] ) ; } return res . toString ( ) ;
public class AbstractCatchEventBuilder { /** * Sets an event definition for the timer with a time date . * @ param timerDate the time date of the timer * @ return the builder object */ public B timerWithDate ( String timerDate ) { } }
TimeDate timeDate = createInstance ( TimeDate . class ) ; timeDate . setTextContent ( timerDate ) ; TimerEventDefinition timerEventDefinition = createInstance ( TimerEventDefinition . class ) ; timerEventDefinition . setTimeDate ( timeDate ) ; element . getEventDefinitions ( ) . add ( timerEventDefinition ) ; return myself ;
public class Combinatorics { /** * Returns all the possible combinations of the set . * @ param elements * @ param subsetSize * @ param < T > * @ return */ public static < T > Set < Set < T > > combinations ( Set < T > elements , int subsetSize ) { } }
return combinationsStream ( elements , subsetSize ) . collect ( Collectors . toSet ( ) ) ;
public class RaftServiceContext { /** * Executes the given query on the state machine . * @ param index The index of the query . * @ param sequence The query sequence number . * @ param timestamp The timestamp of the query . * @ param session The session that submitted the query . * @ param operation The query to execute . * @ return A future to be completed with the query result . */ public CompletableFuture < OperationResult > executeQuery ( long index , long sequence , long timestamp , RaftSession session , PrimitiveOperation operation ) { } }
CompletableFuture < OperationResult > future = new CompletableFuture < > ( ) ; executeQuery ( index , sequence , timestamp , session , operation , future ) ; return future ;
public class ModelsImpl { /** * Get All Entity Roles for a given entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the List & lt ; EntityRole & gt ; object if successful . */ public List < EntityRole > getHierarchicalEntityRoles ( UUID appId , String versionId , UUID hEntityId ) { } }
return getHierarchicalEntityRolesWithServiceResponseAsync ( appId , versionId , hEntityId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class CeProcessingSchedulerImpl { /** * This method is stopping all the workers giving them a delay before killing them . */ @ Override public void stopScheduling ( ) { } }
LOG . debug ( "Stopping compute engine" ) ; // Requesting all workers to stop for ( ChainingCallback chainingCallback : chainingCallbacks ) { chainingCallback . stop ( false ) ; } // Workers have 40s to gracefully stop processing tasks long until = System . currentTimeMillis ( ) + gracefulStopTimeoutInMs ; LOG . info ( "Waiting for workers to finish in-progress tasks" ) ; while ( System . currentTimeMillis ( ) < until && ceWorkerController . hasAtLeastOneProcessingWorker ( ) ) { try { Thread . sleep ( 200L ) ; } catch ( InterruptedException e ) { LOG . debug ( "Graceful stop period has been interrupted: {}" , e ) ; Thread . currentThread ( ) . interrupt ( ) ; break ; } } if ( ceWorkerController . hasAtLeastOneProcessingWorker ( ) ) { LOG . info ( "Some in-progress tasks did not finish in due time. Tasks will be stopped." ) ; } // Interrupting the tasks for ( ChainingCallback chainingCallback : chainingCallbacks ) { chainingCallback . stop ( true ) ; }
public class SVGParser { /** * supported if we are to render this element */ private static Set < String > parseRequiredFeatures ( String val ) { } }
TextScanner scan = new TextScanner ( val ) ; HashSet < String > result = new HashSet < > ( ) ; while ( ! scan . empty ( ) ) { String feature = scan . nextToken ( ) ; if ( feature . startsWith ( FEATURE_STRING_PREFIX ) ) { result . add ( feature . substring ( FEATURE_STRING_PREFIX . length ( ) ) ) ; } else { // Not a feature string we recognise or support . ( In order to avoid accidentally // matches with our truncated feature strings , we ' ll replace it with a string // we know for sure won ' t match anything . result . add ( "UNSUPPORTED" ) ; } scan . skipWhitespace ( ) ; } return result ;
public class MemMeter { /** * { @ inheritDoc } */ @ Override public double getValue ( ) { } }
final Runtime runtime = Runtime . getRuntime ( ) ; memAlreadyUsed = memAlreadyUsed + runtime . totalMemory ( ) - runtime . freeMemory ( ) ; return new BigDecimal ( memAlreadyUsed , MathContext . DECIMAL128 ) . divide ( new BigDecimal ( scale . getNumberOfBytes ( ) ) , MathContext . DECIMAL128 ) . doubleValue ( ) ;
public class VdmEvaluationAction { /** * IEditorActionDelegate */ public void setActiveEditor ( IAction action , IEditorPart targetEditor ) { } }
if ( targetEditor instanceof VdmEditor ) { setEditor ( ( VdmEditor ) targetEditor ) ; } else { setEditor ( null ) ; }
public class Bootstrap { /** * Helper method to load a list of DNS SRV records . * @ param serviceName the service to locate . * @ param ctx the directory context to fetch from . * @ return the list of dns records * @ throws NamingException if something goes wrong during the load process . */ static List < String > loadDnsRecords ( final String serviceName , final DirContext ctx ) throws NamingException { } }
Attributes attrs = ctx . getAttributes ( serviceName , new String [ ] { "SRV" } ) ; NamingEnumeration < ? > servers = attrs . get ( "srv" ) . getAll ( ) ; List < String > records = new ArrayList < String > ( ) ; while ( servers . hasMore ( ) ) { DnsRecord record = DnsRecord . fromString ( ( String ) servers . next ( ) ) ; records . add ( record . getHost ( ) ) ; } return records ;
public class CronOption { @ Override public OptionalThing < LaJobNote > getJobNote ( ) { } }
final LaJobNote note = ( jobTitle != null || jobDesc != null ) ? LaJobNote . of ( jobTitle , jobDesc ) : null ; return OptionalThing . ofNullable ( note , ( ) -> { throw new IllegalStateException ( "Not found the job note (both title and description)." ) ; } ) ;