idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
30,000
private void applyL2Reg ( final double eta_t ) { if ( lambda0 > 0 ) //apply L2 regularization for ( Vec v : ws ) v . mutableMultiply ( 1 - eta_t * lambda0 ) ; }
Applies L2 regularization to the model
56
9
30,001
/**
 * Applies L1 regularization to the model using the cumulative-penalty
 * scheme of Tsuruoka et al. (Figure 2 of the paper, as the in-code
 * comments note), updating only the weights touched by the non-zero
 * entries of the current input. A no-op when lambda1 is not positive.
 * l1U accumulates the total penalty seen so far and l1Q tracks the
 * penalty actually applied to each coordinate.
 *
 * @param eta_t the learning rate at the current time step
 * @param x the (sparse) input vector whose non-zero indices get penalized
 */
private void applyL1Reg ( final double eta_t , Vec x ) { //apply l1 regularization if ( lambda1 > 0 ) { l1U += eta_t * lambda1 ; //line 6: in Tsuruoka et al paper, figure 2 for ( int k = 0 ; k < ws . length ; k ++ ) { final Vec w_k = ws [ k ] ; final double [ ] l1Q_k = l1Q [ k ] ; for ( IndexValue iv : x ) { final int i = iv . getIndex ( ) ; //see "APPLYPENALTY(i)" on line 15: from Figure 2 in Tsuruoka et al paper final double z = w_k . get ( i ) ; double newW_i = 0 ; if ( z > 0 ) newW_i = Math . max ( 0 , z - ( l1U + l1Q_k [ i ] ) ) ; else if ( z < 0 ) newW_i = Math . min ( 0 , z + ( l1U - l1Q_k [ i ] ) ) ; l1Q_k [ i ] += ( newW_i - z ) ; w_k . set ( i , newW_i ) ; } } } }
Applies L1 regularization to the model
281
9
30,002
/**
 * Projects 'vec' through the random projection matrix into 'projected',
 * then packs the sign bit of each projected coordinate into 32-bit
 * integers written to projLocation[slot ... slot+slotsPerEntry-1]. A set
 * bit means the corresponding projected value was >= 0.
 * NOTE(review): assumes projected.length() >= slotsPerEntry * Integer.SIZE
 * — confirm against callers.
 */
private void projectVector ( Vec vec , int slot , int [ ] projLocation , Vec projected ) { randProjMatrix . multiply ( vec , 1.0 , projected ) ; int pos = 0 ; int bitsLeft = Integer . SIZE ; int curVal = 0 ; while ( pos < slotsPerEntry ) { while ( bitsLeft > 0 ) { curVal <<= 1 ; if ( projected . get ( pos * Integer . SIZE + ( Integer . SIZE - bitsLeft ) ) >= 0 ) curVal |= 1 ; bitsLeft -- ; } projLocation [ slot + pos ] = curVal ; curVal = 0 ; bitsLeft = Integer . SIZE ; pos ++ ; } }
Projects a given vector into the array of integers .
148
11
30,003
public static void prune ( TreeNodeVisitor root , PruningMethod method , ClassificationDataSet testSet ) { //TODO add vargs for extra arguments that may be used by pruning methods if ( method == PruningMethod . NONE ) return ; else if ( method == PruningMethod . REDUCED_ERROR ) pruneReduceError ( null , - 1 , root , testSet ) ; else if ( method == PruningMethod . ERROR_BASED ) pruneErrorBased ( null , - 1 , root , testSet , 0.25 ) ; else throw new RuntimeException ( "BUG: please report" ) ; }
Performs pruning starting from the root node of a tree
136
12
30,004
private static int pruneReduceError ( TreeNodeVisitor parent , int pathFollowed , TreeNodeVisitor current , ClassificationDataSet testSet ) { if ( current == null ) return 0 ; int nodesPruned = 0 ; //If we are not a leaf, prune our children if ( ! current . isLeaf ( ) ) { //Each child should only be given testing points that would decend down that path int numSplits = current . childrenCount ( ) ; List < ClassificationDataSet > splits = new ArrayList <> ( numSplits ) ; IntList hadMissing = new IntList ( ) ; double [ ] fracs = new double [ numSplits ] ; double wSum = 0 ; for ( int i = 0 ; i < numSplits ; i ++ ) splits . ( testSet . emptyClone ( ) ) ; for ( int i = 0 ; i < testSet . size ( ) ; i ++ ) { double w_i = testSet . getWeight ( i ) ; int path = current . getPath ( testSet . getDataPoint ( i ) ) ; if ( path >= 0 ) { splits . get ( path ) . addDataPoint ( testSet . getDataPoint ( i ) , testSet . getDataPointCategory ( i ) , w_i ) ; wSum += w_i ; fracs [ path ] += w_i ; } else //missing value hadMissing . add ( i ) ; } //normalize fracs for ( int i = 0 ; i < numSplits ; i ++ ) fracs [ i ] /= wSum + 1e-15 ; if ( ! hadMissing . isEmpty ( ) ) DecisionStump . distributMissing ( splits , fracs , testSet , hadMissing ) ; for ( int i = numSplits - 1 ; i >= 0 ; i -- ) //Go backwards so child removals dont affect indices nodesPruned += pruneReduceError ( current , i , current . getChild ( i ) , splits . get ( i ) ) ; } //If we pruned all our children, we may have become a leaf! Should we prune ourselves? if ( current . isLeaf ( ) && parent != null ) //Compare this nodes accuracy vs its parrent { double childCorrect = 0 ; double parrentCorrect = 0 ; for ( int i = 0 ; i < testSet . size ( ) ; i ++ ) { DataPoint dp = testSet . getDataPoint ( i ) ; int truth = testSet . getDataPointCategory ( i ) ; if ( current . localClassify ( dp ) . mostLikely ( ) == truth ) childCorrect += testSet . getWeight ( i ) ; if ( parent . localClassify ( dp ) . 
mostLikely ( ) == truth ) parrentCorrect += testSet . getWeight ( i ) ; } if ( parrentCorrect >= childCorrect ) //We use >= b/c if they are the same, we assume smaller trees are better { parent . disablePath ( pathFollowed ) ; return nodesPruned + 1 ; //We prune our children and ourselves } return nodesPruned ; } return nodesPruned ; }
Performs pruning to reduce error on the testing set
680
11
30,005
/**
 * Sets the scale of the Levy distribution.
 *
 * @param scale the new scale value
 * @throws ArithmeticException if the value is not a positive, finite number
 */
public void setScale(double scale)
{
    //!(scale > 0) rejects non-positive values and NaN; infinity checked separately
    if (!(scale > 0) || Double.isInfinite(scale))
        throw new ArithmeticException("Scale must be a positive value, not " + scale);
    this.scale = scale;
    this.logScale = log(scale);//cache the log of the scale
}
Sets the scale of the Levy distribution
70
8
30,006
/**
 * Sets the location (shift) of the Levy distribution.
 *
 * @param location the new location value
 * @throws ArithmeticException if the value is NaN or infinite
 */
public void setLocation(double location)
{
    if (!Double.isFinite(location))//rejects NaN and +/- infinity
        throw new ArithmeticException("location must be a real number");
    this.location = location;
}
Sets location of the Levy distribution .
52
8
30,007
/**
 * Training procedure applied to each version of the CPM sub-problem. Runs
 * epochs of SGD over a shuffled point order: for a negative example, every
 * row of W whose score violates the margin (dot > -1) is pushed away from
 * the point; for a positive example, the owning row chosen by ASSIGN is
 * pushed toward it, with ownership bookkeeping kept in 'owned' and
 * 'assignments'. Both W and b decay by (1 - 1/t) each step, which the
 * in-code comment notes is a more stable equivalent of the usual
 * (1 - eta*lambda) shrink.
 *
 * @param D the training data set
 * @param W the weight matrix being learned, one row per sub-classifier
 * @param b the bias vector paired with the rows of W
 * @param sign_mul +/-1 multiplier applied to the binarized class label
 * @param parallel NOTE(review): accepted but never read in this method —
 *        confirm whether parallel training was intended here
 */
private void sgdTrain ( ClassificationDataSet D , MatrixOfVecs W , Vec b , int sign_mul , boolean parallel ) { IntList order = new IntList ( D . size ( ) ) ; ListUtils . addRange ( order , 0 , D . size ( ) , 1 ) ; final double lambda_adj = lambda / ( D . size ( ) * epochs ) ; int [ ] owned = new int [ K ] ; //how many points does thsi guy own? int assigned_positive_instances = 0 ; //how many points in the positive class have been assigned? int [ ] assignments = new int [ D . size ( ) ] ; //who owns each data point Arrays . fill ( assignments , - 1 ) ; //Starts out that no one is assigned! Vec dots = new DenseVector ( W . rows ( ) ) ; long t = 0 ; for ( int epoch = 0 ; epoch < epochs ; epoch ++ ) { Collections . shuffle ( order ) ; for ( int i : order ) { t ++ ; double eta = 1 / ( lambda_adj * t ) ; Vec x_i = D . getDataPoint ( i ) . getNumericalValues ( ) ; int y_i = ( D . getDataPointCategory ( i ) * 2 - 1 ) * sign_mul ; //this sets dots = bias, which we then add to with matrix-vector product //result is the same as dots = W x_i + b b . copyTo ( dots ) ; W . multiply ( x_i , 1.0 , dots ) ; if ( y_i == - 1 ) { for ( int k = 0 ; k < K ; k ++ ) if ( dots . get ( k ) > - 1 ) { W . getRowView ( k ) . mutableSubtract ( eta , x_i ) ; b . increment ( k , - eta ) ; } } else //y_i == 1 { int k_true_max = 0 ; for ( int k = 1 ; k < dots . length ( ) ; k ++ ) if ( dots . get ( k ) > dots . get ( k_true_max ) ) k_true_max = k ; if ( dots . get ( k_true_max ) < 1 ) { int z = ASSIGN ( dots , i , k_true_max , owned , assignments , assigned_positive_instances ) ; W . getRowView ( z ) . mutableAdd ( eta , x_i ) ; b . increment ( z , eta ) ; //book keeping if ( assignments [ i ] < 0 ) //first assignment, inc counter assigned_positive_instances ++ ; else //change owner, decrement ownership count owned [ assignments [ i ] ] -- ; owned [ z ] ++ ; assignments [ i ] = z ; } } // W.mutableMultiply(1-eta*lambda); //equivalent form, more stable W . mutableMultiply ( 1 - 1.0 / t ) ; b . 
mutableMultiply ( 1 - 1.0 / t ) ; } } }
Training procedure that can be applied to each version of the CPM sub - problem .
669
17
30,008
/**
 * Sets the learning rate to use.
 *
 * @param learningRate the step size for updates
 * @throws IllegalArgumentException if the rate is not a positive, finite value
 */
public void setLearningRate(double learningRate)
{
    boolean invalid = Double.isInfinite(learningRate) || Double.isNaN(learningRate) || learningRate <= 0;
    if (invalid)
        throw new IllegalArgumentException("Learning rate must be positive, not " + learningRate);
    this.learningRate = learningRate;
}
Sets the learning rate to use
68
7
30,009
/**
 * Sets the coefficient-magnitude threshold used to decide when
 * regularization is no longer applied to a coefficient.
 *
 * @param threshold the positive threshold value (infinity is permitted)
 * @throws IllegalArgumentException if the value is NaN or not positive
 */
public void setThreshold(double threshold)
{
    //note: positive infinity is intentionally allowed here
    if (Double.isNaN(threshold) || threshold <= 0)
        throw new IllegalArgumentException("Threshold must be positive, not " + threshold);
    this.threshold = threshold;
}
Sets the threshold for a coefficient value to avoid regularization . Once a coefficient reaches this magnitude , regularization will not be applied .
52
26
30,010
/**
 * Sets the gravity regularization parameter that weighs down coefficient
 * values; larger values impose stronger regularization and encourage
 * greater sparsity.
 *
 * @param gravity the positive, finite regularization strength
 * @throws IllegalArgumentException if the value is not a positive, finite number
 */
public void setGravity(double gravity)
{
    boolean invalid = Double.isInfinite(gravity) || Double.isNaN(gravity) || gravity <= 0;
    if (invalid)
        throw new IllegalArgumentException("Gravity must be positive, not " + gravity);
    this.gravity = gravity;
}
Sets the gravity regularization parameter that weighs down the coefficient values . Larger gravity values impose stronger regularization and encourage greater sparsity .
61
28
30,011
/**
 * Performs the sparse update of the weight vector for the non-zero entries
 * of x: each touched coordinate j takes a gradient step of
 * 2*learningRate*(y - yHat)*x_j, passed through T() together with a penalty
 * scaled by how many whole K-sized periods elapsed since coordinate j was
 * last updated; t[j] is then advanced by that many whole periods.
 * NOTE(review): T appears to be a truncation/shrinkage operator using
 * 'threshold' — confirm against its definition elsewhere in this class.
 */
private void performUpdate ( final Vec x , final double y , final double yHat ) { for ( IndexValue iv : x ) { final int j = iv . getIndex ( ) ; w . set ( j , T ( w . get ( j ) + 2 * learningRate * ( y - yHat ) * iv . getValue ( ) , ( ( time - t [ j ] ) / K ) * gravity * learningRate , threshold ) ) ; t [ j ] += ( ( time - t [ j ] ) / K ) * K ; } }
Performs the sparse update of the weight vector
117
9
30,012
/**
 * Attempts to add the single best feature from 'available' to the selected
 * feature sets while increasing or maintaining the current accuracy. Each
 * candidate is scored by temporarily adding it to the removal sets,
 * applying a RemoveAttributeTransform to a shallow clone, and
 * cross-validating; the candidate with the lowest score wins. PbestScore[0]
 * is an in/out parameter holding the best score seen so far.
 *
 * @return the index of the selected feature, or -1 when no further
 *         selection should be made (score cannot improve and the minimum
 *         feature count is already satisfied)
 */
static protected int SFSSelectFeature ( Set < Integer > available , DataSet dataSet , Set < Integer > catToRemove , Set < Integer > numToRemove , Set < Integer > catSelecteed , Set < Integer > numSelected , Object evaluater , int folds , Random rand , double [ ] PbestScore , int minFeatures ) { int nCat = dataSet . getNumCategoricalVars ( ) ; int curBest = - 1 ; double curBestScore = Double . POSITIVE_INFINITY ; for ( int feature : available ) { removeFeature ( feature , nCat , catToRemove , numToRemove ) ; DataSet workOn = dataSet . shallowClone ( ) ; RemoveAttributeTransform remove = new RemoveAttributeTransform ( workOn , catToRemove , numToRemove ) ; workOn . applyTransform ( remove ) ; double score = getScore ( workOn , evaluater , folds , rand ) ; if ( score < curBestScore ) { curBestScore = score ; curBest = feature ; } addFeature ( feature , nCat , catToRemove , numToRemove ) ; } if ( curBestScore <= 1e-14 && PbestScore [ 0 ] <= 1e-14 && catSelecteed . size ( ) + numSelected . size ( ) >= minFeatures ) return - 1 ; if ( curBestScore < PbestScore [ 0 ] || catSelecteed . size ( ) + numSelected . size ( ) < minFeatures || Math . abs ( PbestScore [ 0 ] - curBestScore ) < 1e-3 ) { PbestScore [ 0 ] = curBestScore ; addFeature ( curBest , nCat , catSelecteed , numSelected ) ; removeFeature ( curBest , nCat , catToRemove , numToRemove ) ; available . remove ( curBest ) ; return curBest ; } else return - 1 ; //No possible improvment & weve got enough }
Attempts to add one feature to the list of features while increasing or maintaining the current accuracy
416
17
30,013
/**
 * Scores a data set and learner by cross validation: classification error
 * rate for classification sets, mean error for regression sets, and
 * positive infinity for anything else.
 *
 * @param workOn the data set to evaluate on
 * @param evaluater the Classifier or Regressor to evaluate
 * @param folds the number of cross-validation folds
 * @param rand the source of randomness for fold splitting
 * @return the cross-validated error score (lower is better)
 */
protected static double getScore(DataSet workOn, Object evaluater, int folds, Random rand)
{
    if (workOn instanceof ClassificationDataSet)
    {
        ClassificationModelEvaluation eval = new ClassificationModelEvaluation((Classifier) evaluater, (ClassificationDataSet) workOn);
        eval.evaluateCrossValidation(folds, rand);
        return eval.getErrorRate();
    }
    if (workOn instanceof RegressionDataSet)
    {
        RegressionModelEvaluation eval = new RegressionModelEvaluation((Regressor) evaluater, (RegressionDataSet) workOn);
        eval.evaluateCrossValidation(folds, rand);
        return eval.getMeanError();
    }
    return Double.POSITIVE_INFINITY;
}
The score function for a data set and a learner by cross validation of a classifier
175
18
30,014
/**
 * Sets the minimum prior observation count needed for an attribute
 * combination to have enough support to be included in the final estimate.
 *
 * @param m the non-negative, finite minimum count
 * @throws ArithmeticException if m is negative, infinite, or NaN
 */
public void setM(double m)
{
    boolean invalid = m < 0 || Double.isInfinite(m) || Double.isNaN(m);
    if (invalid)
        throw new ArithmeticException("The minimum count must be a non negative number");
    this.m = m;
}
Sets the minimum prior observation value needed for an attribute combination to have enough support to be included in the final estimate .
59
24
30,015
/**
 * Unwraps every layer of VecPaired decoration and returns the underlying
 * base vector. A vector that is not a VecPaired is returned unchanged.
 *
 * @param b the (possibly wrapped) vector
 * @return the innermost vector that is not a VecPaired
 */
public static Vec extractTrueVec(Vec b)
{
    Vec base = b;
    while (base instanceof VecPaired)
        base = ((VecPaired) base).getVector();
    return base;
}
This method is used assuming multiple VecPaired are used together . The implementation of the vector may have logic to handle the case that the other vector is of the same type . This will go through every layer of VecPaired to return the final base vector .
41
52
30,016
/**
 * Does the work to add a given word to the sparse vector, written to be
 * safe under concurrent calls. New words race to claim an index via
 * wordIndex.putIfAbsent with a -1 placeholder; the winner then assigns the
 * real index from currentLength and overwrites the placeholder. Document
 * frequencies in termDocumentFrequencys are repaired with putIfAbsent when
 * the two maps are observed out of sync (they are not updated atomically
 * together).
 *
 * @return false when the word's index is still the -1 placeholder (a race
 *         was lost), true once the word was recorded in 'vec' with the
 *         given value
 */
private boolean addWord ( String word , SparseVector vec , Integer value ) { Integer indx = wordIndex . get ( word ) ; if ( indx == null ) //this word has never been seen before! { Integer index_for_new_word ; if ( ( index_for_new_word = wordIndex . putIfAbsent ( word , - 1 ) ) == null ) //I won the race to insert this word into the map { /* * we need to do this increment after words to avoid a race * condition where two people incrment currentLength for the * same word, as that will throw off other word additions * before we can fix the problem */ index_for_new_word = currentLength . getAndIncrement ( ) ; wordIndex . put ( word , index_for_new_word ) ; //overwrite with correct value } if ( index_for_new_word < 0 ) return false ; //possible race on tdf as well when two threads found same new word at the same time AtomicInteger termCount = new AtomicInteger ( 0 ) , tmp = null ; tmp = termDocumentFrequencys . putIfAbsent ( index_for_new_word , termCount ) ; if ( tmp != null ) termCount = tmp ; termCount . incrementAndGet ( ) ; int newLen = Math . max ( index_for_new_word + 1 , vec . length ( ) ) ; vec . setLength ( newLen ) ; vec . set ( index_for_new_word , value ) ; } else //this word has been seen before { if ( indx < 0 ) return false ; AtomicInteger toInc = termDocumentFrequencys . get ( indx ) ; if ( toInc == null ) { //wordIndex and termDocumnetFrequences are not updated //atomicly together, so could get index but not have tDF ready toInc = termDocumentFrequencys . putIfAbsent ( indx , new AtomicInteger ( 1 ) ) ; if ( toInc == null ) //other person finished adding before we "fixed" via putIfAbsent toInc = termDocumentFrequencys . get ( indx ) ; } toInc . incrementAndGet ( ) ; if ( vec . length ( ) <= indx ) //happens when another thread sees the word first and adds it, then get check and find it- but haven't increased our vector legnth vec . setLength ( indx + 1 ) ; vec . set ( indx , value ) ; } return true ; }
Does the work to add a given word to the sparse vector . May not succeed in race conditions when two or more threads are trying to add a word at the same time .
546
35
30,017
/**
 * Sets the rate parameter lambda, the average rate of the event occurring
 * in a unit of time.
 *
 * @param lambda the positive, finite rate value
 * @throws IllegalArgumentException if lambda is not a positive, finite number
 */
public void setLambda(double lambda)
{
    //!(lambda > 0) also rejects NaN
    if (!(lambda > 0) || Double.isInfinite(lambda))
        throw new IllegalArgumentException("lambda must be positive, not " + lambda);
    this.lambda = lambda;
}
Sets the average rate of the event occurring in a unit of time
61
14
30,018
public int getMedianIndex ( final List < Integer > data , int pivot ) { int medianIndex = data . size ( ) / 2 ; //What if more than one point have the samve value? Keep incrementing until that dosn't happen while ( medianIndex < data . size ( ) - 1 && allVecs . get ( data . get ( medianIndex ) ) . get ( pivot ) == allVecs . get ( data . get ( medianIndex + 1 ) ) . get ( pivot ) ) medianIndex ++ ; return medianIndex ; }
Returns the index for the median , adjusted in case multiple features have the same value .
118
16
30,019
/**
 * Sets the cardinality of the distribution, the maximum number of items
 * that Zipf can return. The value is rounded up via Math.ceil.
 *
 * @param cardinality the non-negative cardinality (infinity is allowed)
 * @throws IllegalArgumentException if the value is negative or NaN
 */
public void setCardinality(double cardinality)
{
    boolean invalid = cardinality < 0 || Double.isNaN(cardinality);
    if (invalid)
        throw new IllegalArgumentException("Cardinality must be a positive integer or infinity, not " + cardinality);
    this.cardinality = Math.ceil(cardinality);
    fixCache();//dependent cached values must be recomputed
}
Sets the cardinality of the distribution defining the maximum number of items that Zipf can return .
75
20
30,020
/**
 * Sets the skewness of the distribution. Lower values spread the
 * probability mass out while higher values concentrate it on the lowest
 * ranks.
 *
 * @param skew the positive, finite skew value
 * @throws IllegalArgumentException if skew is not a positive, finite number
 */
public void setSkew(double skew)
{
    if (!(skew > 0) || Double.isInfinite(skew))//!(skew > 0) also rejects NaN
        throw new IllegalArgumentException("Skew must be a positive value, not " + skew);
    this.skew = skew;
    fixCache();
}
Sets the skewness of the distribution . Lower values spread out the probability distribution while higher values concentrate on the lowest ranks .
70
26
30,021
public void writePoint ( double weight , DataPoint dp , double label ) throws IOException { ByteArrayOutputStream baos = local_baos . get ( ) ; pointToBytes ( weight , dp , label , baos ) ; if ( baos . size ( ) >= LOCAL_BUFFER_SIZE ) //We've got a big chunk of data, lets dump it synchronized ( out ) { baos . writeTo ( out ) ; baos . reset ( ) ; } }
Write out the given data point to the output stream
104
10
30,022
/**
 * Increments the array to contain the representation of the next
 * combination of values in the polynomial, carrying over between positions
 * when the running count exceeds the maximum.
 *
 * @param setTo the combination counters to advance (setTo[0] is incremented)
 * @param max the maximum allowed total count
 * @param curCount the current total count before the increment
 * @return the total count after the increment and any carries
 */
private int increment(int[] setTo, int max, int curCount)
{
    setTo[0]++;
    curCount++;
    if (curCount <= max)
        return curCount;//still under the limit, no carry needed
    //propagate carries until we are back under the limit or run out of positions
    int pos = 0;
    while (pos < setTo.length - 1 && curCount > max)
    {
        curCount -= setTo[pos];
        setTo[pos] = 0;
        pos++;
        setTo[pos]++;
        curCount++;
    }
    return curCount;
}
Increments the array to contain representation of the next combination of values in the polynomial
102
18
30,023
/**
 * Finds the parameter object with the given name, or throws an exception
 * if no parameter with that name exists.
 *
 * @param name the name of the parameter to look up
 * @return the matching parameter from the base classifier or regressor
 * @throws IllegalArgumentException if no parameter with the name exists
 */
protected Parameter getParameterByName(String name) throws IllegalArgumentException
{
    //query whichever base learner is in use
    Parameterized source = (Parameterized) (baseClassifier != null ? baseClassifier : baseRegressor);
    Parameter param = source.getParameter(name);
    if (param == null)
        throw new IllegalArgumentException("Parameter " + name + " does not exist");
    return param;
}
Finds the parameter object with the given name or throws an exception if a parameter with the given name does not exist .
98
24
30,024
/**
 * Computes the margin score for the given data point as the alpha-weighted
 * kernel sum against the stored vectors.
 *
 * @param x the input vector to score
 * @return the raw margin score
 */
private double getPreScore(Vec x)
{
    final double[] coefficients = alpha.getBackingArray();
    return k.evalSum(vecs, accelCache, coefficients, x, 0, alpha.size());
}
Computes the margin score for the given data point
44
10
30,025
/**
 * Sets the covariance matrix for this distribution. The transposed
 * Cholesky factor is kept in L, and the log-pdf constant plus inverse
 * covariance are derived from an LUP decomposition. When the determinant
 * is NaN or below 1e-10 (numerically unstable or rank-deficient input),
 * the SVD pseudo-determinant and pseudo-inverse are used instead.
 *
 * @throws ArithmeticException if the matrix is not square or its dimension
 *         does not match the mean vector
 */
public void setCovariance ( Matrix covMatrix ) { if ( ! covMatrix . isSquare ( ) ) throw new ArithmeticException ( "Covariance matrix must be square" ) ; else if ( covMatrix . rows ( ) != this . mean . length ( ) ) throw new ArithmeticException ( "Covariance matrix does not agree with the mean" ) ; CholeskyDecomposition cd = new CholeskyDecomposition ( covMatrix . clone ( ) ) ; L = cd . getLT ( ) ; L . mutableTranspose ( ) ; LUPDecomposition lup = new LUPDecomposition ( covMatrix . clone ( ) ) ; int k = mean . length ( ) ; double det = lup . det ( ) ; if ( Double . isNaN ( det ) || det < 1e-10 ) { //Numerical unstable or sub rank matrix. Use the SVD to work with the more stable pesudo matrix SingularValueDecomposition svd = new SingularValueDecomposition ( covMatrix . clone ( ) ) ; //We need the rank deficient PDF and pesude inverse this . logPDFConst = 0.5 * log ( svd . getPseudoDet ( ) * pow ( 2 * PI , svd . getRank ( ) ) ) ; this . invCovariance = svd . getPseudoInverse ( ) ; } else { this . logPDFConst = ( - k * log ( 2 * PI ) - log ( det ) ) * 0.5 ; this . invCovariance = lup . solve ( Matrix . eye ( k ) ) ; } }
Sets the covariance matrix for this distribution .
355
10
30,026
/**
 * Performs the actual work of PAM. Repeatedly, until assignments stop
 * changing or iterLimit is reached: assign every point to its nearest
 * medoid (optionally in parallel), accumulating the total squared
 * distance, then recompute each cluster's medoid as the member point that
 * minimizes the sum of squared distances to the rest of its cluster.
 *
 * @return the total squared distance of points to their medoids from the
 *         final assignment pass
 */
protected double cluster ( DataSet data , boolean doInit , int [ ] medioids , int [ ] assignments , List < Double > cacheAccel , boolean parallel ) { DoubleAdder totalDistance = new DoubleAdder ( ) ; LongAdder changes = new LongAdder ( ) ; Arrays . fill ( assignments , - 1 ) ; //-1, invalid category! int [ ] bestMedCand = new int [ medioids . length ] ; double [ ] bestMedCandDist = new double [ medioids . length ] ; List < Vec > X = data . getDataVectors ( ) ; final List < Double > accel ; if ( doInit ) { TrainableDistanceMetric . trainIfNeeded ( dm , data ) ; accel = dm . getAccelerationCache ( X ) ; selectIntialPoints ( data , medioids , dm , accel , rand , seedSelection ) ; } else accel = cacheAccel ; int iter = 0 ; do { changes . reset ( ) ; totalDistance . reset ( ) ; ParallelUtils . run ( parallel , data . size ( ) , ( start , end ) -> { for ( int i = start ; i < end ; i ++ ) { int assignment = 0 ; double minDist = dm . dist ( medioids [ 0 ] , i , X , accel ) ; for ( int k = 1 ; k < medioids . length ; k ++ ) { double dist = dm . dist ( medioids [ k ] , i , X , accel ) ; if ( dist < minDist ) { minDist = dist ; assignment = k ; } } //Update which cluster it is in if ( assignments [ i ] != assignment ) { changes . increment ( ) ; assignments [ i ] = assignment ; } totalDistance . add ( minDist * minDist ) ; } } ) ; //TODO this update may be faster by using more memory, and actually moiving people about in the assignment loop above //Update the medoids Arrays . fill ( bestMedCandDist , Double . MAX_VALUE ) ; for ( int i = 0 ; i < data . size ( ) ; i ++ ) { double thisCandidateDistance ; final int clusterID = assignments [ i ] ; final int medCandadate = i ; final int ii = i ; thisCandidateDistance = ParallelUtils . range ( data . size ( ) , parallel ) . filter ( j -> j != ii && assignments [ j ] == clusterID ) . mapToDouble ( j -> Math . pow ( dm . dist ( medCandadate , j , X , accel ) , 2 ) ) . 
sum ( ) ; if ( thisCandidateDistance < bestMedCandDist [ clusterID ] ) { bestMedCand [ clusterID ] = i ; bestMedCandDist [ clusterID ] = thisCandidateDistance ; } } System . arraycopy ( bestMedCand , 0 , medioids , 0 , medioids . length ) ; } while ( changes . sum ( ) > 0 && iter ++ < iterLimit ) ; return totalDistance . sum ( ) ; }
Performs the actual work of PAM .
666
9
30,027
/**
 * Sets the decay rate used by AdaDelta. Lower values focus more on the
 * current gradient; higher values incorporate a longer history.
 *
 * @param rho the decay rate in the open interval (0, 1)
 * @throws IllegalArgumentException if rho is outside (0, 1) or NaN
 */
public void setRho(double rho)
{
    boolean inRange = rho > 0 && rho < 1;//also false for NaN
    if (!inRange)
        throw new IllegalArgumentException("Rho must be in (0, 1)");
    this.rho = rho;
}
Sets the decay rate used by AdaDelta . Lower values focus more on the current gradient where higher values incorporate a longer history .
62
26
30,028
/**
 * Sets the smoothing parameter, which must be in the range (0, 1]. Larger
 * values make the statistics adapt to changes more quickly.
 *
 * @param smoothing the smoothing value in (0, 1]
 * @throws IllegalArgumentException if the value is outside (0, 1] or NaN
 */
public void setSmoothing(double smoothing)
{
    boolean inRange = smoothing > 0 && smoothing <= 1;//also false for NaN
    if (!inRange)
        throw new IllegalArgumentException("Smoothing must be in (0, 1], not " + smoothing);
    this.smoothing = smoothing;
}
Sets the smoothing parameter value to use . Must be in the range ( 0 , 1 ] . Changing this value will impact how quickly the statistics adapt to changes , with larger values increasing the rate of change and smaller values decreasing it .
69
45
30,029
public void add ( double x ) { if ( Double . isNaN ( mean ) ) //fist case { mean = x ; variance = 0 ; } else //general case { //first update stnd deviation variance = ( 1 - smoothing ) * ( variance + smoothing * Math . pow ( x - mean , 2 ) ) ; mean = ( 1 - smoothing ) * mean + smoothing * x ; } }
Adds the given data point to the statistics
89
8
30,030
/**
 * Sets the minimum and maximum values at the same time. This is useful
 * when setting them one at a time could conflict with the previous values.
 *
 * @param min the minimum value, strictly less than max
 * @param max the maximum value
 * @throws IllegalArgumentException if min is not strictly less than max
 */
public void setMinMax(int min, int max)
{
    if (min >= max)
        throw new IllegalArgumentException("The input minimum (" + min + ") must be less than the given max (" + max + ")");
    this.min = min;
    this.max = max;
}
Sets the minimum and maximum values at the same time this is useful if setting them one at a time may have caused a conflict with the previous values
64
30
30,031
/**
 * Sets the aggressiveness parameter. Increasing this value increases the
 * aggressiveness of the algorithm; it also acts as a form of
 * regularization on the updates. Must be positive.
 *
 * @param C the positive, finite aggressiveness value
 * @throws IllegalArgumentException if C is not a positive, finite number
 */
public void setC(double C)
{
    if (!(C > 0) || Double.isInfinite(C))//!(C > 0) also rejects NaN
        throw new IllegalArgumentException("C must be a postive constant, not " + C);
    this.C = C;
}
Set the aggressiveness parameter . Increasing the value of this parameter increases the aggressiveness of the algorithm . It must be a positive value . This parameter essentially performs a type of regularization on the updates
62
39
30,032
/**
 * Guesses a search distribution for the regularization parameter, scaled
 * inversely by the squared size of the data set.
 *
 * @param d the data set to base the guess on
 * @return a LogUniform distribution over [2^-3 / n^2, 2^3 / n^2]
 */
public static Distribution guessRegularization(DataSet d)
{
    final double n = d.size();
    final double nSqrd = n * n;
    return new LogUniform(Math.pow(2, -3) / nSqrd, Math.pow(2, 3) / nSqrd);
}
Guesses the distribution to use for the Regularization parameter
62
11
30,033
/**
 * Obtains a comparator that imposes the reverse ordering of the given one.
 *
 * FIX: the original returned {@code -cmp.compare(o1, o2)}, which inverts
 * incorrectly when the comparator returns Integer.MIN_VALUE (negating
 * MIN_VALUE overflows back to MIN_VALUE). Swapping the arguments reverses
 * the order safely, the same way Collections.reverseOrder does.
 *
 * @param <T> the type being compared
 * @param cmp the comparator to reverse
 * @return a comparator imposing the reverse ordering of cmp
 */
public static <T> Comparator<T> getReverse(final Comparator<T> cmp)
{
    return (T o1, T o2) -> cmp.compare(o2, o1);
}
Obtains the reverse order comparator
50
7
30,034
/**
 * Resets the index table to the identity permutation, so the returned
 * indices are in linear order and the original input would be produced in
 * its original order instead of sorted order.
 */
public void reset()
{
    for (int pos = 0; pos < index.size(); pos++)
        index.set(pos, pos);
}
Resets the index table so that the returned indices are in linear order meaning the original input would be returned in its original order instead of sorted order .
34
30
30,035
/**
 * Adjusts this index table to contain the sorted index order for the given
 * list, using natural ordering.
 *
 * @param list the list to obtain the sorted index order of
 */
public <T extends Comparable<T>> void sort(List<T> list)
{
    //delegate to the comparator-based sort with the default comparator
    sort(list, defaultComp);
}
Adjust this index table to contain the sorted index order for the given list
29
14
30,036
/**
 * Adjusts this index table to contain the reverse sorted index order for
 * the given list.
 *
 * @param list the list to obtain the reverse sorted index order of
 */
public <T extends Comparable<T>> void sortR(List<T> list)
{
    //sort with the reversed default comparator
    sort(list, getReverse(defaultComp));
}
Adjusts this index table to contain the reverse sorted index order for the given list
36
16
30,037
public < T > void sort ( List < T > list , Comparator < T > cmp ) { if ( index . size ( ) < list . size ( ) ) for ( int i = index . size ( ) ; i < list . size ( ) ; i ++ ) index . ( i ) ; if ( list . size ( ) == index . size ( ) ) Collections . sort ( index , new IndexViewCompList ( list , cmp ) ) ; else { Collections . sort ( index ) ; //so [0, list.size) is at the front Collections . sort ( index . subList ( 0 , list . size ( ) ) , new IndexViewCompList ( list , cmp ) ) ; } prevSize = list . size ( ) ; }
Sets up the index table based on the given list of the same size and comparator .
161
19
30,038
/**
 * Converts this dataset into one meant for classification problems. The
 * given categorical feature index is removed from the data and made the
 * target variable for the classification problem.
 *
 * @param index the categorical feature to use as the class label
 * @return a classification dataset backed by this data
 * @throws IllegalArgumentException if the index is negative, there are no
 *         categorical variables, or the index is out of range
 */
public ClassificationDataSet asClassificationDataSet(int index)
{
    if (index < 0)
        throw new IllegalArgumentException("Index must be a non-negative value");
    if (getNumCategoricalVars() == 0)
        throw new IllegalArgumentException("Dataset has no categorical variables, can not create classification dataset");
    if (index >= getNumCategoricalVars())
        throw new IllegalArgumentException("Index " + index + " is larger than number of categorical features " + getNumCategoricalVars());
    return new ClassificationDataSet(this, index);
}
Converts this dataset into one meant for classification problems . The given categorical feature index is removed from the data and made the target variable for the classification problem .
141
32
30,039
/**
 * Converts this dataset into one meant for regression problems. The given
 * numeric feature index is removed from the data and made the target
 * variable for the regression problem. Per-point weights are carried over.
 *
 * @param index the numeric feature to use as the regression target
 * @return a regression dataset backed by this data
 * @throws IllegalArgumentException if the index is negative, there are no
 *         numeric variables, or the index is out of range
 */
public RegressionDataSet asRegressionDataSet(int index)
{
    if (index < 0)
        throw new IllegalArgumentException("Index must be a non-negative value");
    if (getNumNumericalVars() == 0)
        throw new IllegalArgumentException("Dataset has no numeric variables, can not create regression dataset");
    if (index >= getNumNumericalVars())
        throw new IllegalArgumentException("Index " + index + " i larger than number of numeric features " + getNumNumericalVars());
    RegressionDataSet rds = new RegressionDataSet(this.datapoints.toList(), index);
    //carry the per-point weights over to the new dataset
    for (int i = 0; i < size(); i++)
        rds.setWeight(i, this.getWeight(i));
    return rds;
}
Converts this dataset into one meant for regression problems . The given numeric feature index is removed from the data and made the target variable for the regression problem .
194
31
30,040
/**
 * Sets the reflection constant, which must be greater than 0.
 *
 * @param reflection the positive, finite reflection constant
 * @throws ArithmeticException if the value is not a positive, finite number
 */
public void setReflection(double reflection)
{
    if (!(reflection > 0) || Double.isInfinite(reflection))//!(x > 0) also rejects NaN
        throw new ArithmeticException("Reflection constant must be > 0, not " + reflection);
    this.reflection = reflection;
}
Sets the reflection constant which must be greater than 0
62
11
30,041
/**
 * Sets the expansion constant, which must be greater than 1 and greater
 * than the reflection constant.
 *
 * @param expansion the expansion constant
 * @throws ArithmeticException if the value is not finite and greater than
 *         1, or is not greater than the reflection constant
 */
public void setExpansion(double expansion)
{
    if (expansion <= 1 || Double.isNaN(expansion) || Double.isInfinite(expansion))
        throw new ArithmeticException("Expansion constant must be > 1, not " + expansion);
    else if (expansion <= reflection)
        //FIX: the original message said "less than", contradicting the check —
        //the expansion constant must exceed the reflection constant to be accepted
        throw new ArithmeticException("Expansion constant must be greater than the reflection constant");
    this.expansion = expansion;
}
Sets the expansion constant which must be greater than 1 and the reflection constant
89
15
30,042
/**
 * Returns a new matrix that is the transpose of this matrix.
 *
 * @return a freshly allocated cols() x rows() transpose of this matrix
 */
public Matrix transpose()
{
    Matrix result = new DenseMatrix(cols(), rows());
    transpose(result);//fill the destination with the transposed values
    return result;
}
Returns a new matrix that is the transpose of this matrix .
40
13
30,043
/**
 * Copies the values of this matrix into the other matrix of the same
 * dimensions.
 *
 * @param other the destination matrix
 * @throws ArithmeticException if the dimensions do not match
 */
public void copyTo(Matrix other)
{
    if (rows() != other.rows() || cols() != other.cols())
        throw new ArithmeticException("Matrices are not of the same dimension");
    for (int r = 0; r < rows(); r++)
        getRowView(r).copyTo(other.getRowView(r));
}
Copies the values of this matrix into the other matrix of the same dimensions .
94
15
30,044
/**
 * Hints that many kernel values for row r are about to be accessed. When
 * row caching is enabled, the row's value array is fetched from (or
 * created in) the partial cache — new entries are filled with NaN to mark
 * unset values — and pinned in specific_row_cache_values /
 * specific_row_cache_row to avoid per-access LRU overhead. Passing a
 * negative index clears the pinned row and should be done once access to
 * the row is finished.
 */
protected void accessingRow ( int r ) { if ( r < 0 ) { specific_row_cache_row = - 1 ; specific_row_cache_values = null ; return ; } if ( cacheMode == CacheMode . ROWS ) { double [ ] cache = partialCache . get ( r ) ; if ( cache == null ) //not present { //make a row cache = new double [ vecs . size ( ) ] ; Arrays . fill ( cache , Double . NaN ) ; double [ ] cache_missed = partialCache . putIfAbsentAndGet ( r , cache ) ; if ( cache_missed != null ) cache = cache_missed ; } specific_row_cache_values = cache ; specific_row_cache_row = r ; } }
This method allows the caller to hint that they are about to access many kernel values for a specific row . The row may be selected out from the cache into its own location to avoid excess LRU overhead . Giving a negative index indicates that we are done with the row and removes it . This method may be called multiple times with different row values . But when done accessing a specific row a negative value should be passed in .
167
84
30,045
/**
 * Internal kernel evaluation. Only call directly when the resulting value
 * will not be re-used and the caching system should intentionally be
 * skipped.
 *
 * @param a the index of the first vector
 * @param b the index of the second vector
 * @return the kernel value for the pair (a, b)
 */
protected double k(int a, int b)
{
    evalCount++;//track the number of raw kernel evaluations performed
    return kernel.eval(a, b, vecs, accelCache);
}
Internal kernel eval source . Only call directly if you KNOW you will not be re - using the resulting value and intentionally wish to skip the caching system
33
29
30,046
/**
 * Sparsifies the model by compacting the support vectors (entries with
 * alpha != 0) to the front of 'vecs' — swapping the matching blocks of the
 * acceleration cache along with them — and then truncating both 'vecs' and
 * 'alphas' down to just the support vectors.
 */
protected void sparsify ( ) { final int N = vecs . size ( ) ; int accSize = accelCache == null ? 0 : accelCache . size ( ) / N ; int svCount = 0 ; for ( int i = 0 ; i < N ; i ++ ) if ( alphas [ i ] != 0 ) //Its a support vector { ListUtils . swap ( vecs , svCount , i ) ; if ( accelCache != null ) for ( int j = i * accSize ; j < ( i + 1 ) * accSize ; j ++ ) ListUtils . swap ( accelCache , svCount * accSize + j - i * accSize , j ) ; alphas [ svCount ++ ] = alphas [ i ] ; } vecs = new ArrayList < Vec > ( vecs . subList ( 0 , svCount ) ) ; alphas = Arrays . copyOfRange ( alphas , 0 , svCount ) ; }
Sparsifies the SVM by removing the vectors with &alpha ; = 0 from the dataset .
208
20
30,047
/**
 * Sets the regularization term, where larger values indicate a larger
 * regularization penalty.
 *
 * @param lambda the positive, finite regularization value
 * @throws IllegalArgumentException if lambda is not a positive, finite number
 */
@Parameter.WarmParameter(prefLowToHigh = false)
public void setLambda(double lambda)
{
    if (!(lambda > 0) || Double.isInfinite(lambda))//!(x > 0) also rejects NaN
        throw new IllegalArgumentException("Regularization term lambda must be a positive value, not " + lambda);
    this.lambda = lambda;
}
Sets the regularization term where larger values indicate a larger regularization penalty .
80
16
30,048
public void addDataPoint ( Vec numerical , int [ ] categories , double val ) { if ( numerical . length ( ) != numNumerVals ) throw new RuntimeException ( "Data point does not contain enough numerical data points" ) ; if ( categories . length != categories . length ) throw new RuntimeException ( "Data point does not contain enough categorical data points" ) ; for ( int i = 0 ; i < categories . length ; i ++ ) if ( ! this . categories [ i ] . isValidCategory ( categories [ i ] ) && categories [ i ] >= 0 ) // >= so that missing values (negative) are allowed throw new RuntimeException ( "Categoriy value given is invalid" ) ; DataPoint dp = new DataPoint ( numerical , categories , this . categories ) ; addDataPoint ( dp , val ) ; }
Creates a new data point to be added to the data set . The arguments will be used directly ; modifying them afterward will affect the data set .
178
29
30,049
/**
 * Returns the i'th data point in the data set paired with its target
 * regressor value. Modifying the DataPointPair will affect the data set.
 *
 * @param i the index of the data point to retrieve
 * @return the data point paired with its target value
 */
public DataPointPair<Double> getDataPointPair(int i)
{
    DataPoint dp = getDataPoint(i);
    Double target = targets.get(i);
    return new DataPointPair<>(dp, target);
}
Returns the i th data point in the data set paired with its target regressor value . Modifying the DataPointPair will affect the data set .
43
31
30,050
/**
 * Returns a mapping from the numeric indices in the transformed space back
 * to their original indices.
 *
 * @return a map from new numeric index to original numeric index
 */
public Map<Integer, Integer> getReverseNumericMap()
{
    Map<Integer, Integer> reverse = new HashMap<>();
    for (int newIndex = 0; newIndex < numIndexMap.length; newIndex++)
        reverse.put(newIndex, numIndexMap[newIndex]);
    return reverse;
}
Returns a mapping from the numeric indices in the transformed space back to their original indices
77
16
30,051
/**
 * Returns a mapping from the nominal indices in the transformed space back
 * to their original indices.
 *
 * @return a map from new nominal index to original nominal index
 */
public Map<Integer, Integer> getReverseNominalMap()
{
    Map<Integer, Integer> reverse = new HashMap<>();
    for (int newIndex = 0; newIndex < catIndexMap.length; newIndex++)
        reverse.put(newIndex, catIndexMap[newIndex]);
    return reverse;
}
Returns a mapping from the nominal indices in the transformed space back to their original indices
78
16
30,052
protected final void setUp ( DataSet dataSet , Set < Integer > categoricalToRemove , Set < Integer > numericalToRemove ) { for ( int i : categoricalToRemove ) if ( i >= dataSet . getNumCategoricalVars ( ) ) throw new RuntimeException ( "The data set does not have a categorical value " + i + " to remove" ) ; for ( int i : numericalToRemove ) if ( i >= dataSet . getNumNumericalVars ( ) ) throw new RuntimeException ( "The data set does not have a numercal value " + i + " to remove" ) ; catIndexMap = new int [ dataSet . getNumCategoricalVars ( ) - categoricalToRemove . size ( ) ] ; newCatHeader = new CategoricalData [ catIndexMap . length ] ; numIndexMap = new int [ dataSet . getNumNumericalVars ( ) - numericalToRemove . size ( ) ] ; int k = 0 ; for ( int i = 0 ; i < dataSet . getNumCategoricalVars ( ) ; i ++ ) { if ( categoricalToRemove . contains ( i ) ) continue ; newCatHeader [ k ] = dataSet . getCategories ( ) [ i ] . clone ( ) ; catIndexMap [ k ++ ] = i ; } k = 0 ; for ( int i = 0 ; i < dataSet . getNumNumericalVars ( ) ; i ++ ) { if ( numericalToRemove . contains ( i ) ) continue ; numIndexMap [ k ++ ] = i ; } }
Sets up the Remove Attribute Transform properly
345
9
30,053
public void setSigma ( double sigma ) { if ( sigma <= 0 ) throw new IllegalArgumentException ( "Sigma must be a positive constant, not " + sigma ) ; this . sigma = sigma ; this . sigmaSqrd2Inv = 0.5 / ( sigma * sigma ) ; }
Sets the sigma parameter which must be a positive value
72
12
30,054
public void setMean ( double mean ) { if ( Double . isInfinite ( mean ) || Double . isNaN ( mean ) ) throw new ArithmeticException ( "Mean must be a real number, not " + mean ) ; ( ( Normal ) getDistribution ( ) ) . setMean ( mean ) ; }
Sets the mean value used for the normal distribution
70
10
30,055
public void setStandardDeviations ( double devs ) { if ( devs <= 0 || Double . isInfinite ( devs ) || Double . isNaN ( devs ) ) throw new ArithmeticException ( "The stnd devs must be a positive value" ) ; ( ( Normal ) getDistribution ( ) ) . setStndDev ( devs ) ; }
Sets the standard deviations used for the normal distribution
74
10
30,056
@ Override protected void registerCurrencies ( ) throws Exception { parseCurrencies ( loadFromFile ( "/org/joda/money/CurrencyData.csv" ) ) ; parseCountries ( loadFromFile ( "/org/joda/money/CountryData.csv" ) ) ; parseCurrencies ( loadFromFiles ( "META-INF/org/joda/money/CurrencyDataExtension.csv" ) ) ; parseCountries ( loadFromFiles ( "META-INF/org/joda/money/CountryDataExtension.csv" ) ) ; }
Registers all the currencies known by this provider .
128
10
30,057
private void parseCurrencies ( List < String > content ) throws Exception { for ( String line : content ) { Matcher matcher = CURRENCY_REGEX_LINE . matcher ( line ) ; if ( matcher . matches ( ) ) { String currencyCode = matcher . group ( 1 ) ; int numericCode = Integer . parseInt ( matcher . group ( 2 ) ) ; int digits = Integer . parseInt ( matcher . group ( 3 ) ) ; registerCurrency ( currencyCode , numericCode , digits ) ; } } }
parse the currencies
116
3
30,058
private void parseCountries ( List < String > content ) throws Exception { for ( String line : content ) { Matcher matcher = COUNTRY_REGEX_LINE . matcher ( line ) ; if ( matcher . matches ( ) ) { String countryCode = matcher . group ( 1 ) ; String currencyCode = matcher . group ( 2 ) ; registerCountry ( countryCode , currencyCode ) ; } } }
parse the countries
89
3
30,059
public MoneyAmountStyle withGroupingSize ( Integer groupingSize ) { int sizeVal = ( groupingSize == null ? - 1 : groupingSize ) ; if ( groupingSize != null && sizeVal <= 0 ) { throw new IllegalArgumentException ( "Grouping size must be greater than zero" ) ; } if ( sizeVal == this . groupingSize ) { return this ; } return new MoneyAmountStyle ( zeroCharacter , positiveCharacter , negativeCharacter , decimalPointCharacter , groupingStyle , groupingCharacter , sizeVal , extendedGroupingSize , forceDecimalPoint , absValue ) ; }
Returns a copy of this style with the specified grouping size .
122
12
30,060
public MoneyAmountStyle withExtendedGroupingSize ( Integer extendedGroupingSize ) { int sizeVal = ( extendedGroupingSize == null ? - 1 : extendedGroupingSize ) ; if ( extendedGroupingSize != null && sizeVal < 0 ) { throw new IllegalArgumentException ( "Extended grouping size must not be negative" ) ; } if ( sizeVal == this . extendedGroupingSize ) { return this ; } return new MoneyAmountStyle ( zeroCharacter , positiveCharacter , negativeCharacter , decimalPointCharacter , groupingStyle , groupingCharacter , groupingSize , sizeVal , forceDecimalPoint , absValue ) ; }
Returns a copy of this style with the specified extended grouping size .
132
13
30,061
public MoneyAmountStyle withGroupingStyle ( GroupingStyle groupingStyle ) { MoneyFormatter . checkNotNull ( groupingStyle , "groupingStyle" ) ; if ( this . groupingStyle == groupingStyle ) { return this ; } return new MoneyAmountStyle ( zeroCharacter , positiveCharacter , negativeCharacter , decimalPointCharacter , groupingStyle , groupingCharacter , groupingSize , extendedGroupingSize , forceDecimalPoint , absValue ) ; }
Returns a copy of this style with the specified grouping setting .
92
12
30,062
public MoneyAmountStyle withForcedDecimalPoint ( boolean forceDecimalPoint ) { if ( this . forceDecimalPoint == forceDecimalPoint ) { return this ; } return new MoneyAmountStyle ( zeroCharacter , positiveCharacter , negativeCharacter , decimalPointCharacter , groupingStyle , groupingCharacter , groupingSize , extendedGroupingSize , forceDecimalPoint , absValue ) ; }
Returns a copy of this style with the specified decimal point setting .
80
13
30,063
private BigMoney checkCurrencyEqual ( BigMoneyProvider moneyProvider ) { BigMoney money = of ( moneyProvider ) ; if ( isSameCurrency ( money ) == false ) { throw new CurrencyMismatchException ( getCurrencyUnit ( ) , money . getCurrencyUnit ( ) ) ; } return money ; }
Validates that the currency of this money and the specified money match .
69
14
30,064
@ Override public int compareTo ( BigMoneyProvider other ) { BigMoney otherMoney = of ( other ) ; if ( currency . equals ( otherMoney . currency ) == false ) { throw new CurrencyMismatchException ( getCurrencyUnit ( ) , otherMoney . getCurrencyUnit ( ) ) ; } return amount . compareTo ( otherMoney . amount ) ; }
Compares this monetary value to another . The compared values must be in the same currency .
79
18
30,065
void mergeChild ( MoneyParseContext child ) { setLocale ( child . getLocale ( ) ) ; setText ( child . getText ( ) ) ; setIndex ( child . getIndex ( ) ) ; setErrorIndex ( child . getErrorIndex ( ) ) ; setCurrency ( child . getCurrency ( ) ) ; setAmount ( child . getAmount ( ) ) ; }
Merges the child context back into this instance .
84
10
30,066
public ParsePosition toParsePosition ( ) { ParsePosition pp = new ParsePosition ( textIndex ) ; pp . setErrorIndex ( textErrorIndex ) ; return pp ; }
Converts the indexes to a parse position .
40
9
30,067
@ Override public Iterator < Entry < K , V > > iterator ( ) { checkClosed ( ) ; final Iterator < K > _keyIterator = cache . keys ( ) . iterator ( ) ; return new Iterator < Entry < K , V > > ( ) { CacheEntry < K , V > entry ; @ Override public boolean hasNext ( ) { while ( _keyIterator . hasNext ( ) ) { entry = cache . getEntry ( _keyIterator . next ( ) ) ; if ( entry . getException ( ) == null ) { return true ; } } entry = null ; return false ; } @ Override public Entry < K , V > next ( ) { if ( entry == null && ! hasNext ( ) ) { throw new NoSuchElementException ( ) ; } return new Entry < K , V > ( ) { @ Override public K getKey ( ) { return entry . getKey ( ) ; } @ Override public V getValue ( ) { return entry . getValue ( ) ; } @ SuppressWarnings ( "unchecked" ) @ Override public < T > T unwrap ( Class < T > _class ) { if ( CacheEntry . class . equals ( _class ) ) { return ( T ) entry ; } return null ; } } ; } @ Override public void remove ( ) { if ( entry == null ) { throw new IllegalStateException ( "hasNext() / next() not called or end of iteration reached" ) ; } cache . remove ( entry . getKey ( ) ) ; } } ; }
Iterate with the help of cache2k key iterator .
332
12
30,068
public static CacheManager getInstance ( ) { ClassLoader _defaultClassLoader = PROVIDER . getDefaultClassLoader ( ) ; return PROVIDER . getManager ( _defaultClassLoader , PROVIDER . getDefaultManagerName ( _defaultClassLoader ) ) ; }
Get the default cache manager for the default class loader . The default class loader is the class loader used to load the cache2k implementation classes .
54
29
30,069
public static CacheManager getInstance ( ClassLoader cl ) { return PROVIDER . getManager ( cl , PROVIDER . getDefaultManagerName ( cl ) ) ; }
Get the default cache manager for the specified class loader .
34
11
30,070
public static CacheManager getInstance ( ClassLoader cl , String managerName ) { return PROVIDER . getManager ( cl , managerName ) ; }
Retrieve a cache manager with the specified name using the specified classloader . If not existing a manager with that name is created . Different cache managers are created for different class loaders . Manager names should be unique within one VM instance .
30
47
30,071
public static < K , T > Cache2kBuilder < K , T > of ( Class < K > _keyType , Class < T > _valueType ) { return new Cache2kBuilder < K , T > ( CacheTypeCapture . of ( _keyType ) , CacheTypeCapture . of ( _valueType ) ) ; }
Create a new cache builder for key and value types of classes with no generic parameters .
71
17
30,072
public static < K , T > Cache2kBuilder < K , T > of ( Cache2kConfiguration < K , T > c ) { Cache2kBuilder < K , T > cb = new Cache2kBuilder < K , T > ( c ) ; return cb ; }
Create a builder from the configuration .
61
7
30,073
public final Cache2kBuilder < K , V > manager ( CacheManager manager ) { if ( this . manager != null ) { throw new IllegalStateException ( "manager() must be first operation on builder." ) ; } this . manager = manager ; return this ; }
The manager the created cache will belong to . If this is set it must be the first method called .
56
21
30,074
@ SuppressWarnings ( "unchecked" ) public final < T2 > Cache2kBuilder < K , T2 > valueType ( CacheType < T2 > t ) { Cache2kBuilder < K , T2 > me = ( Cache2kBuilder < K , T2 > ) this ; me . config ( ) . setValueType ( t ) ; return me ; }
Sets the value type to use . Arrays are not supported .
83
14
30,075
public final Cache2kBuilder < K , V > name ( Class < ? > _class ) { config ( ) . setName ( _class . getName ( ) ) ; return this ; }
Sets a cache name from the fully qualified class name .
41
12
30,076
private static < T > CustomizationReferenceSupplier < T > wrapCustomizationInstance ( T obj ) { if ( obj == null ) { return null ; } return new CustomizationReferenceSupplier < T > ( obj ) ; }
Wraps to factory but passes on nulls .
48
10
30,077
@ SuppressWarnings ( "unchecked" ) public final Cache2kBuilder < K , V > wrappingLoader ( AdvancedCacheLoader < K , LoadDetail < V > > l ) { config ( ) . setAdvancedLoader ( ( CustomizationSupplier < AdvancedCacheLoader < K , V > > ) ( Object ) wrapCustomizationInstance ( l ) ) ; return this ; }
Enables read through operation and sets a cache loader
82
10
30,078
public final Cache2kBuilder < K , V > writer ( CacheWriter < K , V > w ) { config ( ) . setWriter ( wrapCustomizationInstance ( w ) ) ; return this ; }
Enables write through operation and sets a writer customization that gets called synchronously upon cache mutations . By default write through is not enabled .
43
27
30,079
public final Cache2kBuilder < K , V > addCacheClosedListener ( CacheClosedListener listener ) { config ( ) . getCacheClosedListeners ( ) . add ( wrapCustomizationInstance ( listener ) ) ; return this ; }
Listener that is called after a cache is closed . This is mainly used for the JCache integration .
52
20
30,080
public final Cache2kBuilder < K , V > addListener ( CacheEntryOperationListener < K , V > listener ) { config ( ) . getListeners ( ) . add ( wrapCustomizationInstance ( listener ) ) ; return this ; }
Add a listener . The listeners will be executed in a synchronous mode meaning further processing for an entry will stall until a registered listener is executed . The expiry will be always executed asynchronously .
51
40
30,081
public final Cache2kBuilder < K , V > addAsyncListener ( CacheEntryOperationListener < K , V > listener ) { config ( ) . getAsyncListeners ( ) . add ( wrapCustomizationInstance ( listener ) ) ; return this ; }
A set of listeners . Listeners added in this collection will be executed in an asynchronous mode .
53
19
30,082
public final Cache2kBuilder < K , V > expiryPolicy ( ExpiryPolicy < K , V > c ) { config ( ) . setExpiryPolicy ( wrapCustomizationInstance ( c ) ) ; return this ; }
Set expiry policy to use .
50
7
30,083
public final Cache2kBuilder < K , V > maxRetryInterval ( long v , TimeUnit u ) { config ( ) . setMaxRetryInterval ( u . toMillis ( v ) ) ; return this ; }
If a loader exception happens this is the maximum time interval after a retry attempt is made . For retries an exponential backoff algorithm is used . It starts with the retry time and then increases the time to the maximum according to an exponential pattern .
50
51
30,084
public final Cache2kBuilder < K , V > with ( ConfigurationSectionBuilder < ? extends ConfigurationSection > ... sectionBuilders ) { for ( ConfigurationSectionBuilder < ? extends ConfigurationSection > b : sectionBuilders ) { config ( ) . getSections ( ) . add ( b . buildConfigurationSection ( ) ) ; } return this ; }
Add a new configuration sub section .
73
7
30,085
public final Cache2kBuilder < K , V > asyncListenerExecutor ( Executor v ) { config ( ) . setAsyncListenerExecutor ( new CustomizationReferenceSupplier < Executor > ( v ) ) ; return this ; }
Executor for asynchronous listeners . If no executor is specified an internal executor is used that has unbounded thread capacity .
50
25
30,086
public final Cache2kBuilder < K , V > timeReference ( TimeReference v ) { config ( ) . setTimeReference ( new CustomizationReferenceSupplier < TimeReference > ( v ) ) ; return this ; }
Clock to be used by the cache as time reference .
46
11
30,087
static long limitExpiryToMaxLinger ( long now , long _maxLinger , long _requestedExpiryTime , boolean _sharpExpiryEnabled ) { if ( _sharpExpiryEnabled && _requestedExpiryTime > ExpiryPolicy . REFRESH && _requestedExpiryTime < ExpiryPolicy . ETERNAL ) { _requestedExpiryTime = - _requestedExpiryTime ; } return Expiry . mixTimeSpanAndPointInTime ( now , _maxLinger , _requestedExpiryTime ) ; }
Ignore the value of the expiry policy if later than the maximum expiry time . If max linger takes over we do not request sharp expiry .
131
31
30,088
@ Override public String getDefaultManagerName ( ClassLoader cl ) { ConfigurationContext ctx = classLoader2config . get ( cl ) ; if ( ctx == null ) { ctx = createContext ( cl , null , DEFAULT_CONFIGURATION_FILE ) ; Map < ClassLoader , ConfigurationContext > m2 = new HashMap < ClassLoader , ConfigurationContext > ( classLoader2config ) ; m2 . put ( cl , ctx ) ; classLoader2config = m2 ; } return ctx . getManagerConfiguration ( ) . getDefaultManagerName ( ) ; }
The name of the default manager may be changed in the configuration file . Load the default configuration file and save the loaded context for the respective classloader so we do not load the context twice when we create the first cache .
125
44
30,089
private ConfigurationContext getManagerContext ( final CacheManager mgr ) { ConfigurationContext ctx = manager2defaultConfig . get ( mgr ) ; if ( ctx != null ) { return ctx ; } synchronized ( this ) { ctx = manager2defaultConfig . get ( mgr ) ; if ( ctx != null ) { return ctx ; } if ( mgr . isDefaultManager ( ) ) { ctx = classLoader2config . get ( mgr . getClassLoader ( ) ) ; } if ( ctx == null ) { ctx = createContext ( mgr . getClassLoader ( ) , mgr . getName ( ) , getFileName ( mgr ) ) ; } Map < CacheManager , ConfigurationContext > m2 = new HashMap < CacheManager , ConfigurationContext > ( manager2defaultConfig ) ; m2 . put ( mgr , ctx ) ; manager2defaultConfig = m2 ; return ctx ; } }
Hold the cache default configuration of a manager in a hash table . This is reused for all caches of one manager .
203
23
30,090
void apply ( final ConfigurationContext ctx , final ParsedConfiguration _parsedCfg , final Object cfg ) { ParsedConfiguration _templates = ctx . getTemplates ( ) ; ConfigurationTokenizer . Property _include = _parsedCfg . getPropertyMap ( ) . get ( "include" ) ; if ( _include != null ) { for ( String _template : _include . getValue ( ) . split ( "," ) ) { ParsedConfiguration c2 = null ; if ( _templates != null ) { c2 = _templates . getSection ( _template ) ; } if ( c2 == null ) { throw new ConfigurationException ( "Template not found \'" + _template + "\'" , _include ) ; } apply ( ctx , c2 , cfg ) ; } } applyPropertyValues ( _parsedCfg , cfg ) ; if ( ! ( cfg instanceof ConfigurationWithSections ) ) { return ; } ConfigurationWithSections _configurationWithSections = ( ConfigurationWithSections ) cfg ; for ( ParsedConfiguration _parsedSection : _parsedCfg . getSections ( ) ) { String _sectionType = ctx . getPredefinedSectionTypes ( ) . get ( _parsedSection . getName ( ) ) ; if ( _sectionType == null ) { _sectionType = _parsedSection . getType ( ) ; } if ( _sectionType == null ) { throw new ConfigurationException ( "type missing or unknown" , _parsedSection ) ; } Class < ? > _type ; try { _type = Class . forName ( _sectionType ) ; } catch ( ClassNotFoundException ex ) { throw new ConfigurationException ( "class not found '" + _sectionType + "'" , _parsedSection ) ; } if ( ! handleSection ( ctx , _type , _configurationWithSections , _parsedSection ) && ! handleCollection ( ctx , _type , cfg , _parsedSection ) && ! handleBean ( ctx , _type , cfg , _parsedSection ) ) { throw new ConfigurationException ( "Unknown property '" + _parsedSection . getContainer ( ) + "'" , _parsedSection ) ; } } }
Set properties in configuration bean based on the parsed configuration . Called by unit test .
506
16
30,091
private boolean handleBean ( final ConfigurationContext ctx , final Class < ? > _type , final Object cfg , final ParsedConfiguration _parsedCfg ) { String _containerName = _parsedCfg . getContainer ( ) ; BeanPropertyMutator m = provideMutator ( cfg . getClass ( ) ) ; Class < ? > _targetType = m . getType ( _containerName ) ; if ( _targetType == null ) { return false ; } if ( ! _targetType . isAssignableFrom ( _type ) ) { throw new ConfigurationException ( "Type mismatch, expected: '" + _targetType . getName ( ) + "'" , _parsedCfg ) ; } Object _bean = createBeanAndApplyConfiguration ( ctx , _type , _parsedCfg ) ; mutateAndCatch ( cfg , m , _containerName , _bean , _parsedCfg , _bean ) ; return true ; }
Create the bean apply configuration to it and set it .
215
11
30,092
private boolean handleSection ( final ConfigurationContext ctx , final Class < ? > _type , final ConfigurationWithSections cfg , final ParsedConfiguration sc ) { String _containerName = sc . getContainer ( ) ; if ( ! "sections" . equals ( _containerName ) ) { return false ; } @ SuppressWarnings ( "unchecked" ) ConfigurationSection _sectionBean = cfg . getSections ( ) . getSection ( ( Class < ConfigurationSection > ) _type ) ; if ( ! ( _sectionBean instanceof SingletonConfigurationSection ) ) { try { _sectionBean = ( ConfigurationSection ) _type . newInstance ( ) ; } catch ( Exception ex ) { throw new ConfigurationException ( "Cannot instantiate section class: " + ex , sc ) ; } cfg . getSections ( ) . add ( _sectionBean ) ; } apply ( ctx , sc , _sectionBean ) ; return true ; }
Create a new configuration section or reuse an existing section if it is a singleton .
209
17
30,093
public void checkKeepOrRemove ( ) { boolean _hasKeepAfterExpired = heapCache . isKeepAfterExpired ( ) ; if ( expiry != 0 || remove || _hasKeepAfterExpired ) { mutationUpdateHeap ( ) ; return ; } if ( _hasKeepAfterExpired ) { expiredImmediatelyKeepData ( ) ; return ; } expiredImmediatelyAndRemove ( ) ; }
In case we have an expiry of 0 this means that the entry should not be cached . If there is a valid entry we remove it if we do not keep the data .
85
36
30,094
public void asyncOperationStarted ( ) { if ( syncThread == Thread . currentThread ( ) ) { synchronized ( entry ) { while ( entry . isProcessing ( ) ) { try { entry . wait ( ) ; } catch ( InterruptedException ex ) { Thread . currentThread ( ) . interrupt ( ) ; } } } } else { entryLocked = false ; } }
If the thread is making a synchronous call wait until the operation is complete . There is a small chance that the callback completes before we get here as well as some other operation changing the entry again .
80
38
30,095
private static void initializeLogFactory ( ) { ServiceLoader < LogFactory > loader = ServiceLoader . load ( LogFactory . class ) ; for ( LogFactory lf : loader ) { logFactory = lf ; log ( "New instance, using: " + logFactory . getClass ( ) . getName ( ) ) ; return ; } try { final org . slf4j . ILoggerFactory lf = org . slf4j . LoggerFactory . getILoggerFactory ( ) ; logFactory = new LogFactory ( ) { @ Override public Log getLog ( String s ) { return new Slf4jLogger ( lf . getLogger ( s ) ) ; } } ; log ( "New instance, using SLF4J logging" ) ; return ; } catch ( NoClassDefFoundError ignore ) { } try { final org . apache . commons . logging . LogFactory cl = org . apache . commons . logging . LogFactory . getFactory ( ) ; logFactory = new LogFactory ( ) { @ Override public Log getLog ( String s ) { return new CommonsLogger ( cl . getInstance ( s ) ) ; } } ; log ( "New instance, using commons logging" ) ; return ; } catch ( NoClassDefFoundError ignore ) { } logFactory = new LogFactory ( ) { @ Override public Log getLog ( String s ) { return new JdkLogger ( Logger . getLogger ( s ) ) ; } } ; log ( "New instance, using JDK logging" ) ; }
Finds a logger we can use . First we start with looking for a registered service provider . Then apache commons logging . As a fallback we use JDK logging .
334
35
30,096
private static String readFile ( String _name ) throws IOException { InputStream in = SingleProviderResolver . class . getClassLoader ( ) . getResourceAsStream ( _name ) ; if ( in == null ) { return null ; } try { LineNumberReader r = new LineNumberReader ( new InputStreamReader ( in ) ) ; String l = r . readLine ( ) ; while ( l != null ) { if ( ! l . startsWith ( "#" ) ) { return l ; } l = r . readLine ( ) ; } } finally { in . close ( ) ; } return null ; }
Read the first line of a file in the classpath into a string .
130
15
30,097
@ SuppressWarnings ( "unchecked" ) private static < S > Iterable < S > constructAllServiceImplementations ( Class < S > _service ) { ClassLoader cl = CacheManagerImpl . class . getClassLoader ( ) ; ArrayList < S > li = new ArrayList < S > ( ) ; Iterator < S > it = ServiceLoader . load ( _service , cl ) . iterator ( ) ; while ( it . hasNext ( ) ) { try { li . add ( it . next ( ) ) ; } catch ( ServiceConfigurationError ex ) { Log . getLog ( CacheManager . class . getName ( ) ) . debug ( "Error loading service '" + _service + "'" , ex ) ; } } final S [ ] a = ( S [ ] ) Array . newInstance ( _service , li . size ( ) ) ; li . toArray ( a ) ; return new Iterable < S > ( ) { public Iterator < S > iterator ( ) { return new Iterator < S > ( ) { private int pos = 0 ; public boolean hasNext ( ) { return pos < a . length ; } public S next ( ) { return a [ pos ++ ] ; } public void remove ( ) { throw new UnsupportedOperationException ( ) ; } } ; } } ; }
The service loader works lazily however we want to have all implementations constructed . Retrieve all implementations from the service loader and return a read - only iterable backed by an array .
280
35
30,098
public static void checkName ( String s ) { for ( char c : s . toCharArray ( ) ) { if ( c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' || c == ' ' ) { continue ; } if ( c < 32 || c >= 127 || ! Character . isJavaIdentifierPart ( c ) ) { throw new IllegalArgumentException ( "Cache name contains illegal character: '" + c + "', name=\"" + s + "\"" ) ; } } }
Don't accept cache or manager names with too weird characters .
153
13
30,099
@ Override public void close ( ) { if ( isDefaultManager ( ) && getClass ( ) . getClassLoader ( ) == classLoader ) { log . info ( "Closing default CacheManager" ) ; } Iterable < Cache > _caches ; synchronized ( lock ) { if ( closing ) { return ; } _caches = cachesCopy ( ) ; closing = true ; } logPhase ( "close" ) ; List < Throwable > _suppressedExceptions = new ArrayList < Throwable > ( ) ; for ( Cache c : _caches ) { ( ( InternalCache ) c ) . cancelTimerJobs ( ) ; } for ( Cache c : _caches ) { try { c . close ( ) ; } catch ( Throwable t ) { _suppressedExceptions . add ( t ) ; } } try { for ( CacheManagerLifeCycleListener lc : cacheManagerLifeCycleListeners ) { lc . managerDestroyed ( this ) ; } } catch ( Throwable t ) { _suppressedExceptions . add ( t ) ; } ( ( Cache2kCoreProviderImpl ) PROVIDER ) . removeManager ( this ) ; synchronized ( lock ) { for ( Cache c : cacheNames . values ( ) ) { log . warn ( "unable to close cache: " + c . getName ( ) ) ; } } eventuallyThrowException ( _suppressedExceptions ) ; cacheNames = null ; }
The shutdown takes place in two phases . First all caches are notified to cancel their scheduled timer jobs after that the shutdown is done . Cancelling the timer jobs first is needed because there may be cache stacking and a timer job of one cache may call an already closed cache .
306
54