idx int64 0 41.2k | question stringlengths 74 4.04k | target stringlengths 7 750 |
|---|---|---|
31,700 | public float [ ] t1 ( float [ ] z , int k ) { float [ ] result = new float [ z . length ] ; for ( int i = 0 ; i < z . length ; i ++ ) { result [ i ] = ( new Transformations ( ) ) . sDecept ( z [ i ] , ( float ) 0.35 , ( float ) 0.001 , ( float ) 0.05 ) ; } return result ; } | WFG5 t1 transformation |
31,701 | double calculateHypervolumeIndicator ( Solution < ? > solutionA , Solution < ? > solutionB , int d , double maximumValues [ ] , double minimumValues [ ] ) { double a , b , r , max ; double volume ; double rho = 2.0 ; r = rho * ( maximumValues [ d - 1 ] - minimumValues [ d - 1 ] ) ; max = minimumValues [ d - 1 ] + r ; a = solutionA . getObjective ( d - 1 ) ; if ( solutionB == null ) { b = max ; } else { b = solutionB . getObjective ( d - 1 ) ; } if ( d == 1 ) { if ( a < b ) { volume = ( b - a ) / r ; } else { volume = 0 ; } } else { if ( a < b ) { volume = calculateHypervolumeIndicator ( solutionA , null , d - 1 , maximumValues , minimumValues ) * ( b - a ) / r ; volume += calculateHypervolumeIndicator ( solutionA , solutionB , d - 1 , maximumValues , minimumValues ) * ( max - b ) / r ; } else { volume = calculateHypervolumeIndicator ( solutionA , solutionB , d - 1 , maximumValues , minimumValues ) * ( max - a ) / r ; } } return ( volume ) ; } | Calculates the hypervolume of that portion of the objective space that is dominated by individual a but not by individual b |
31,702 | public void computeIndicatorValuesHD ( List < S > solutionSet , double [ ] maximumValues , double [ ] minimumValues ) { List < S > A , B ; indicatorValues = new ArrayList < List < Double > > ( ) ; maxIndicatorValue = - Double . MAX_VALUE ; for ( int j = 0 ; j < solutionSet . size ( ) ; j ++ ) { A = new ArrayList < > ( 1 ) ; A . add ( solutionSet . get ( j ) ) ; List < Double > aux = new ArrayList < Double > ( ) ; for ( S solution : solutionSet ) { B = new ArrayList < > ( 1 ) ; B . add ( solution ) ; int flag = ( new DominanceComparator < S > ( ) ) . compare ( A . get ( 0 ) , B . get ( 0 ) ) ; double value ; if ( flag == - 1 ) { value = - calculateHypervolumeIndicator ( A . get ( 0 ) , B . get ( 0 ) , problem . getNumberOfObjectives ( ) , maximumValues , minimumValues ) ; } else { value = calculateHypervolumeIndicator ( B . get ( 0 ) , A . get ( 0 ) , problem . getNumberOfObjectives ( ) , maximumValues , minimumValues ) ; } if ( Math . abs ( value ) > maxIndicatorValue ) { maxIndicatorValue = Math . abs ( value ) ; } aux . add ( value ) ; } indicatorValues . add ( aux ) ; } } | This structure stores the indicator values of each pair of elements |
31,703 | public void fitness ( List < S > solutionSet , int pos ) { double fitness = 0.0 ; double kappa = 0.05 ; for ( int i = 0 ; i < solutionSet . size ( ) ; i ++ ) { if ( i != pos ) { fitness += Math . exp ( ( - 1 * indicatorValues . get ( i ) . get ( pos ) / maxIndicatorValue ) / kappa ) ; } } solutionFitness . setAttribute ( solutionSet . get ( pos ) , fitness ) ; } | Calculate the fitness for the individual at position pos |
31,704 | public void calculateFitness ( List < S > solutionSet ) { double [ ] maximumValues = new double [ problem . getNumberOfObjectives ( ) ] ; double [ ] minimumValues = new double [ problem . getNumberOfObjectives ( ) ] ; for ( int i = 0 ; i < problem . getNumberOfObjectives ( ) ; i ++ ) { maximumValues [ i ] = - Double . MAX_VALUE ; minimumValues [ i ] = Double . MAX_VALUE ; } for ( S solution : solutionSet ) { for ( int obj = 0 ; obj < problem . getNumberOfObjectives ( ) ; obj ++ ) { double value = solution . getObjective ( obj ) ; if ( value > maximumValues [ obj ] ) { maximumValues [ obj ] = value ; } if ( value < minimumValues [ obj ] ) { minimumValues [ obj ] = value ; } } } computeIndicatorValuesHD ( solutionSet , maximumValues , minimumValues ) ; for ( int pos = 0 ; pos < solutionSet . size ( ) ; pos ++ ) { fitness ( solutionSet , pos ) ; } } | Calculate the fitness for the entire population . |
31,705 | public void removeWorst ( List < S > solutionSet ) { double worst = ( double ) solutionFitness . getAttribute ( solutionSet . get ( 0 ) ) ; int worstIndex = 0 ; double kappa = 0.05 ; for ( int i = 1 ; i < solutionSet . size ( ) ; i ++ ) { if ( ( double ) solutionFitness . getAttribute ( solutionSet . get ( i ) ) > worst ) { worst = ( double ) solutionFitness . getAttribute ( solutionSet . get ( i ) ) ; worstIndex = i ; } } for ( int i = 0 ; i < solutionSet . size ( ) ; i ++ ) { if ( i != worstIndex ) { double fitness = ( double ) solutionFitness . getAttribute ( solutionSet . get ( i ) ) ; fitness -= Math . exp ( ( - indicatorValues . get ( worstIndex ) . get ( i ) / maxIndicatorValue ) / kappa ) ; solutionFitness . setAttribute ( solutionSet . get ( i ) , fitness ) ; } } indicatorValues . remove ( worstIndex ) ; for ( List < Double > anIndicatorValues_ : indicatorValues ) { anIndicatorValues_ . remove ( worstIndex ) ; } solutionSet . remove ( worstIndex ) ; } | Update the fitness before removing an individual |
31,706 | public double evaluate ( List < ? extends Solution < ? > > set1 , List < ? extends Solution < ? > > set2 ) { double result ; int sum = 0 ; if ( set2 . size ( ) == 0 ) { if ( set1 . size ( ) == 0 ) { result = 0.0 ; } else { result = 1.0 ; } } else { for ( Solution < ? > solution : set2 ) { if ( SolutionListUtils . isSolutionDominatedBySolutionList ( solution , set1 ) ) { sum ++ ; } } result = ( double ) sum / set2 . size ( ) ; } return result ; } | Calculates the set coverage of set1 over set2 |
31,707 | protected void checkNumberOfParents ( List < S > population , int numberOfParentsForCrossover ) { if ( ( population . size ( ) % numberOfParentsForCrossover ) != 0 ) { throw new JMetalException ( "Wrong number of parents: the remainder if the " + "population size (" + population . size ( ) + ") is not divisible by " + numberOfParentsForCrossover ) ; } } | A crossover operator is applied to a number of parents and it is assumed that the population contains a valid number of solutions . This method checks that . |
31,708 | public int location ( S solution ) { int [ ] position = new int [ numberOfObjectives ] ; for ( int obj = 0 ; obj < numberOfObjectives ; obj ++ ) { if ( ( solution . getObjective ( obj ) > gridUpperLimits [ obj ] ) || ( solution . getObjective ( obj ) < gridLowerLimits [ obj ] ) ) { return - 1 ; } else if ( solution . getObjective ( obj ) == gridLowerLimits [ obj ] ) { position [ obj ] = 0 ; } else if ( solution . getObjective ( obj ) == gridUpperLimits [ obj ] ) { position [ obj ] = ( ( int ) Math . pow ( 2.0 , bisections ) ) - 1 ; } else { double tmpSize = divisionSize [ obj ] ; double value = solution . getObjective ( obj ) ; double account = gridLowerLimits [ obj ] ; int ranges = ( int ) Math . pow ( 2.0 , bisections ) ; for ( int b = 0 ; b < bisections ; b ++ ) { tmpSize /= 2.0 ; ranges /= 2 ; if ( value > ( account + tmpSize ) ) { position [ obj ] += ranges ; account += tmpSize ; } } } } int location = 0 ; for ( int obj = 0 ; obj < numberOfObjectives ; obj ++ ) { location += position [ obj ] * Math . pow ( 2.0 , obj * bisections ) ; } return location ; } | Calculates the hypercube of a solution |
31,709 | public void removeSolution ( int location ) { hypercubes [ location ] -- ; if ( location == mostPopulatedHypercube ) { for ( int i = 0 ; i < hypercubes . length ; i ++ ) { if ( hypercubes [ i ] > hypercubes [ mostPopulatedHypercube ] ) { mostPopulatedHypercube = i ; } } } if ( hypercubes [ location ] == 0 ) { this . calculateOccupied ( ) ; } } | Decreases the number of solutions into a specific hypercube . |
31,710 | public void addSolution ( int location ) { hypercubes [ location ] ++ ; if ( hypercubes [ location ] > hypercubes [ mostPopulatedHypercube ] ) { mostPopulatedHypercube = location ; } if ( hypercubes [ location ] == 1 ) { this . calculateOccupied ( ) ; } } | Increases the number of solutions into a specific hypercube . |
31,711 | public int rouletteWheel ( BoundedRandomGenerator < Double > randomGenerator ) { double inverseSum = 0.0 ; for ( int hypercube : hypercubes ) { if ( hypercube > 0 ) { inverseSum += 1.0 / ( double ) hypercube ; } } double random = randomGenerator . getRandomValue ( 0.0 , inverseSum ) ; int hypercube = 0 ; double accumulatedSum = 0.0 ; while ( hypercube < hypercubes . length ) { if ( hypercubes [ hypercube ] > 0 ) { accumulatedSum += 1.0 / ( double ) hypercubes [ hypercube ] ; } if ( accumulatedSum > random ) { return hypercube ; } hypercube ++ ; } return hypercube ; } | Returns a random hypercube using a rouletteWheel method . |
31,712 | public void calculateOccupied ( ) { int total = 0 ; for ( int hypercube : hypercubes ) { if ( hypercube > 0 ) { total ++ ; } } occupied = new int [ total ] ; int base = 0 ; for ( int i = 0 ; i < hypercubes . length ; i ++ ) { if ( hypercubes [ i ] > 0 ) { occupied [ base ] = i ; base ++ ; } } } | Calculates the number of hypercubes having one or more solutions . Returns the number of hypercubes with more than zero solutions . |
31,713 | public int randomOccupiedHypercube ( BoundedRandomGenerator < Integer > randomGenerator ) { int rand = randomGenerator . getRandomValue ( 0 , occupied . length - 1 ) ; return occupied [ rand ] ; } | Returns a random hypercube that has more than zero solutions . |
31,714 | public double getAverageOccupation ( ) { calculateOccupied ( ) ; double result ; if ( occupiedHypercubes ( ) == 0 ) { result = 0.0 ; } else { double sum = 0.0 ; for ( int value : occupied ) { sum += hypercubes [ value ] ; } result = sum / occupiedHypercubes ( ) ; } return result ; } | Return the average number of solutions in the occupied hypercubes |
31,715 | public static double [ ] getMaximumValues ( Front front ) { if ( front == null ) { throw new NullFrontException ( ) ; } else if ( front . getNumberOfPoints ( ) == 0 ) { throw new EmptyFrontException ( ) ; } int numberOfObjectives = front . getPoint ( 0 ) . getDimension ( ) ; double [ ] maximumValue = new double [ numberOfObjectives ] ; for ( int i = 0 ; i < numberOfObjectives ; i ++ ) { maximumValue [ i ] = Double . NEGATIVE_INFINITY ; } for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { for ( int j = 0 ; j < numberOfObjectives ; j ++ ) { if ( front . getPoint ( i ) . getValue ( j ) > maximumValue [ j ] ) { maximumValue [ j ] = front . getPoint ( i ) . getValue ( j ) ; } } } return maximumValue ; } | Gets the maximum values for each objectives in a front |
31,716 | public static double [ ] getMinimumValues ( Front front ) { if ( front == null ) { throw new NullFrontException ( ) ; } else if ( front . getNumberOfPoints ( ) == 0 ) { throw new EmptyFrontException ( ) ; } int numberOfObjectives = front . getPoint ( 0 ) . getDimension ( ) ; double [ ] minimumValue = new double [ numberOfObjectives ] ; for ( int i = 0 ; i < numberOfObjectives ; i ++ ) { minimumValue [ i ] = Double . MAX_VALUE ; } for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { for ( int j = 0 ; j < numberOfObjectives ; j ++ ) { if ( front . getPoint ( i ) . getValue ( j ) < minimumValue [ j ] ) { minimumValue [ j ] = front . getPoint ( i ) . getValue ( j ) ; } } } return minimumValue ; } | Gets the minimum values for each objectives in a given front |
31,717 | public static double distanceToNearestPoint ( Point point , Front front , PointDistance distance ) { if ( front == null ) { throw new NullFrontException ( ) ; } else if ( front . getNumberOfPoints ( ) == 0 ) { throw new EmptyFrontException ( ) ; } else if ( point == null ) { throw new JMetalException ( "The point is null" ) ; } double minDistance = Double . MAX_VALUE ; for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { double aux = distance . compute ( point , front . getPoint ( i ) ) ; if ( ( aux < minDistance ) && ( aux > 0.0 ) ) { minDistance = aux ; } } return minDistance ; } | Gets the distance between a point and the nearest one in a front . If a distance equals to 0 is found that means that the point is in the front so it is excluded |
31,718 | public static Front getInvertedFront ( Front front ) { if ( front == null ) { throw new NullFrontException ( ) ; } else if ( front . getNumberOfPoints ( ) == 0 ) { throw new EmptyFrontException ( ) ; } int numberOfDimensions = front . getPoint ( 0 ) . getDimension ( ) ; Front invertedFront = new ArrayFront ( front . getNumberOfPoints ( ) , numberOfDimensions ) ; for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { for ( int j = 0 ; j < numberOfDimensions ; j ++ ) { if ( front . getPoint ( i ) . getValue ( j ) <= 1.0 && front . getPoint ( i ) . getValue ( j ) >= 0.0 ) { invertedFront . getPoint ( i ) . setValue ( j , 1.0 - front . getPoint ( i ) . getValue ( j ) ) ; } else if ( front . getPoint ( i ) . getValue ( j ) > 1.0 ) { invertedFront . getPoint ( i ) . setValue ( j , 0.0 ) ; } else if ( front . getPoint ( i ) . getValue ( j ) < 0.0 ) { invertedFront . getPoint ( i ) . setValue ( j , 1.0 ) ; } } } return invertedFront ; } | This method receives a normalized pareto front and return the inverted one . This method is for minimization problems |
31,719 | public static double [ ] [ ] convertFrontToArray ( Front front ) { if ( front == null ) { throw new NullFrontException ( ) ; } double [ ] [ ] arrayFront = new double [ front . getNumberOfPoints ( ) ] [ ] ; for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { arrayFront [ i ] = new double [ front . getPoint ( i ) . getDimension ( ) ] ; for ( int j = 0 ; j < front . getPoint ( i ) . getDimension ( ) ; j ++ ) { arrayFront [ i ] [ j ] = front . getPoint ( i ) . getValue ( j ) ; } } return arrayFront ; } | Given a front converts it to an array of double values |
31,720 | public static List < PointSolution > convertFrontToSolutionList ( Front front ) { if ( front == null ) { throw new NullFrontException ( ) ; } int numberOfObjectives ; int solutionSetSize = front . getNumberOfPoints ( ) ; if ( front . getNumberOfPoints ( ) == 0 ) { numberOfObjectives = 0 ; } else { numberOfObjectives = front . getPoint ( 0 ) . getDimension ( ) ; } List < PointSolution > solutionSet = new ArrayList < > ( solutionSetSize ) ; for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { PointSolution solution = new PointSolution ( numberOfObjectives ) ; for ( int j = 0 ; j < numberOfObjectives ; j ++ ) { solution . setObjective ( j , front . getPoint ( i ) . getValue ( j ) ) ; } solutionSet . add ( solution ) ; } return solutionSet ; } | Given a front converts it to a Solution set of PointSolutions |
31,721 | public List < PermutationSolution < Integer > > execute ( List < PermutationSolution < Integer > > parents ) { if ( null == parents ) { throw new JMetalException ( "Null parameter" ) ; } else if ( parents . size ( ) != 2 ) { throw new JMetalException ( "There must be two parents instead of " + parents . size ( ) ) ; } return doCrossover ( crossoverProbability , parents ) ; } | Executes the operation |
31,722 | public int compare ( Point pointOne , Point pointTwo ) { if ( pointOne == null ) { throw new JMetalException ( "PointOne is null" ) ; } else if ( pointTwo == null ) { throw new JMetalException ( "PointTwo is null" ) ; } else if ( pointOne . getDimension ( ) != pointTwo . getDimension ( ) ) { throw new JMetalException ( "Points have different size: " + pointOne . getDimension ( ) + " and " + pointTwo . getDimension ( ) ) ; } for ( int i = pointOne . getDimension ( ) - 1 ; i >= 0 ; i -- ) { if ( isBetter ( pointOne . getValue ( i ) , pointTwo . getValue ( i ) ) ) { return - 1 ; } else if ( isBetter ( pointTwo . getValue ( i ) , pointOne . getValue ( i ) ) ) { return 1 ; } } return 0 ; } | Compares two Point objects |
31,723 | public double spread ( Front front , Front referenceFront ) { PointDistance distance = new EuclideanDistance ( ) ; front . sort ( new LexicographicalPointComparator ( ) ) ; referenceFront . sort ( new LexicographicalPointComparator ( ) ) ; double df = distance . compute ( front . getPoint ( 0 ) , referenceFront . getPoint ( 0 ) ) ; double dl = distance . compute ( front . getPoint ( front . getNumberOfPoints ( ) - 1 ) , referenceFront . getPoint ( referenceFront . getNumberOfPoints ( ) - 1 ) ) ; double mean = 0.0 ; double diversitySum = df + dl ; int numberOfPoints = front . getNumberOfPoints ( ) ; for ( int i = 0 ; i < ( numberOfPoints - 1 ) ; i ++ ) { mean += distance . compute ( front . getPoint ( i ) , front . getPoint ( i + 1 ) ) ; } mean = mean / ( double ) ( numberOfPoints - 1 ) ; if ( numberOfPoints > 1 ) { for ( int i = 0 ; i < ( numberOfPoints - 1 ) ; i ++ ) { diversitySum += Math . abs ( distance . compute ( front . getPoint ( i ) , front . getPoint ( i + 1 ) ) - mean ) ; } return diversitySum / ( df + dl + ( numberOfPoints - 1 ) * mean ) ; } else { return 1.0 ; } } | Calculates the Spread metric . |
31,724 | private double hypervolume ( Front front , Front referenceFront ) { Front invertedFront ; invertedFront = FrontUtils . getInvertedFront ( front ) ; int numberOfObjectives = referenceFront . getPoint ( 0 ) . getDimension ( ) ; return this . calculateHypervolume ( FrontUtils . convertFrontToArray ( invertedFront ) , invertedFront . getNumberOfPoints ( ) , numberOfObjectives ) ; } | Returns the hypervolume value of a front of points |
31,725 | private double [ ] hvContributions ( double [ ] [ ] front ) { int numberOfObjectives = front [ 0 ] . length ; double [ ] contributions = new double [ front . length ] ; double [ ] [ ] frontSubset = new double [ front . length - 1 ] [ front [ 0 ] . length ] ; LinkedList < double [ ] > frontCopy = new LinkedList < double [ ] > ( ) ; Collections . addAll ( frontCopy , front ) ; double [ ] [ ] totalFront = frontCopy . toArray ( frontSubset ) ; double totalVolume = this . calculateHypervolume ( totalFront , totalFront . length , numberOfObjectives ) ; for ( int i = 0 ; i < front . length ; i ++ ) { double [ ] evaluatedPoint = frontCopy . remove ( i ) ; frontSubset = frontCopy . toArray ( frontSubset ) ; double hv = this . calculateHypervolume ( frontSubset , frontSubset . length , numberOfObjectives ) ; double contribution = totalVolume - hv ; contributions [ i ] = contribution ; frontCopy . add ( i , evaluatedPoint ) ; } return contributions ; } | Calculates how much hypervolume each point dominates exclusively . The points have to be transformed beforehand to accommodate the assumptions of Zitzler s hypervolume code . |
31,726 | public double invertedGenerationalDistancePlus ( Front front , Front referenceFront ) { double sum = 0.0 ; for ( int i = 0 ; i < referenceFront . getNumberOfPoints ( ) ; i ++ ) { sum += FrontUtils . distanceToClosestPoint ( referenceFront . getPoint ( i ) , front , new DominanceDistance ( ) ) ; } return sum / referenceFront . getNumberOfPoints ( ) ; } | Returns the inverted generational distance plus value for a given front |
31,727 | private Map < String , Double > computeStatistics ( List < Double > values ) { Map < String , Double > results = new HashMap < > ( ) ; DescriptiveStatistics stats = new DescriptiveStatistics ( ) ; for ( Double value : values ) { stats . addValue ( value ) ; } results . put ( "mean" , stats . getMean ( ) ) ; results . put ( "median" , stats . getPercentile ( 50.0 ) ) ; results . put ( "stdDeviation" , stats . getStandardDeviation ( ) ) ; results . put ( "iqr" , stats . getPercentile ( 75 ) - stats . getPercentile ( 25 ) ) ; results . put ( "max" , stats . getMax ( ) ) ; results . put ( "min" , stats . getMean ( ) ) ; results . put ( "numberOfElements" , ( double ) values . size ( ) ) ; return results ; } | Computes the statistical values |
31,728 | protected int [ ] rankUnfeasibleSolutions ( List < S > population ) { int numberOfViolatedConstraintsBySolution1 , numberOfViolatedConstraintsBySolution2 ; int indexOfFirstSolution , indexOfSecondSolution , indexOfWeight ; double overallConstraintViolationSolution1 , overallConstraintViolationSolution2 ; double minimumValueFirstSolution , minimumValueSecondSolution , value ; int [ ] rank = new int [ population . size ( ) ] ; Arrays . fill ( rank , 0 ) ; for ( indexOfFirstSolution = 0 ; indexOfFirstSolution < population . size ( ) - 1 ; indexOfFirstSolution ++ ) { for ( indexOfSecondSolution = indexOfFirstSolution + 1 ; indexOfSecondSolution < population . size ( ) ; indexOfSecondSolution ++ ) { numberOfViolatedConstraintsBySolution1 = numberOfViolatedConstraints . getAttribute ( population . get ( indexOfFirstSolution ) ) ; numberOfViolatedConstraintsBySolution2 = numberOfViolatedConstraints . getAttribute ( population . get ( indexOfSecondSolution ) ) ; if ( numberOfViolatedConstraintsBySolution1 > numberOfViolatedConstraintsBySolution2 ) { rank [ indexOfFirstSolution ] ++ ; } else if ( numberOfViolatedConstraintsBySolution1 < numberOfViolatedConstraintsBySolution2 ) { rank [ indexOfSecondSolution ] ++ ; } else { overallConstraintViolationSolution1 = overallConstraintViolation . getAttribute ( population . get ( indexOfFirstSolution ) ) ; overallConstraintViolationSolution2 = overallConstraintViolation . getAttribute ( population . get ( indexOfSecondSolution ) ) ; if ( overallConstraintViolationSolution1 > overallConstraintViolationSolution2 ) { rank [ indexOfSecondSolution ] ++ ; } else if ( overallConstraintViolationSolution1 < overallConstraintViolationSolution2 ) { rank [ indexOfFirstSolution ] ++ ; } else { minimumValueFirstSolution = minimumValueSecondSolution = Double . POSITIVE_INFINITY ; for ( indexOfWeight = 0 ; indexOfWeight < this . utilityFunctionsNadir . getSize ( ) ; indexOfWeight ++ ) { value = this . utilityFunctionsNadir . 
evaluate ( population . get ( indexOfFirstSolution ) , indexOfWeight ) ; if ( value < minimumValueFirstSolution ) { minimumValueFirstSolution = value ; } value = this . utilityFunctionsNadir . evaluate ( population . get ( indexOfSecondSolution ) , indexOfWeight ) ; if ( value < minimumValueSecondSolution ) { minimumValueSecondSolution = value ; } } for ( indexOfWeight = 0 ; indexOfWeight < this . utilityFunctionsUtopia . getSize ( ) ; indexOfWeight ++ ) { value = this . utilityFunctionsUtopia . evaluate ( population . get ( indexOfFirstSolution ) , indexOfWeight ) ; if ( value < minimumValueFirstSolution ) { minimumValueFirstSolution = value ; } value = this . utilityFunctionsUtopia . evaluate ( population . get ( indexOfSecondSolution ) , indexOfWeight ) ; if ( value < minimumValueSecondSolution ) { minimumValueSecondSolution = value ; } } if ( minimumValueFirstSolution < minimumValueSecondSolution ) { rank [ indexOfSecondSolution ] ++ ; } else { rank [ indexOfFirstSolution ] ++ ; } } } } } return rank ; } | Obtain the rank of each solution in a list of unfeasible solutions |
31,729 | private double delta ( double y , double bMutationParameter ) { double rand = randomGenenerator . getRandomValue ( ) ; int it , maxIt ; it = currentIteration ; maxIt = maxIterations ; return ( y * ( 1.0 - Math . pow ( rand , Math . pow ( ( 1.0 - it / ( double ) maxIt ) , bMutationParameter ) ) ) ) ; } | Calculates the delta value used in NonUniform mutation operator |
31,730 | private void scaleToPositive ( ) { double minScalarization = Double . MAX_VALUE ; for ( S solution : getSolutionList ( ) ) { if ( scalarization . getAttribute ( solution ) < minScalarization ) { minScalarization = scalarization . getAttribute ( solution ) ; } } if ( minScalarization < 0 ) { double eps = 10e-6 ; for ( S solution : getSolutionList ( ) ) { scalarization . setAttribute ( solution , eps + scalarization . getAttribute ( solution ) + minScalarization ) ; } } } | The niching mechanism of ESPEA only works if the scalarization values are positive . Otherwise scalarization values cannot be interpreted as charges of a physical system that signal desirability . |
31,731 | private double [ ] energyVector ( double [ ] [ ] distanceMatrix ) { double [ ] energyVector = new double [ distanceMatrix . length - 1 ] ; for ( int i = 0 ; i < energyVector . length - 1 ; i ++ ) { for ( int j = i + 1 ; j < energyVector . length ; j ++ ) { energyVector [ i ] += scalarization . getAttribute ( archive . get ( j ) ) / distanceMatrix [ i ] [ j ] ; energyVector [ j ] += scalarization . getAttribute ( archive . get ( i ) ) / distanceMatrix [ i ] [ j ] ; } energyVector [ i ] *= scalarization . getAttribute ( archive . get ( i ) ) ; } return energyVector ; } | Computes the energy contribution of each archive member . Note that the archive member at position maxSize + 1 is the new solution that is tested for eligibility of replacement . |
31,732 | private double [ ] replacementVector ( double [ ] [ ] distanceMatrix ) { double [ ] replacementVector = new double [ distanceMatrix . length - 1 ] ; double [ ] individualEnergy = new double [ distanceMatrix . length - 1 ] ; double totalEnergy = 0.0 ; for ( int i = 0 ; i < replacementVector . length ; i ++ ) { individualEnergy [ i ] = scalarization . getAttribute ( archive . get ( i ) ) / distanceMatrix [ i ] [ maxSize ] ; totalEnergy += individualEnergy [ i ] ; } for ( int i = 0 ; i < individualEnergy . length ; i ++ ) { replacementVector [ i ] = totalEnergy - individualEnergy [ i ] ; replacementVector [ i ] *= scalarization . getAttribute ( archive . get ( maxSize ) ) ; } return replacementVector ; } | Computes the replacement energy vector . Each component k of the replacement vector states how much energy the new solution would introduce into the archive instead of the archive member at position k . |
31,733 | public static void printFinalSolutionSet ( List < ? extends Solution < ? > > population ) { new SolutionListOutput ( population ) . setSeparator ( "\t" ) . setVarFileOutputContext ( new DefaultFileOutputContext ( "VAR.tsv" ) ) . setFunFileOutputContext ( new DefaultFileOutputContext ( "FUN.tsv" ) ) . print ( ) ; JMetalLogger . logger . info ( "Random seed: " + JMetalRandom . getInstance ( ) . getSeed ( ) ) ; JMetalLogger . logger . info ( "Objectives values have been written to file FUN.tsv" ) ; JMetalLogger . logger . info ( "Variables values have been written to file VAR.tsv" ) ; } | Write the population into two files and prints some data on screen |
31,734 | public static < S extends Solution < ? > > void printQualityIndicators ( List < S > population , String paretoFrontFile ) throws FileNotFoundException { Front referenceFront = new ArrayFront ( paretoFrontFile ) ; FrontNormalizer frontNormalizer = new FrontNormalizer ( referenceFront ) ; Front normalizedReferenceFront = frontNormalizer . normalize ( referenceFront ) ; Front normalizedFront = frontNormalizer . normalize ( new ArrayFront ( population ) ) ; List < PointSolution > normalizedPopulation = FrontUtils . convertFrontToSolutionList ( normalizedFront ) ; String outputString = "\n" ; outputString += "Hypervolume (N) : " + new PISAHypervolume < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "Hypervolume : " + new PISAHypervolume < S > ( referenceFront ) . evaluate ( population ) + "\n" ; outputString += "Epsilon (N) : " + new Epsilon < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "Epsilon : " + new Epsilon < S > ( referenceFront ) . evaluate ( population ) + "\n" ; outputString += "GD (N) : " + new GenerationalDistance < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "GD : " + new GenerationalDistance < S > ( referenceFront ) . evaluate ( population ) + "\n" ; outputString += "IGD (N) : " + new InvertedGenerationalDistance < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "IGD : " + new InvertedGenerationalDistance < S > ( referenceFront ) . evaluate ( population ) + "\n" ; outputString += "IGD+ (N) : " + new InvertedGenerationalDistancePlus < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "IGD+ : " + new InvertedGenerationalDistancePlus < S > ( referenceFront ) . 
evaluate ( population ) + "\n" ; outputString += "Spread (N) : " + new Spread < PointSolution > ( normalizedReferenceFront ) . evaluate ( normalizedPopulation ) + "\n" ; outputString += "Spread : " + new Spread < S > ( referenceFront ) . evaluate ( population ) + "\n" ; outputString += "Error ratio : " + new ErrorRatio < List < ? extends Solution < ? > > > ( referenceFront ) . evaluate ( population ) + "\n" ; JMetalLogger . logger . info ( outputString ) ; } | Print all the available quality indicators |
31,735 | public void doMutation ( PermutationSolution < T > solution ) { int permutationLength ; permutationLength = solution . getNumberOfVariables ( ) ; if ( ( permutationLength != 0 ) && ( permutationLength != 1 ) ) { if ( mutationRandomGenerator . getRandomValue ( ) < mutationProbability ) { int pos1 = positionRandomGenerator . getRandomValue ( 0 , permutationLength - 1 ) ; int pos2 = positionRandomGenerator . getRandomValue ( 0 , permutationLength - 1 ) ; while ( pos1 == pos2 ) { if ( pos1 == ( permutationLength - 1 ) ) pos2 = positionRandomGenerator . getRandomValue ( 0 , permutationLength - 2 ) ; else pos2 = positionRandomGenerator . getRandomValue ( pos1 , permutationLength - 1 ) ; } T temp = solution . getVariableValue ( pos1 ) ; solution . setVariableValue ( pos1 , solution . getVariableValue ( pos2 ) ) ; solution . setVariableValue ( pos2 , temp ) ; } } } | Performs the operation |
31,736 | private void doMutation ( double probability , DoubleSolution solution ) { for ( int i = 0 ; i < solution . getNumberOfVariables ( ) ; i ++ ) { if ( randomGenerator . getRandomValue ( ) <= probability ) { Double value = solution . getLowerBound ( i ) + ( ( solution . getUpperBound ( i ) - solution . getLowerBound ( i ) ) * randomGenerator . getRandomValue ( ) ) ; solution . setVariableValue ( i , value ) ; } } } | Implements the mutation operation |
31,737 | @ SuppressWarnings ( "unchecked" ) public static < S > Problem < S > loadProblem ( String problemName ) { Problem < S > problem ; try { problem = ( Problem < S > ) Class . forName ( problemName ) . getConstructor ( ) . newInstance ( ) ; } catch ( InstantiationException e ) { throw new JMetalException ( "newInstance() cannot instantiate (abstract class)" , e ) ; } catch ( IllegalAccessException e ) { throw new JMetalException ( "newInstance() is not usable (uses restriction)" , e ) ; } catch ( InvocationTargetException e ) { throw new JMetalException ( "an exception was thrown during the call of newInstance()" , e ) ; } catch ( NoSuchMethodException e ) { throw new JMetalException ( "getConstructor() was not able to find the constructor without arguments" , e ) ; } catch ( ClassNotFoundException e ) { throw new JMetalException ( "Class.forName() did not recognized the name of the class" , e ) ; } return problem ; } | Create an instance of problem passed as argument |
31,738 | public float [ ] t1 ( float [ ] z , int k ) { float [ ] result = new float [ z . length ] ; for ( int i = 0 ; i < z . length ; i ++ ) { result [ i ] = ( new Transformations ( ) ) . sMulti ( z [ i ] , 30 , 10 , ( float ) 0.35 ) ; } return result ; } | WFG4 t1 transformation |
31,739 | public double evalG ( BinarySolution solution ) { double res = 0.0 ; for ( int i = 1 ; i < solution . getNumberOfVariables ( ) ; i ++ ) { res += evalV ( u ( solution . getVariableValue ( i ) ) ) ; } return res ; } | Returns the value of the ZDT5 function G . |
31,740 | public FieldsFilter withFields ( String fields , boolean includeFields ) { parameters . put ( "fields" , fields ) ; parameters . put ( "include_fields" , includeFields ) ; return this ; } | Only retrieve certain fields from the item . |
31,741 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "created_at" ) public Date getCreatedAt ( ) { return createdAt ; } | Getter for the date this user was created on . |
31,742 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "updated_at" ) public Date getUpdatedAt ( ) { return updatedAt ; } | Getter for the date this user was last updated on . |
31,743 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "last_login" ) public Date getLastLogin ( ) { return lastLogin ; } | Getter for the last login date . |
31,744 | @ JsonProperty ( "from" ) public void setFrom ( String from ) throws IllegalArgumentException { if ( messagingServiceSID != null ) { throw new IllegalArgumentException ( "You must specify either `from` or `messagingServiceSID`, but not both" ) ; } this . from = from ; } | Setter for the Twilio From number . |
31,745 | @ JsonProperty ( "messaging_service_sid" ) public void setMessagingServiceSID ( String messagingServiceSID ) throws IllegalArgumentException { if ( from != null ) { throw new IllegalArgumentException ( "You must specify either `from` or `messagingServiceSID`, but not both" ) ; } this . messagingServiceSID = messagingServiceSID ; } | Setter for the Twilio Messaging Service SID . |
31,746 | public QueryFilter withQuery ( String query ) { try { String encodedQuery = urlEncode ( query ) ; parameters . put ( KEY_QUERY , encodedQuery ) ; return this ; } catch ( UnsupportedEncodingException ex ) { throw new IllegalStateException ( "UTF-8 encoding not supported by current Java platform implementation." , ex ) ; } } | Filter by a query |
31,747 | public String build ( ) { for ( Map . Entry < String , String > p : parameters . entrySet ( ) ) { builder . addQueryParameter ( p . getKey ( ) , p . getValue ( ) ) ; } return builder . build ( ) . toString ( ) ; } | Creates a string representation of the URL with the configured parameters . |
31,748 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "enrolled_at" ) public Date getEnrolledAt ( ) { return enrolledAt ; } | Getter for the enrolled at . |
31,749 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "last_auth" ) public Date getLastAuth ( ) { return lastAuth ; } | Getter for the last authentication . |
31,750 | public AuthorizeUrlBuilder withParameter ( String name , String value ) { assertNotNull ( name , "name" ) ; assertNotNull ( value , "value" ) ; parameters . put ( name , value ) ; return this ; } | Sets an additional parameter . |
31,751 | public Object getValue ( String key ) { if ( values == null ) { return null ; } return values . get ( key ) ; } | Returns a value from the error map if any . |
31,752 | @ JsonFormat ( shape = JsonFormat . Shape . STRING , pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) @ JsonProperty ( "date" ) public Date getDate ( ) { return date ; } | Getter for the date to which the stats belong |
31,753 | public LogEventFilter withCheckpoint ( String from , int take ) { parameters . put ( "from" , from ) ; parameters . put ( "take" , take ) ; return this ; } | Filter by checkpoint |
31,754 | public void setup ( JavaStreamingContext jssc , CommandLine cli ) throws Exception { String filtersArg = cli . getOptionValue ( "tweetFilters" ) ; String [ ] filters = ( filtersArg != null ) ? filtersArg . split ( "," ) : new String [ 0 ] ; JavaReceiverInputDStream < Status > tweets = TwitterUtils . createStream ( jssc , null , filters ) ; String fusionUrl = cli . getOptionValue ( "fusion" ) ; if ( fusionUrl != null ) { SolrSupport . sendDStreamOfDocsToFusion ( fusionUrl , cli . getOptionValue ( "fusionCredentials" ) , tweets . dstream ( ) , batchSize ) ; } else { JavaDStream < SolrInputDocument > docs = tweets . map ( new Function < Status , SolrInputDocument > ( ) { public SolrInputDocument call ( Status status ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Received tweet: " + status . getId ( ) + ": " + status . getText ( ) . replaceAll ( "\\s+" , " " ) ) ; } SolrInputDocument doc = SolrSupport . autoMapToSolrInputDoc ( "tweet-" + status . getId ( ) , status , null ) ; doc . setField ( "provider_s" , "twitter" ) ; doc . setField ( "author_s" , status . getUser ( ) . getScreenName ( ) ) ; doc . setField ( "type_s" , status . isRetweet ( ) ? "echo" : "post" ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "Transformed document: " + doc . toString ( ) ) ; return doc ; } } ) ; SolrSupport . indexDStreamOfDocs ( zkHost , collection , batchSize , docs . dstream ( ) ) ; } } | Sends a stream of tweets to Solr . |
31,755 | public static void main ( String [ ] args ) throws Exception { if ( args == null || args . length == 0 || args [ 0 ] == null || args [ 0 ] . trim ( ) . length ( ) == 0 ) { System . err . println ( "Invalid command-line args! Must pass the name of a processor to run.\n" + "Supported processors:\n" ) ; displayProcessorOptions ( System . err ) ; System . exit ( 1 ) ; } RDDProcessor procImpl ; ClassLoader myCL = SparkApp . class . getClassLoader ( ) ; try { Class < ? extends RDDProcessor > clazz = ( Class < ? extends RDDProcessor > ) myCL . loadClass ( args [ 0 ] ) ; procImpl = clazz . newInstance ( ) ; } catch ( ClassNotFoundException cnfe ) { procImpl = newProcessor ( args [ 0 ] . trim ( ) . toLowerCase ( Locale . ROOT ) ) ; } assertSerializable ( procImpl ) ; String [ ] procImplArgs = new String [ args . length - 1 ] ; System . arraycopy ( args , 1 , procImplArgs , 0 , procImplArgs . length ) ; CommandLine cli = processCommandLineArgs ( joinCommonAndProcessorOptions ( procImpl . getOptions ( ) ) , procImplArgs ) ; SparkConf sparkConf = new SparkConf ( ) . setAppName ( procImpl . getName ( ) ) ; sparkConf . set ( "spark.task.maxFailures" , "10" ) ; setupSolrAuthenticationProps ( cli , sparkConf ) ; String masterUrl = cli . getOptionValue ( "master" ) ; if ( masterUrl != null ) sparkConf . setMaster ( masterUrl ) ; log . info ( "Running processor " + procImpl . getName ( ) ) ; int exitCode = procImpl . run ( sparkConf , cli ) ; System . exit ( exitCode ) ; } | Runs a stream processor implementation . |
31,756 | public static Option [ ] getCommonOptions ( ) { return new Option [ ] { Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "Batch interval (seconds) for streaming applications; default is 1 second" ) . longOpt ( "batchInterval" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "The master URL to connect to, such as \"local\" to run locally with one thread, \"local[4]\" to run locally with 4 cores, or \"spark://master:7077\" to run on a Spark standalone cluster." ) . longOpt ( "master" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "Address of the Zookeeper ensemble; defaults to: localhost:9983" ) . longOpt ( "zkHost" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "Name of collection; no default" ) . longOpt ( "collection" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "Number of docs to queue up on the client before sending to Solr; default is 10" ) . longOpt ( "batchSize" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "For authenticating to Solr using JAAS, sets the '" + LOGIN_CONFIG_PROP + "' system property." ) . longOpt ( "solrJaasAuthConfig" ) . build ( ) , Option . builder ( ) . hasArg ( ) . required ( false ) . desc ( "For authenticating to Solr using JAAS, sets the 'solr.kerberos.jaas.appname' system property; default is Client" ) . longOpt ( "solrJaasAppName" ) . build ( ) } ; } | Support options common to all tools . |
31,757 | private static RDDProcessor newProcessor ( String streamProcType ) throws Exception { streamProcType = streamProcType . trim ( ) ; if ( "twitter-to-solr" . equals ( streamProcType ) ) return new TwitterToSolrStreamProcessor ( ) ; else if ( "word-count" . equals ( streamProcType ) ) return new WordCount ( ) ; else if ( "term-vectors" . equals ( streamProcType ) ) return new ReadTermVectors ( ) ; else if ( "docfilter" . equals ( streamProcType ) ) return new DocumentFilteringStreamProcessor ( ) ; else if ( "hdfs-to-solr" . equals ( streamProcType ) ) return new HdfsToSolrRDDProcessor ( ) ; else if ( "logs2solr" . equals ( streamProcType ) ) return new Logs2SolrRDDProcessor ( ) ; else if ( "query-solr-benchmark" . equals ( streamProcType ) ) return new QueryBenchmark ( ) ; else if ( "kmeans-anomaly" . equals ( streamProcType ) ) return new KMeansAnomaly ( ) ; else if ( "eventsim" . equals ( streamProcType ) ) return new EventsimIndexer ( ) ; for ( Class < RDDProcessor > next : findProcessorClassesInPackage ( "com.lucidworks.spark" ) ) { RDDProcessor streamProc = next . newInstance ( ) ; if ( streamProcType . equals ( streamProc . getName ( ) ) ) return streamProc ; } System . err . println ( "\n\n " + streamProcType + " not supported! Please check your command-line arguments and re-try. \n\n" ) ; System . exit ( 1 ) ; return null ; } | Creates an instance of the requested tool using classpath scanning if necessary |
31,758 | public static CommandLine processCommandLineArgs ( Option [ ] customOptions , String [ ] args ) { Options options = new Options ( ) ; options . addOption ( "h" , "help" , false , "Print this message" ) ; options . addOption ( "v" , "verbose" , false , "Generate verbose log messages" ) ; if ( customOptions != null ) { for ( int i = 0 ; i < customOptions . length ; i ++ ) options . addOption ( customOptions [ i ] ) ; } CommandLine cli = null ; try { cli = ( new GnuParser ( ) ) . parse ( options , args ) ; } catch ( ParseException exp ) { boolean hasHelpArg = false ; if ( args != null && args . length > 0 ) { for ( int z = 0 ; z < args . length ; z ++ ) { if ( "-h" . equals ( args [ z ] ) || "-help" . equals ( args [ z ] ) ) { hasHelpArg = true ; break ; } } } if ( ! hasHelpArg ) { System . err . println ( "Failed to parse command-line arguments due to: " + exp . getMessage ( ) ) ; } HelpFormatter formatter = new HelpFormatter ( ) ; formatter . printHelp ( SparkApp . class . getName ( ) , options ) ; System . exit ( 1 ) ; } if ( cli . hasOption ( "help" ) ) { HelpFormatter formatter = new HelpFormatter ( ) ; formatter . printHelp ( SparkApp . class . getName ( ) , options ) ; System . exit ( 0 ) ; } return cli ; } | Parses the command - line arguments passed by the user . |
31,759 | @ SuppressWarnings ( "unchecked" ) private static List < Class < RDDProcessor > > findProcessorClassesInPackage ( String packageName ) { List < Class < RDDProcessor > > streamProcClasses = new ArrayList < Class < RDDProcessor > > ( ) ; try { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > resources = classLoader . getResources ( path ) ; Set < String > classes = new TreeSet < String > ( ) ; while ( resources . hasMoreElements ( ) ) { URL resource = ( URL ) resources . nextElement ( ) ; classes . addAll ( findClasses ( resource . getFile ( ) , packageName ) ) ; } for ( String classInPackage : classes ) { Class < ? > theClass = classLoader . loadClass ( classInPackage ) ; if ( RDDProcessor . class . isAssignableFrom ( theClass ) ) streamProcClasses . add ( ( Class < RDDProcessor > ) theClass ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; } return streamProcClasses ; } | Scans Jar files on the classpath for RDDProcessor implementations to activate . |
31,760 | public static Map < String , String > parseColumns ( String sqlStmt ) throws Exception { String tmp = sqlStmt . replaceAll ( "\\s+" , " " ) . trim ( ) ; String lc = tmp . toLowerCase ( ) ; if ( ! lc . startsWith ( SELECT ) ) throw new IllegalArgumentException ( "Expected SQL to start with '" + SELECT + "' but found [" + sqlStmt + "] instead!" ) ; int fromAt = lc . indexOf ( FROM , SELECT . length ( ) ) ; if ( fromAt == - 1 ) throw new IllegalArgumentException ( "No FROM keyword found in SQL: " + sqlStmt ) ; String columnList = tmp . substring ( SELECT . length ( ) , fromAt ) . trim ( ) ; if ( "*" . equals ( columnList ) ) return Collections . emptyMap ( ) ; Map < String , String > columns = new HashMap < > ( ) ; for ( String pair : columnList . split ( "," ) ) { pair = pair . trim ( ) ; if ( pair . toLowerCase ( ) . startsWith ( DISTINCT ) ) { pair = pair . substring ( DISTINCT . length ( ) ) ; } String col ; String alias ; int spaceAt = pair . indexOf ( " " ) ; if ( spaceAt != - 1 ) { col = pair . substring ( 0 , spaceAt ) ; alias = pair . substring ( spaceAt + 1 ) ; if ( alias . toLowerCase ( ) . startsWith ( AS ) ) { alias = alias . substring ( AS . length ( ) ) ; } } else { col = pair ; alias = pair ; } columns . put ( col . replace ( "`" , "" ) . replace ( "'" , "" ) , alias . replace ( "`" , "" ) . replace ( "'" , "" ) ) ; } return columns ; } | Given a valid Solr SQL statement parse out the columns and aliases as a map . |
31,761 | public static SolrTermVector newInstance ( String docId , HashingTF hashingTF , NamedList < Object > termList ) { int termCount = termList . size ( ) ; int [ ] indices = new int [ termCount ] ; double [ ] weights = new double [ termCount ] ; String [ ] terms = new String [ termCount ] ; Iterator < Map . Entry < String , Object > > termsIter = termList . iterator ( ) ; int idx = - 1 ; while ( termsIter . hasNext ( ) ) { Map . Entry < String , Object > termEntry = termsIter . next ( ) ; double weight = 0d ; Object termVal = termEntry . getValue ( ) ; if ( termVal instanceof NamedList ) { Object w = ( ( NamedList ) termVal ) . get ( "tf-idf" ) ; if ( w != null ) { weight = ( w instanceof Number ) ? ( ( Number ) w ) . doubleValue ( ) : Double . parseDouble ( w . toString ( ) ) ; } } ++ idx ; String term = termEntry . getKey ( ) ; terms [ idx ] = term ; indices [ idx ] = hashingTF . indexOf ( term ) ; weights [ idx ] = weight ; } return new SolrTermVector ( docId , terms , hashingTF . numFeatures ( ) , indices , weights ) ; } | Converts doc - level term vector information the Solr QueryResponse into a SolrTermVector . |
31,762 | public InsertStrategy getOverridenStrategy ( AbstractEntityProperty < ? > property ) { final InsertStrategy insertStrategy = OverridingOptional . from ( this . insertStrategy ) . defaultValue ( property . insertStrategy ( ) ) . get ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( format ( "Get runtime insert strategy for entity %s : %s" , property . entityClass . getCanonicalName ( ) , insertStrategy . name ( ) ) ) ; } return insertStrategy ; } | Determine the insert strategy for the given entity using static configuration and runtime option |
31,763 | public TypeSpec buildDeleteWhereForClusteringColumn ( EntityMetaSignature signature , List < FieldSignatureInfo > clusteringCols , List < ClassSignatureInfo > classesSignature , ClassSignatureInfo lastSignature ) { final ClassSignatureInfo classSignature = classesSignature . get ( 0 ) ; final ClassSignatureInfo nextSignature = classesSignature . get ( 1 ) ; final FieldSignatureInfo clusteringColumnInfo = clusteringCols . get ( 0 ) ; final String rootClassName = signature . deleteClassName ( ) ; TypeName relationClassTypeName = ClassName . get ( DSL_PACKAGE , rootClassName + "." + classSignature . className + "." + DSL_RELATION ) ; final TypeSpec . Builder whereClassBuilder = TypeSpec . classBuilder ( classSignature . className ) . superclass ( classSignature . superType ) . addModifiers ( Modifier . PUBLIC , Modifier . FINAL ) . addMethod ( buildWhereConstructorWithOptions ( DELETE_DOT_WHERE ) ) . addMethod ( buildGetThis ( classSignature . returnClassType ) ) . addMethod ( buildGetMetaInternal ( signature . entityRawClass ) ) . addMethod ( buildGetEntityClass ( signature ) ) . addMethod ( buildGetRte ( ) ) . addMethod ( buildGetOptions ( ) ) . addMethod ( buildGetBoundValuesInternal ( ) ) . addMethod ( buildGetEncodedBoundValuesInternal ( ) ) ; final TypeSpec . Builder relationClassBuilder = TypeSpec . classBuilder ( DSL_RELATION ) . addModifiers ( Modifier . PUBLIC , Modifier . FINAL ) . addMethod ( buildColumnRelation ( EQ , nextSignature . returnClassType , clusteringColumnInfo , ReturnType . NEW ) ) ; augmentClusteringColRelationClassForWhereClause ( relationClassBuilder , clusteringColumnInfo , nextSignature , lastSignature ) ; whereClassBuilder . addMethod ( buildRelationMethod ( clusteringColumnInfo . fieldName , relationClassTypeName ) ) . addType ( relationClassBuilder . build ( ) ) ; augmentWhereClass ( whereClassBuilder , signature , clusteringCols , classesSignature , lastSignature ) ; return whereClassBuilder . 
build ( ) ; } | Generate extra method to extends the AbstractDeleteEnd class |
31,764 | public T withDefaultReadConsistencyMap ( Map < String , ConsistencyLevel > readConsistencyMap ) { configMap . put ( CONSISTENCY_LEVEL_READ_MAP , readConsistencyMap ) ; return getThis ( ) ; } | Define the default Consistency level map to be used for all READ operations The map keys represent table names and values represent the corresponding consistency level |
31,765 | public T withDefaultWriteConsistencyMap ( Map < String , ConsistencyLevel > writeConsistencyMap ) { configMap . put ( CONSISTENCY_LEVEL_WRITE_MAP , writeConsistencyMap ) ; return getThis ( ) ; } | Define the default Consistency level map to be used for all WRITE operations The map keys represent table names and values represent the corresponding consistency level |
31,766 | public T withDefaultSerialConsistencyMap ( Map < String , ConsistencyLevel > serialConsistencyMap ) { configMap . put ( CONSISTENCY_LEVEL_SERIAL_MAP , serialConsistencyMap ) ; return getThis ( ) ; } | Define the default Consistency level map to be used for all LightWeightTransaction operations operations The map keys represent table names and values represent the corresponding consistency level |
31,767 | public T withEventInterceptors ( List < Interceptor < ? > > interceptors ) { configMap . put ( EVENT_INTERCEPTORS , interceptors ) ; return getThis ( ) ; } | Provide a list of event interceptors |
31,768 | public T withParameter ( ConfigurationParameters parameter , Object value ) { configMap . put ( parameter , value ) ; return getThis ( ) ; } | Pass an arbitrary parameter to configure Achilles |
31,769 | public ENTITY mapFromRow ( Row row ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( format ( "Map row %s back to entity of type %s" , row , entityClass . getCanonicalName ( ) ) ) ; } validateNotNull ( row , "Row object should not be null" ) ; final String tableName = row . getColumnDefinitions ( ) . asList ( ) . get ( 0 ) . getTable ( ) ; final String entityTableName = meta_internal . getTableOrViewName ( ) ; validateTableTrue ( entityTableName . equals ( tableName ) , "CQL row is from table '%s', it cannot be mapped to entity '%s' associated to table '%s'" , tableName , entityClass . getCanonicalName ( ) , entityTableName ) ; return meta_internal . createEntityFrom ( row ) ; } | Map a given row back to an entity instance . This method provides a raw object mapping facility . Advanced features like interceptors are not available . User codecs are taken into account though . |
31,770 | public CassandraEmbeddedServerBuilder withScript ( String scriptLocation ) { Validator . validateNotBlank ( scriptLocation , "The script location should not be blank while executing CassandraEmbeddedServerBuilder.withScript()" ) ; scriptLocations . add ( scriptLocation . trim ( ) ) ; return this ; } | Load an CQL script in the class path and execute it upon initialization of the embedded Cassandra server |
31,771 | public CassandraEmbeddedServerBuilder withScriptTemplate ( String scriptTemplateLocation , Map < String , Object > values ) { Validator . validateNotBlank ( scriptTemplateLocation , "The script template should not be blank while executing CassandraEmbeddedServerBuilder.withScriptTemplate()" ) ; Validator . validateNotEmpty ( values , "The template values should not be empty while executing CassandraEmbeddedServerBuilder.withScriptTemplate()" ) ; scriptTemplates . put ( scriptTemplateLocation . trim ( ) , values ) ; return this ; } | Load an CQL script template in the class path inject the values into the template to produce the final script and execute it upon initialization of the embedded Cassandra server |
31,772 | public List < TypeSpec > buildWhereClasses ( GlobalParsingContext context , EntityMetaSignature signature ) { SelectWhereDSLCodeGen selectWhereDSLCodeGen = context . selectWhereDSLCodeGen ( ) ; final List < FieldSignatureInfo > partitionKeys = getPartitionKeysSignatureInfo ( signature . fieldMetaSignatures ) ; final List < FieldSignatureInfo > clusteringCols = getClusteringColsSignatureInfo ( signature . fieldMetaSignatures ) ; final ClassSignatureParams classSignatureParams = ClassSignatureParams . of ( SELECT_DSL_SUFFIX , WHERE_DSL_SUFFIX , END_DSL_SUFFIX , ABSTRACT_SELECT_WHERE_PARTITION , ABSTRACT_SELECT_WHERE ) ; final ClassSignatureParams typedMapClassSignatureParams = ClassSignatureParams . of ( SELECT_DSL_SUFFIX , WHERE_TYPED_MAP_DSL_SUFFIX , END_TYPED_MAP_DSL_SUFFIX , ABSTRACT_SELECT_WHERE_PARTITION_TYPED_MAP , ABSTRACT_SELECT_WHERE_TYPED_MAP ) ; final List < TypeSpec > partitionKeysWhereClasses = buildWhereClassesInternal ( signature , selectWhereDSLCodeGen , partitionKeys , clusteringCols , classSignatureParams ) ; final List < TypeSpec > partitionKeysWhereTypedMapClasses = buildWhereClassesInternal ( signature , selectWhereDSLCodeGen , partitionKeys , clusteringCols , typedMapClassSignatureParams ) ; partitionKeysWhereClasses . addAll ( partitionKeysWhereTypedMapClasses ) ; partitionKeysWhereClasses . addAll ( generateExtraWhereClasses ( context , signature , partitionKeys , clusteringCols ) ) ; return partitionKeysWhereClasses ; } | ClassSignatureInfo lastSignature ) ; |
31,773 | public T withConsistencyLevel ( ConsistencyLevel consistencyLevel ) { getOptions ( ) . setCl ( Optional . of ( consistencyLevel ) ) ; return getThis ( ) ; } | Set the given consistency level on the generated statement |
31,774 | public T withSerialConsistencyLevel ( ConsistencyLevel serialConsistencyLevel ) { getOptions ( ) . setSerialCL ( Optional . of ( serialConsistencyLevel ) ) ; return getThis ( ) ; } | Set the given serial consistency level on the generated statement |
31,775 | public T withOutgoingPayload ( Map < String , ByteBuffer > outgoingPayload ) { getOptions ( ) . setOutgoingPayLoad ( Optional . of ( outgoingPayload ) ) ; return getThis ( ) ; } | Set the given outgoing payload map on the generated statement |
31,776 | public T withOptionalOutgoingPayload ( Optional < Map < String , ByteBuffer > > outgoingPayload ) { getOptions ( ) . setOutgoingPayLoad ( outgoingPayload ) ; return getThis ( ) ; } | Set the given outgoing payload map on the generated statement IF NOT NULL |
31,777 | public T withPagingState ( PagingState pagingState ) { getOptions ( ) . setPagingState ( Optional . of ( pagingState ) ) ; return getThis ( ) ; } | Set the given paging state on the generated statement |
31,778 | public T withOptionalPagingStateString ( Optional < String > pagingStateString ) { pagingStateString . ifPresent ( cl -> getOptions ( ) . setPagingState ( Optional . of ( PagingState . fromString ( pagingStateString . get ( ) ) ) ) ) ; return getThis ( ) ; } | Set the given paging state string on the generated statement IF NOT NULL |
31,779 | public T withRetryPolicy ( RetryPolicy retryPolicy ) { getOptions ( ) . setRetryPolicy ( Optional . of ( retryPolicy ) ) ; return getThis ( ) ; } | Set the given retry policy |
31,780 | public Iterator < ENTITY > iterator ( ) { StatementWrapper statementWrapper = new BoundStatementWrapper ( getOperationType ( boundStatement ) , meta , boundStatement , encodedBoundValues ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( String . format ( "Generate iterator for typed query : %s" , statementWrapper . getBoundStatement ( ) . preparedStatement ( ) . getQueryString ( ) ) ) ; } CompletableFuture < ResultSet > futureRS = rte . execute ( statementWrapper ) ; return new EntityIteratorWrapper < > ( futureRS , meta , statementWrapper , options ) ; } | Execute the typed query and return an iterator of entities |
31,781 | public void executeScriptTemplate ( String scriptTemplateLocation , Map < String , Object > values ) { final List < SimpleStatement > statements = buildStatements ( loadScriptAsLines ( scriptTemplateLocation , values ) ) ; for ( SimpleStatement statement : statements ) { if ( isDMLStatement ( statement ) ) { DML_LOGGER . debug ( "\tSCRIPT : {}\n" , statement . getQueryString ( ) ) ; } else { DDL_LOGGER . debug ( "\tSCRIPT : {}\n" , statement . getQueryString ( ) ) ; } session . execute ( statement ) ; } } | Execute a CQL script template located in the class path and inject provided values into the template to produce the actual script |
31,782 | public CompletableFuture < ResultSet > executeAsync ( Statement statement ) { return FutureUtils . toCompletableFuture ( session . executeAsync ( statement ) , sameThreadExecutor ) ; } | Execute a CQL statement asynchronously |
31,783 | public static Map < ExecutableElement , AnnotationValue > getElementValuesWithDefaults ( AnnotationMirror annotMirror ) { Map < ExecutableElement , AnnotationValue > valMap = Optional . ofNullable ( ( Map < ExecutableElement , AnnotationValue > ) annotMirror . getElementValues ( ) ) . map ( x -> new HashMap < > ( x ) ) . orElse ( new HashMap < > ( ) ) ; ElementFilter . methodsIn ( annotMirror . getAnnotationType ( ) . asElement ( ) . getEnclosedElements ( ) ) . stream ( ) . map ( annot -> Tuple2 . of ( annot , annot . getDefaultValue ( ) ) ) . filter ( tuple2 -> tuple2 . _2 ( ) != null && ! valMap . containsKey ( tuple2 . _1 ( ) ) ) . forEach ( tuple2 -> valMap . put ( tuple2 . _1 ( ) , tuple2 . _2 ( ) ) ) ; return valMap ; } | Returns the values of an annotation s attributes including defaults . The method with the same name in JavacElements cannot be used directly because it includes a cast to Attribute . Compound which doesn t hold for annotations generated by the Checker Framework . |
31,784 | public static < T > Optional < Class < T > > getElementValueClass ( AnnotationMirror anno , CharSequence name , boolean useDefaults ) { Name cn = getElementValueClassName ( anno , name , useDefaults ) ; try { Class < ? > cls = Class . forName ( cn . toString ( ) ) ; return Optional . of ( ( Class < T > ) cls ) ; } catch ( ClassNotFoundException e ) { return Optional . empty ( ) ; } } | Get the Class that is referenced by attribute name . This method uses Class . forName to load the class . It returns null if the class wasn t found . |
31,785 | public static < T extends Enum < T > > T getElementValueEnum ( AnnotationMirror anno , CharSequence name , Class < T > t , boolean useDefaults ) { Symbol . VarSymbol vs = getElementValue ( anno , name , Symbol . VarSymbol . class , useDefaults ) ; T value = Enum . valueOf ( t , vs . getSimpleName ( ) . toString ( ) ) ; return value ; } | Version that is suitable for Enum elements . |
31,786 | public static TypeElement enclosingClass ( final Element elem ) { Element result = elem ; while ( result != null && ! result . getKind ( ) . isClass ( ) && ! result . getKind ( ) . isInterface ( ) ) { Element encl = result . getEnclosingElement ( ) ; result = encl ; } return ( TypeElement ) result ; } | Returns the innermost type element enclosing the given element |
31,787 | public static VariableElement findFieldInType ( TypeElement type , String name ) { for ( VariableElement field : ElementFilter . fieldsIn ( type . getEnclosedElements ( ) ) ) { if ( field . getSimpleName ( ) . toString ( ) . equals ( name ) ) { return field ; } } return null ; } | Returns the field of the class |
31,788 | public CompletableFuture < ExecutionInfo > executeAsyncWithStats ( ) { final StatementWrapper statementWrapper = new NativeStatementWrapper ( getOperationType ( boundStatement ) , meta , boundStatement , encodedBoundValues ) ; final String queryString = statementWrapper . getBoundStatement ( ) . preparedStatement ( ) . getQueryString ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( format ( "Execute native query async with execution info : %s" , queryString ) ) ; } CompletableFuture < ResultSet > cfutureRS = rte . execute ( statementWrapper ) ; return cfutureRS . thenApply ( options :: resultSetAsyncListener ) . thenApply ( x -> statementWrapper . logReturnResults ( x , options . computeMaxDisplayedResults ( rte . configContext ) ) ) . thenApply ( statementWrapper :: logTrace ) . thenApply ( x -> LWTHelper . triggerLWTListeners ( lwtResultListeners , x , queryString ) ) . thenApply ( x -> x . getExecutionInfo ( ) ) ; } | Execute the native query asynchronously and return the execution info |
31,789 | public VALUEFROM decodeFromRaw ( Object o ) { if ( o == null && ! isOptional ( ) ) return null ; return decodeFromRawInternal ( o ) ; } | Decode the given raw object to Java value value using Achilles codec system |
31,790 | public static SetOperation heapify ( final Memory srcMem , final long seed ) { final byte famID = srcMem . getByte ( FAMILY_BYTE ) ; final Family family = idToFamily ( famID ) ; switch ( family ) { case UNION : { return UnionImpl . heapifyInstance ( srcMem , seed ) ; } case INTERSECTION : { return IntersectionImpl . heapifyInstance ( srcMem , seed ) ; } default : { throw new SketchesArgumentException ( "SetOperation cannot heapify family: " + family . toString ( ) ) ; } } } | Heapify takes the SetOperation image in Memory and instantiates an on - heap SetOperation using the given seed . The resulting SetOperation will not retain any link to the source Memory . |
31,791 | public static SetOperation wrap ( final Memory srcMem , final long seed ) { final byte famID = srcMem . getByte ( FAMILY_BYTE ) ; final Family family = idToFamily ( famID ) ; final int serVer = srcMem . getByte ( SER_VER_BYTE ) ; if ( serVer != 3 ) { throw new SketchesArgumentException ( "SerVer must be 3: " + serVer ) ; } switch ( family ) { case UNION : { return UnionImpl . wrapInstance ( srcMem , seed ) ; } case INTERSECTION : { return IntersectionImplR . wrapInstance ( srcMem , seed ) ; } default : throw new SketchesArgumentException ( "SetOperation cannot wrap family: " + family . toString ( ) ) ; } } | Wrap takes the SetOperation image in Memory and refers to it directly . There is no data copying onto the java heap . Only Direct SetOperations that have been explicitly stored as direct can be wrapped . |
31,792 | public static int getMaxIntersectionBytes ( final int nomEntries ) { final int nomEnt = ceilingPowerOf2 ( nomEntries ) ; final int bytes = ( nomEnt << 4 ) + ( Family . INTERSECTION . getMaxPreLongs ( ) << 3 ) ; return bytes ; } | Returns the maximum required storage bytes given a nomEntries parameter for Intersection operations |
31,793 | static final CompactSketch createCompactSketch ( final long [ ] compactCache , boolean empty , final short seedHash , final int curCount , long thetaLong , final boolean dstOrdered , final WritableMemory dstMem ) { thetaLong = thetaOnCompact ( empty , curCount , thetaLong ) ; empty = emptyOnCompact ( curCount , thetaLong ) ; CompactSketch sketchOut = null ; final int sw = ( dstOrdered ? 2 : 0 ) | ( ( dstMem != null ) ? 1 : 0 ) ; switch ( sw ) { case 0 : { sketchOut = HeapCompactUnorderedSketch . compact ( compactCache , empty , seedHash , curCount , thetaLong ) ; break ; } case 1 : { sketchOut = DirectCompactUnorderedSketch . compact ( compactCache , empty , seedHash , curCount , thetaLong , dstMem ) ; break ; } case 2 : { sketchOut = HeapCompactOrderedSketch . compact ( compactCache , empty , seedHash , curCount , thetaLong ) ; break ; } case 3 : { sketchOut = DirectCompactOrderedSketch . compact ( compactCache , empty , seedHash , curCount , thetaLong , dstMem ) ; break ; } } return sketchOut ; } | used only by the set operations |
31,794 | static final int computeMinLgArrLongsFromCount ( final int count ) { final int upperCount = ( int ) Math . ceil ( count / REBUILD_THRESHOLD ) ; final int arrLongs = max ( ceilingPowerOf2 ( upperCount ) , 1 << MIN_LG_ARR_LONGS ) ; final int newLgArrLongs = Integer . numberOfTrailingZeros ( arrLongs ) ; return newLgArrLongs ; } | Used by intersection and AnotB |
31,795 | static boolean isValidSetOpID ( final int id ) { final Family family = Family . idToFamily ( id ) ; final boolean ret = ( ( family == Family . UNION ) || ( family == Family . INTERSECTION ) || ( family == Family . A_NOT_B ) ) ; return ret ; } | Returns true if given Family id is one of the set operations |
31,796 | static final void hipAndKxQIncrementalUpdate ( final AbstractHllArray host , final int oldValue , final int newValue ) { assert newValue > oldValue ; final int configK = 1 << host . getLgConfigK ( ) ; double kxq0 = host . getKxQ0 ( ) ; double kxq1 = host . getKxQ1 ( ) ; host . addToHipAccum ( configK / ( kxq0 + kxq1 ) ) ; if ( oldValue < 32 ) { host . putKxQ0 ( kxq0 -= invPow2 ( oldValue ) ) ; } else { host . putKxQ1 ( kxq1 -= invPow2 ( oldValue ) ) ; } if ( newValue < 32 ) { host . putKxQ0 ( kxq0 += invPow2 ( newValue ) ) ; } else { host . putKxQ1 ( kxq1 += invPow2 ( newValue ) ) ; } } | Called here and by Heap and Direct 6 and 8 bit implementations |
31,797 | public static SingleItemSketch heapify ( final Memory mem ) { final long memPre0 = mem . getLong ( 0 ) ; checkDefaultBytes0to7 ( memPre0 ) ; return new SingleItemSketch ( mem . getLong ( 8 ) ) ; } | Creates a SingleItemSketch on the heap given a Memory and assumes the DEFAULT_UPDATE_SEED . |
31,798 | public static SingleItemSketch heapify ( final Memory mem , final long seed ) { final long memPre0 = mem . getLong ( 0 ) ; checkDefaultBytes0to5 ( memPre0 ) ; final short seedHashIn = mem . getShort ( 6 ) ; final short seedHashCk = computeSeedHash ( seed ) ; checkSeedHashes ( seedHashIn , seedHashCk ) ; return new SingleItemSketch ( mem . getLong ( 8 ) , seed ) ; } | Creates a SingleItemSketch on the heap given a Memory . Checks the seed hash of the given Memory against a hash of the given seed . |
31,799 | public static SingleItemSketch create ( final byte [ ] data ) { if ( ( data == null ) || ( data . length == 0 ) ) { return null ; } return new SingleItemSketch ( hash ( data , DEFAULT_UPDATE_SEED ) [ 0 ] >>> 1 ) ; } | Create this sketch with the given byte array . If the byte array is null or empty no create attempt is made and the method returns null . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.