index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/returntypes/ComputeDescriptorState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.returntypes;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.state.returntypes.DiVectorState;
/**
 * Serializable snapshot of an {@code RCFComputeDescriptor}, holding the fields
 * that need to survive a save/restore cycle. Populated and consumed by
 * {@code ComputeDescriptorMapper}; accessors are generated by Lombok's
 * {@link Data}.
 */
@Data
public class ComputeDescriptorState implements Serializable {
    private static final long serialVersionUID = 1L;
    // sequence number of the input this descriptor was computed for
    private long internalTimeStamp;
    // the RCF score of the point
    private double score;
    // per-dimension attribution of the score
    private DiVectorState attribution;
    // NOTE(review): lastScore is not read or written by ComputeDescriptorMapper
    // -- confirm whether it is still needed
    private double lastScore;
    // the (shingled) point presented to the forest
    private double[] point;
    // the expected point, if one was computed; may be null
    private double[] expectedPoint;
    // relative position of the anomaly within the shingle (0 or negative)
    private int relativeIndex;
    // NOTE(review): lastReset is not read or written by ComputeDescriptorMapper
    // -- confirm whether it is still needed
    private int lastReset;
    // name of the ScoringStrategy enum constant in effect
    private String strategy;
    // transformation parameters in effect when the descriptor was produced
    private double[] shift;
    private double[] scale;
    private double[] postShift;
    private double transformDecay;
    private double[] postDeviations;
    // threshold and grade of the evaluation
    private double threshold;
    private double anomalyGrade;
    // name of the CorrectionMode enum constant in effect
    private String correctionMode;
}
| 600 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/returntypes/ComputeDescriptorMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.returntypes;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.config.CorrectionMode;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.state.returntypes.DiVectorMapper;
@Getter
@Setter
public class ComputeDescriptorMapper implements IStateMapper<RCFComputeDescriptor, ComputeDescriptorState> {

    /**
     * Reconstructs an {@code RCFComputeDescriptor} from its serialized state.
     * Enum-valued fields are restored only when present, so states written by
     * older versions (or with unset fields) do not trigger a
     * {@code NullPointerException} from {@code Enum.valueOf(null)}.
     *
     * @param state the serialized descriptor fields
     * @param seed  unused; present to satisfy the {@code IStateMapper} contract
     * @return a descriptor populated from {@code state}
     */
    @Override
    public RCFComputeDescriptor toModel(ComputeDescriptorState state, long seed) {
        // the original (input, timestamp) pair is not persisted; placeholders are used
        RCFComputeDescriptor descriptor = new RCFComputeDescriptor(null, 0L);
        descriptor.setRCFScore(state.getScore());
        descriptor.setInternalTimeStamp(state.getInternalTimeStamp());
        descriptor.setAttribution(new DiVectorMapper().toModel(state.getAttribution()));
        descriptor.setRCFPoint(state.getPoint());
        descriptor.setExpectedRCFPoint(state.getExpectedPoint());
        descriptor.setRelativeIndex(state.getRelativeIndex());
        if (state.getStrategy() != null) {
            descriptor.setScoringStrategy(ScoringStrategy.valueOf(state.getStrategy()));
        }
        descriptor.setShift(state.getShift());
        descriptor.setPostShift(state.getPostShift());
        descriptor.setTransformDecay(state.getTransformDecay());
        descriptor.setPostDeviations(state.getPostDeviations());
        descriptor.setScale(state.getScale());
        descriptor.setAnomalyGrade(state.getAnomalyGrade());
        descriptor.setThreshold(state.getThreshold());
        if (state.getCorrectionMode() != null) {
            descriptor.setCorrectionMode(CorrectionMode.valueOf(state.getCorrectionMode()));
        }
        // NOTE(review): state.lastScore and state.lastReset are never mapped in
        // either direction -- confirm whether they are dead fields
        return descriptor;
    }

    /**
     * Serializes an {@code RCFComputeDescriptor} into a
     * {@code ComputeDescriptorState}. Enums are stored by {@code name()} and
     * skipped when null, mirroring {@link #toModel}.
     *
     * @param descriptor the descriptor to serialize
     * @return the serialized state
     */
    @Override
    public ComputeDescriptorState toState(RCFComputeDescriptor descriptor) {
        ComputeDescriptorState state = new ComputeDescriptorState();
        state.setInternalTimeStamp(descriptor.getInternalTimeStamp());
        state.setScore(descriptor.getRCFScore());
        state.setAttribution(new DiVectorMapper().toState(descriptor.getAttribution()));
        state.setPoint(descriptor.getRCFPoint());
        state.setExpectedPoint(descriptor.getExpectedRCFPoint());
        state.setRelativeIndex(descriptor.getRelativeIndex());
        if (descriptor.getScoringStrategy() != null) {
            state.setStrategy(descriptor.getScoringStrategy().name());
        }
        state.setShift(descriptor.getShift());
        state.setPostShift(descriptor.getPostShift());
        state.setTransformDecay(descriptor.getTransformDecay());
        state.setPostDeviations(descriptor.getPostDeviations());
        state.setScale(descriptor.getScale());
        state.setAnomalyGrade(descriptor.getAnomalyGrade());
        state.setThreshold(descriptor.getThreshold());
        if (descriptor.getCorrectionMode() != null) {
            state.setCorrectionMode(descriptor.getCorrectionMode().name());
        }
        return state;
    }
}
| 601 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/preprocessor/PreprocessorMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.preprocessor;
import static com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper.getDeviations;
import static com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper.getStates;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.state.IStateMapper;
/**
 * Maps a {@code Preprocessor} to and from its serializable
 * {@code PreprocessorState}. Deviation statistics are converted through
 * {@code DeviationMapper}; enum-valued configuration is stored by name.
 */
@Getter
@Setter
public class PreprocessorMapper implements IStateMapper<Preprocessor, PreprocessorState> {

    /**
     * Rebuilds a {@code Preprocessor}: configuration is supplied through the
     * builder, then mutable runtime state (shingles, timestamps, counters) is
     * restored via setters.
     *
     * @param state the serialized preprocessor state
     * @param seed  unused; present to satisfy the {@code IStateMapper} contract
     * @return the reconstructed preprocessor
     */
    @Override
    public Preprocessor toModel(PreprocessorState state, long seed) {
        DeviationMapper deviationMapper = new DeviationMapper();
        Deviation[] deviations = getDeviations(state.getDeviationStates(), deviationMapper);
        Deviation[] timeStampDeviations = getDeviations(state.getTimeStampDeviationStates(), deviationMapper);
        Deviation[] dataQuality = getDeviations(state.getDataQualityStates(), deviationMapper);
        // NOTE(review): toState persists stopNormalization but it is never
        // restored here -- confirm whether the builder should receive it
        Preprocessor.Builder<?> preprocessorBuilder = new Preprocessor.Builder<>()
                .forestMode(ForestMode.valueOf(state.getForestMode())).shingleSize(state.getShingleSize())
                .dimensions(state.getDimensions()).normalizeTime(state.isNormalizeTime())
                .imputationMethod(ImputationMethod.valueOf(state.getImputationMethod()))
                .fillValues(state.getDefaultFill()).inputLength(state.getInputLength()).weights(state.getWeights())
                .transformMethod(TransformMethod.valueOf(state.getTransformMethod()))
                .startNormalization(state.getStartNormalization()).useImputedFraction(state.getUseImputedFraction())
                .timeDeviations(timeStampDeviations).deviations(deviations).dataQuality(dataQuality)
                .timeDecay(state.getTimeDecay());
        Preprocessor preprocessor = preprocessorBuilder.build();
        // restore runtime (non-builder) state
        preprocessor.setInitialValues(state.getInitialValues());
        preprocessor.setInitialTimeStamps(state.getInitialTimeStamps());
        preprocessor.setClipFactor(state.getClipFactor());
        preprocessor.setValuesSeen(state.getValuesSeen());
        preprocessor.setInternalTimeStamp(state.getInternalTimeStamp());
        preprocessor.setLastShingledInput(state.getLastShingledInput());
        preprocessor.setLastShingledPoint(state.getLastShingledPoint());
        preprocessor.setPreviousTimeStamps(state.getPreviousTimeStamps());
        // NOTE(review): normalizeTime is already passed to the builder above;
        // this setter call looks redundant -- confirm before removing
        preprocessor.setNormalizeTime(state.isNormalizeTime());
        return preprocessor;
    }

    /**
     * Serializes a {@code Preprocessor}, capturing both configuration and
     * mutable runtime state.
     *
     * @param model the preprocessor to serialize
     * @return the serialized state
     */
    @Override
    public PreprocessorState toState(Preprocessor model) {
        PreprocessorState state = new PreprocessorState();
        state.setShingleSize(model.getShingleSize());
        state.setDimensions(model.getDimension());
        state.setInputLength(model.getInputLength());
        state.setClipFactor(model.getClipFactor());
        state.setDefaultFill(model.getDefaultFill());
        // enums are stored by name and parsed back with valueOf in toModel
        state.setImputationMethod(model.getImputationMethod().name());
        state.setTransformMethod(model.getTransformMethod().name());
        state.setWeights(model.getWeights());
        state.setForestMode(model.getMode().name());
        state.setInitialTimeStamps(model.getInitialTimeStamps());
        state.setInitialValues(model.getInitialValues());
        state.setUseImputedFraction(model.getUseImputedFraction());
        state.setNormalizeTime(model.isNormalizeTime());
        state.setStartNormalization(model.getStartNormalization());
        state.setStopNormalization(model.getStopNormalization());
        state.setPreviousTimeStamps(model.getPreviousTimeStamps());
        state.setLastShingledInput(model.getLastShingledInput());
        state.setLastShingledPoint(model.getLastShingledPoint());
        state.setValuesSeen(model.getValuesSeen());
        state.setInternalTimeStamp(model.getInternalTimeStamp());
        DeviationMapper deviationMapper = new DeviationMapper();
        state.setTimeDecay(model.getTimeDecay());
        state.setDeviationStates(getStates(model.getDeviationList(), deviationMapper));
        state.setTimeStampDeviationStates(getStates(model.getTimeStampDeviations(), deviationMapper));
        state.setDataQualityStates(getStates(model.getDataQuality(), deviationMapper));
        return state;
    }
}
| 602 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/preprocessor/PreprocessorState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.preprocessor;
import static com.amazon.randomcutforest.state.Version.V3_7;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationState;
/**
 * Serializable snapshot of a {@code Preprocessor}: both its configuration
 * (transform/imputation methods, shingle geometry) and its mutable runtime
 * state (shingles, timestamps, counters). Accessors are generated by Lombok's
 * {@link Data}.
 */
@Data
public class PreprocessorState implements Serializable {
    private static final long serialVersionUID = 1L;
    // schema version of this state; used to interpret older serialized forms
    private String version = V3_7;
    private double useImputedFraction;
    // names of enum constants (ImputationMethod, ForestMode, TransformMethod)
    private String imputationMethod;
    private String forestMode;
    private String transformMethod;
    private double[] weights;
    // most recent shingled point/input, needed to continue streaming
    private double[] lastShingledPoint;
    private double[] lastShingledInput;
    private double[] defaultFill;
    private double timeDecay;
    // normalization window boundaries
    private int startNormalization;
    private int stopNormalization;
    private int shingleSize;
    private int dimensions;
    private int inputLength;
    private double clipFactor;
    private boolean normalizeTime;
    // values buffered before normalization statistics are established
    private long[] initialTimeStamps;
    private double[][] initialValues;
    private long[] previousTimeStamps;
    private int valuesSeen;
    private int internalTimeStamp;
    // single-deviation fields retained (presumably) so states written before
    // the array-valued fields below can still be deserialized -- do not remove
    @Deprecated
    private DeviationState dataQualityState;
    @Deprecated
    private DeviationState timeStampDeviationState;
    private DeviationState[] deviationStates;
    private DeviationState[] dataQualityStates;
    private DeviationState[] timeStampDeviationStates;
}
| 603 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/predictorcorrector/PredictorCorrectorState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.predictorcorrector;
import static com.amazon.randomcutforest.state.Version.V3_8;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorState;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationState;
import com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;
/**
 * Serializable snapshot of a {@code PredictorCorrector}, populated and
 * consumed by {@code PredictorCorrectorMapper}. Accessors are generated by
 * Lombok's {@link Data}.
 */
@Data
public class PredictorCorrectorState implements Serializable {
    private static final long serialVersionUID = 1L;
    // schema version of this state
    private String version = V3_8;
    private BasicThresholderState[] thresholderStates;
    private double[] lastScore;
    // name of the ScoringStrategy enum constant last used
    private String lastStrategy;
    private int numberOfAttributors;
    private int baseDimension;
    private long randomSeed;
    private double noiseFactor;
    private boolean autoAdjust;
    private boolean ignoreDrift;
    private ComputeDescriptorState lastDescriptor;
    private int runLength;
    // NOTE(review): misspelling of "samplingSupport" -- kept because the
    // Lombok-generated accessor names and the serialized form depend on it
    private double samplingSuppport;
    private double[] modeInformation; // multiple modes -- to be used in future
    private DeviationState[] deviationStates; // in future to be used for learning deviations
    private double[] ignoreNearExpected;
}
| 604 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/predictorcorrector/PredictorCorrectorMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.predictorcorrector;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.parkservices.PredictorCorrector;
import com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationState;
import com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderMapper;
import com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.state.IStateMapper;
/**
 * Maps a {@code PredictorCorrector} to and from its serializable
 * {@code PredictorCorrectorState}. Nullable members ({@code lastDescriptor},
 * {@code lastStrategy}) are converted only when present, so a partially
 * initialized model or an older state does not cause a
 * {@code NullPointerException}.
 */
public class PredictorCorrectorMapper implements IStateMapper<PredictorCorrector, PredictorCorrectorState> {

    /**
     * Serializes a {@code PredictorCorrector}.
     *
     * @param model the model to serialize
     * @return the serialized state
     */
    @Override
    public PredictorCorrectorState toState(PredictorCorrector model) {
        PredictorCorrectorState state = new PredictorCorrectorState();
        state.setLastScore(model.getLastScore());
        state.setNumberOfAttributors(model.getNumberOfAttributors());
        state.setIgnoreNearExpected(model.getIgnoreNearExpected());
        BasicThresholderMapper mapper = new BasicThresholderMapper();
        BasicThresholder[] thresholders = model.getThresholders();
        BasicThresholderState thresholderState[] = new BasicThresholderState[thresholders.length];
        for (int y = 0; y < thresholders.length; y++) {
            thresholderState[y] = mapper.toState(thresholders[y]);
        }
        state.setThresholderStates(thresholderState);
        DeviationMapper devMapper = new DeviationMapper();
        Deviation[] deviations = model.getDeviations();
        state.setAutoAdjust(model.isAutoAdjust());
        // deviations are only maintained (and therefore only persisted) when
        // auto-adjustment is on
        if (state.isAutoAdjust()) {
            DeviationState deviationState[] = new DeviationState[deviations.length];
            for (int y = 0; y < deviations.length; y++) {
                deviationState[y] = devMapper.toState(deviations[y]);
            }
            state.setDeviationStates(deviationState);
        }
        state.setNoiseFactor(model.getNoiseFactor());
        state.setBaseDimension(model.getBaseDimension());
        // guard the enum the same way lastDescriptor is guarded below;
        // calling name() on a null strategy would throw
        if (model.getLastStrategy() != null) {
            state.setLastStrategy(model.getLastStrategy().name());
        }
        state.setRandomSeed(model.getRandomSeed());
        if (model.getLastDescriptor() != null) {
            ComputeDescriptorMapper descriptorMapper = new ComputeDescriptorMapper();
            state.setLastDescriptor(descriptorMapper.toState(model.getLastDescriptor()));
        }
        state.setRunLength(model.getRunLength());
        state.setIgnoreDrift(model.isIgnoreDrift());
        // note: "Suppport" spelling comes from the state class's field name
        state.setSamplingSuppport(model.getSamplingSupport());
        return state;
    }

    /**
     * Reconstructs a {@code PredictorCorrector} from its serialized state.
     *
     * @param state the serialized model
     * @param seed  unused; present to satisfy the {@code IStateMapper} contract
     * @return the reconstructed model
     */
    @Override
    public PredictorCorrector toModel(PredictorCorrectorState state, long seed) {
        BasicThresholderMapper mapper = new BasicThresholderMapper();
        int num = state.getThresholderStates().length;
        BasicThresholder[] thresholders = new BasicThresholder[num];
        for (int i = 0; i < num; i++) {
            thresholders[i] = mapper.toModel(state.getThresholderStates()[i]);
        }
        Deviation[] deviations = null;
        if (state.isAutoAdjust()) {
            DeviationMapper devMapper = new DeviationMapper();
            deviations = new Deviation[state.getDeviationStates().length];
            for (int y = 0; y < deviations.length; y++) {
                deviations[y] = devMapper.toModel(state.getDeviationStates()[y]);
            }
        }
        PredictorCorrector predictorCorrector = new PredictorCorrector(thresholders, deviations,
                state.getBaseDimension(), state.getRandomSeed());
        predictorCorrector.setNumberOfAttributors(state.getNumberOfAttributors());
        // valueOf(null) would throw NullPointerException; skip when absent
        if (state.getLastStrategy() != null) {
            predictorCorrector.setLastStrategy(ScoringStrategy.valueOf(state.getLastStrategy()));
        }
        predictorCorrector.setLastScore(state.getLastScore());
        predictorCorrector.setIgnoreNearExpected(state.getIgnoreNearExpected());
        predictorCorrector.setAutoAdjust(state.isAutoAdjust());
        predictorCorrector.setNoiseFactor(state.getNoiseFactor());
        predictorCorrector.setRunLength(state.getRunLength());
        if (state.getLastDescriptor() != null) {
            ComputeDescriptorMapper descriptorMapper = new ComputeDescriptorMapper();
            predictorCorrector.setLastDescriptor(descriptorMapper.toModel(state.getLastDescriptor()));
        }
        predictorCorrector.setIgnoreDrift(state.isIgnoreDrift());
        predictorCorrector.setSamplingSupport(state.getSamplingSuppport());
        return predictorCorrector;
    }
}
| 605 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/threshold/BasicThresholder.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.threshold;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.sqrt;
import java.util.List;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.util.Weighted;
public class BasicThresholder {
public static double DEFAULT_SCORE_DIFFERENCING = 0.5;
public static int DEFAULT_MINIMUM_SCORES = 10;
public static double DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD = 0.9;
public static double DEFAULT_ABSOLUTE_THRESHOLD = 0.8;
public static double DEFAULT_INITIAL_THRESHOLD = 1.5;
public static double DEFAULT_Z_FACTOR = 3.0;
public static double MINIMUM_Z_FACTOR = 2.0;
public static boolean DEFAULT_AUTO_THRESHOLD = true;
public static int DEFAULT_DEVIATION_STATES = 3;
// keeping a count of the values seen because both deviation variables
// primaryDeviation
// and secondaryDeviation may not be used always
protected int count = 0;
// horizon = 0 is short term, switches to secondary
// horizon = 1 long term, switches to primary
protected double scoreDifferencing = DEFAULT_SCORE_DIFFERENCING;
// below these many observations, deviation is not useful
protected int minimumScores = DEFAULT_MINIMUM_SCORES;
protected Deviation primaryDeviation;
protected Deviation secondaryDeviation;
protected Deviation thresholdDeviation;
protected boolean autoThreshold = DEFAULT_AUTO_THRESHOLD;
// an absoluteThreshold
protected double absoluteThreshold = DEFAULT_ABSOLUTE_THRESHOLD;
// the upper threshold of scores above which points are likely anomalies
protected double factorAdjustmentThreshold = DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD;
// initial absolute threshold used to determine anomalies before sufficient
// values are seen
protected double initialThreshold = DEFAULT_INITIAL_THRESHOLD;
// used to determine the surprise coefficient above which we can call a
// potential anomaly
protected double zFactor = DEFAULT_Z_FACTOR;
public BasicThresholder(double primaryDiscount, double secondaryDiscount, boolean adjust) {
primaryDeviation = new Deviation(primaryDiscount);
secondaryDeviation = new Deviation(secondaryDiscount);
// a longer horizon to adjust
thresholdDeviation = new Deviation(primaryDiscount / 2);
autoThreshold = adjust;
}
public BasicThresholder(double discount) {
this(discount, discount, false);
}
public BasicThresholder(Deviation[] deviations) {
int length = (deviations == null) ? 0 : deviations.length;
if (length != DEFAULT_DEVIATION_STATES) {
double timeDecay = 1.0 / (DEFAULT_SAMPLE_SIZE * DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY);
this.primaryDeviation = new Deviation(timeDecay);
this.secondaryDeviation = new Deviation(timeDecay);
this.thresholdDeviation = new Deviation(0.1 * timeDecay);
} else {
this.primaryDeviation = deviations[0];
this.secondaryDeviation = deviations[1];
this.thresholdDeviation = deviations[2];
}
}
public BasicThresholder(List<Double> scores, double rate) {
this.primaryDeviation = new Deviation(0);
this.secondaryDeviation = new Deviation(0);
this.thresholdDeviation = new Deviation(0);
if (scores != null) {
scores.forEach(s -> update(s, s));
}
primaryDeviation.setDiscount(rate);
secondaryDeviation.setDiscount(rate);
thresholdDeviation.setDiscount(0.1 * rate);
}
/**
* a boolean that determines if enough values have been seen to be able to
* discern deviations
*
* @return true/false based on counts of various statistic
*/
public boolean isDeviationReady() {
if (count < minimumScores) {
return false;
}
if (scoreDifferencing != 0) {
return secondaryDeviation.getCount() >= minimumScores;
}
return true;
}
/**
* this function helps switch from short term (not able to use deviation, using
* absolute scores) which is the first minimumScores observations of the scoring
* function to using deviation (and not using absokute scores, except as a lower
* bound) at 2*minimumScores It is often the case that the data has "run"
* effects and the initial scopres can all come in low or can all come in high
*
* @return a parameter that helps smoot transition of initial to long term
* behavior
*/
protected double intermediateTermFraction() {
if (count < minimumScores) {
return 0;
} else if (count > 2 * minimumScores) {
return 1;
} else {
return (count - minimumScores) * 1.0 / minimumScores;
}
}
@Deprecated
public double threshold() {
return getPrimaryThreshold();
}
public double getPrimaryThreshold() {
if (!isDeviationReady()) {
return 0;
}
return primaryDeviation.getMean() + zFactor * primaryDeviation.getDeviation();
}
/**
* The simplest thresholder that does not use any auxilliary correction, an can
* be used for multiple scoring capabilities.
*
* @param score the value being thresholded
* @return a computation of grade between [-1,1], grades in the range (0,1] are
* to be considered anomalous
*/
public double getPrimaryGrade(double score) {
if (!isDeviationReady()) {
return 0;
}
double tFactor = 2 * zFactor;
double deviation = primaryDeviation.getDeviation();
if (deviation > 0) {
tFactor = min(tFactor, (score - primaryDeviation.getMean()) / deviation);
} else {
return (score > primaryDeviation.getMean() + 1e-10) ? 1.0 : 0;
}
double t = (tFactor - zFactor) / (zFactor);
return max(0, t);
}
public Weighted<Double> getPrimaryThresholdAndGrade(double score) {
if (!isDeviationReady() || score <= 0) {
return new Weighted<Double>(0.0, 0.0f);
}
double threshold = getPrimaryThreshold();
float grade = (threshold > 0 && score > threshold) ? (float) getPrimaryGrade(score) : 0f;
return new Weighted<>(threshold, grade);
}
@Deprecated
public double getAnomalyGrade(double score, boolean flag) {
return getPrimaryGrade(score);
}
/**
* The following adapts the notion of x-sigma (standard deviation) to admit the
* case that RCF scores are asymmetric and values lower than 1 (closer to 0.5)
* can be more common; whereas anomalies are typically larger the x-factor is
* automatically scaled to be calibrated with the average score (bounded below
* by an absolute constant like 0.7)
*
* @param factor the factor being scaled
* @param method transformation method
* @param dimension the dimension of the problem (currently unused)
* @return a scaled value of the factor
*/
protected double adjustedFactor(double factor, TransformMethod method, int dimension) {
double correctedFactor = factor;
double base = primaryDeviation.getMean();
if (base < factorAdjustmentThreshold && method != TransformMethod.NONE) {
correctedFactor = primaryDeviation.getMean() * factor / factorAdjustmentThreshold;
}
return max(correctedFactor, MINIMUM_Z_FACTOR);
}
/**
* The following computes the standard deviation of the scores. But we have
* multiple ways of measuring that -- if the scores are typically symmetric then
* many of these measures concide. However transformation of the values may
* cause the score distribution to be unusual. For example, if NORMALIZATION is
* used then the scores (below the average) end up being close to the average
* (an example of the asymmetry) and thus only standard deviation is used. But
* for other distributions we could directly estimate the deviation of the
* scores below the dynamic mean in an online manner, and we do so in
* thresholdDeviation. An orthogonal component is the effect of
* shingling/differencing which connect up the scores from consecutive input.
*
* @param method transformation method
* @param shingleSize shinglesize used
* @return an estimate of long term deviation from mean of a stochastic series
*/
protected double longTermDeviation(TransformMethod method, int shingleSize) {
if (shingleSize == 1
&& !(method == TransformMethod.DIFFERENCE || method == TransformMethod.NORMALIZE_DIFFERENCE)) {
// control the effect of large values above a threshold from raising the
// threshold
return min(sqrt(2.0) * thresholdDeviation.getDeviation(), primaryDeviation.getDeviation());
} else {
double first = primaryDeviation.getDeviation();
first = min(first, max(secondaryDeviation.getDeviation(), sqrt(2.0) * thresholdDeviation.getDeviation()));
// there is a role of differencing; either by shingling or by explicit
// transformation
return scoreDifferencing * first + (1 - scoreDifferencing) * secondaryDeviation.getDeviation();
}
}
public Weighted<Double> getThresholdAndGrade(double score, TransformMethod method, int dimension, int shingleSize) {
return getThresholdAndGrade(score, zFactor, method, dimension, shingleSize);
}
public Weighted<Double> getThresholdAndGrade(double score, double factor, TransformMethod method, int dimension,
int shingleSize) {
double intermediateFraction = intermediateTermFraction();
double newFactor = adjustedFactor(factor, method, dimension);
double longTerm = longTermDeviation(method, shingleSize);
double scaledDeviation = (newFactor - 1) * longTerm + primaryDeviation.getDeviation();
double absolute = absoluteThreshold;
if (autoThreshold && intermediateFraction >= 1.0 && primaryDeviation.getMean() < factorAdjustmentThreshold) {
absolute = primaryDeviation.getMean() * absolute / factorAdjustmentThreshold;
}
double threshold = (!isDeviationReady()) ? max(initialThreshold, absolute)
: max(absolute, intermediateFraction * (primaryDeviation.getMean() + scaledDeviation)
+ (1 - intermediateFraction) * initialThreshold);
if (score < threshold || threshold <= 0) {
return new Weighted<>(threshold, 0);
} else {
double t = getSurpriseIndex(score, threshold, newFactor, scaledDeviation / newFactor);
t = min((Math.floor(t * 20)) / 16, 1.0); // grade 1 at scaledDeviation at 4 sigma
if (t == 0) {
// round off errors
threshold = score;
}
return new Weighted<>(threshold, (float) t);
}
}
/**
* how surprised are seeing a value from a series with mean base with deviation,
* where factor controls the separation
*
* @param score score
* @param base mean of series
* @param factor control parameter for determining surprise
* @param deviation relevant deviation for the series
* @return a clipped value of the "surpise" index
*/
protected float getSurpriseIndex(double score, double base, double factor, double deviation) {
if (isDeviationReady()) {
double tFactor = 2 * factor;
if (deviation > 0) {
tFactor = min(factor, (score - base) / deviation);
}
return max(0, (float) (tFactor / factor));
} else {
return (float) min(1, max(0, (score - absoluteThreshold) / absoluteThreshold));
}
}
// mean or below; uses the asymmetry of the RCF score
protected void updateThreshold(double score) {
double gap = primaryDeviation.getMean() - score;
if (gap > 0) {
thresholdDeviation.update(gap);
}
}
protected void updatePrimary(double score) {
updateThreshold(score);
primaryDeviation.update(score);
++count;
}
public void update(double primary, double secondary) {
updateThreshold(primary);
primaryDeviation.update(primary);
secondaryDeviation.update(secondary);
++count;
}
public void update(double score, double secondScore, double lastScore, TransformMethod method) {
update(min(score, 2.0), secondScore - lastScore);
}
// note: these return live references (not copies); use getDeviations() for
// defensive copies
public Deviation getPrimaryDeviation() {
    return primaryDeviation;
}

public Deviation getSecondaryDeviation() {
    return secondaryDeviation;
}

// note: the setter is named setZfactor (lower-case f) while the getter is
// getZFactor -- kept as-is since callers may rely on the exact name
public void setZfactor(double factor) {
    zFactor = factor;
}

/**
 * sets the lower threshold -- which is used to scale the factor variable
 */
public void setLowerThreshold(double lower) {
    factorAdjustmentThreshold = lower;
}

/**
 * sets an absolute lower bound on thresholds; turns off auto adjustment so
 * that the directly configured value is respected
 *
 * @param value absolute lower bound thresholds turns off auto adjustment -- to
 *              respect the direct setting
 */
public void setAbsoluteThreshold(double value) {
    autoThreshold = false;
    absoluteThreshold = value;
}

public void setInitialThreshold(double initial) {
    initialThreshold = initial;
}

// validated setter: score differencing must be a fraction in [0, 1]
public void setScoreDifferencing(double scoreDifferencing) {
    checkArgument(scoreDifferencing >= 0 && scoreDifferencing <= 1, "incorrect score differencing parameter");
    this.scoreDifferencing = scoreDifferencing;
}
// to be updated as more deviations are added
public Deviation[] getDeviations() {
    // defensive copies so callers cannot mutate the internal statistics
    Deviation[] deviations = new Deviation[DEFAULT_DEVIATION_STATES];
    deviations[0] = primaryDeviation.copy();
    deviations[1] = secondaryDeviation.copy();
    deviations[2] = thresholdDeviation.copy();
    return deviations;
}

// simple accessors for the thresholder configuration and state

public boolean isAutoThreshold() {
    return autoThreshold;
}

public int getCount() {
    return count;
}

public void setCount(int count) {
    this.count = count;
}

public double getAbsoluteThreshold() {
    return absoluteThreshold;
}

public double getLowerThreshold() {
    return factorAdjustmentThreshold;
}

public double getInitialThreshold() {
    return initialThreshold;
}

public double getScoreDifferencing() {
    return scoreDifferencing;
}

public double getZFactor() {
    return zFactor;
}

public int getMinimumScores() {
    return minimumScores;
}

public void setMinimumScores(int minimumScores) {
    this.minimumScores = minimumScores;
}

public void setAutoThreshold(boolean autoThreshold) {
    this.autoThreshold = autoThreshold;
}
}
| 606 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/GenericAnomalyDescriptor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.returntypes;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.util.Weighted;
@Getter
@Setter
public class GenericAnomalyDescriptor<P> {

    // the following corresponds to the list of expected points in AnomalyDetector,
    // which is returned from TRCF. The list corresponds to plausible values
    // (cluster centers) and a weight representing the likelihood.
    // The list is sorted in decreasing order of likelihood. Most often, the first
    // element should suffice. In case of an anomalous point, however, the
    // information here can provide more insight
    List<Weighted<P>> representativeList;

    // standard, as in AnomalyDetector; we do not recommend attempting to
    // disambiguate scores of non-anomalous
    // points. Note that scores can be low.
    double score;

    // standard as in AnomalyDetector
    double threshold;

    // a value between [0,1] indicating the strength of the anomaly, it can be
    // viewed as a confidence score
    // projected by the algorithm.
    double anomalyGrade;

    public GenericAnomalyDescriptor(List<Weighted<P>> representative, double score, double threshold,
            double anomalyGrade) {
        // bug fix: this previously read "this.representativeList = representativeList",
        // a self-assignment that silently discarded the caller-supplied list and
        // left the field null
        this.representativeList = representative;
        this.score = score;
        this.threshold = threshold;
        this.anomalyGrade = anomalyGrade;
    }
}
| 607 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/AnalysisDescriptor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.returntypes;
import java.util.ArrayList;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ForecastDescriptor;
@Getter
@Setter
public class AnalysisDescriptor {

    /**
     * the intent of this class is to describe the list of anomalies and the final
     * forecast of some data this is most useful in sequential analysis when that
     * data is processed sequentially
     */
    // anomalies flagged during the analysis, in order of occurrence
    ArrayList<AnomalyDescriptor> anomalies;

    // the forecast produced at the end of the (sequential) analysis
    ForecastDescriptor forecastDescriptor;

    public AnalysisDescriptor(ArrayList<AnomalyDescriptor> anomalies, ForecastDescriptor forecastDescriptor) {
        this.anomalies = anomalies;
        this.forecastDescriptor = forecastDescriptor;
    }
}
| 608 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/TimedRangeVector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.returntypes;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.Arrays;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * ThresholdedRandomCutForests handle time internally and thus the forecast of
 * values also corresponds to the next sequential timestamps. The RangeVector
 * corresponds to the forecast from RCF (based on the inverse of the
 * transformation applied by TRCF as it invokes RCF). The timeStamps correspond
 * to the predicted timestamps. The upper and lower ranges are also present,
 * similar to RangeVector.
 *
 * Note that if the timestamps cannot be predicted meaningfully (for example in
 * STREAMING_IMPUTE mode), then those entries would be 0
 */
public class TimedRangeVector {

    // the value forecast (with upper/lower ranges) produced by RCF
    public final RangeVector rangeVector;

    // most likely timestamps for each horizon step
    public final long[] timeStamps;

    // upper bounds on the predicted timestamps (invariant: >= timeStamps)
    public final long[] upperTimeStamps;

    // lower bounds on the predicted timestamps (invariant: <= timeStamps)
    public final long[] lowerTimeStamps;

    /**
     * Creates an empty timed forecast.
     *
     * @param dimensions total number of forecast values (horizon * input length)
     * @param horizon    number of future steps; must divide dimensions
     */
    public TimedRangeVector(int dimensions, int horizon) {
        checkArgument(dimensions > 0, "dimensions must be greater than 0");
        checkArgument(horizon > 0, "horizon must be greater than 0");
        checkArgument(dimensions % horizon == 0, "horizon should divide dimensions");
        rangeVector = new RangeVector(dimensions);
        timeStamps = new long[horizon];
        upperTimeStamps = new long[horizon];
        lowerTimeStamps = new long[horizon];
    }

    /**
     * Creates a timed forecast from existing components; all arrays are copied
     * defensively and the lower <= expected <= upper invariant is verified.
     */
    public TimedRangeVector(RangeVector rangeVector, long[] timestamps, long[] upperTimeStamps,
            long[] lowerTimeStamps) {
        // fix: error message previously contained a doubled word ("be be")
        checkArgument(rangeVector.values.length % timestamps.length == 0,
                " dimensions must be divisible by horizon");
        checkArgument(timestamps.length == upperTimeStamps.length && upperTimeStamps.length == lowerTimeStamps.length,
                "horizon must be equal");
        this.rangeVector = new RangeVector(rangeVector);
        for (int i = 0; i < timestamps.length; i++) {
            checkArgument(upperTimeStamps[i] >= timestamps[i] && timestamps[i] >= lowerTimeStamps[i],
                    "incorrect semantics");
        }
        this.timeStamps = Arrays.copyOf(timestamps, timestamps.length);
        this.lowerTimeStamps = Arrays.copyOf(lowerTimeStamps, lowerTimeStamps.length);
        this.upperTimeStamps = Arrays.copyOf(upperTimeStamps, upperTimeStamps.length);
    }

    /**
     * Deep copy constructor.
     *
     * @param base the TimedRangeVector to copy
     */
    public TimedRangeVector(TimedRangeVector base) {
        this.rangeVector = new RangeVector(base.rangeVector);
        this.timeStamps = Arrays.copyOf(base.timeStamps, base.timeStamps.length);
        this.lowerTimeStamps = Arrays.copyOf(base.lowerTimeStamps, base.lowerTimeStamps.length);
        this.upperTimeStamps = Arrays.copyOf(base.upperTimeStamps, base.upperTimeStamps.length);
    }

    /**
     * Create a deep copy of the base RangeVector with zeroed timestamps.
     *
     * @param base    The RangeVector to copy.
     * @param horizon number of future steps; must divide the vector length
     */
    public TimedRangeVector(RangeVector base, int horizon) {
        checkArgument(base.values.length % horizon == 0, "incorrect lengths");
        this.rangeVector = new RangeVector(base);
        this.timeStamps = new long[horizon];
        this.upperTimeStamps = new long[horizon];
        this.lowerTimeStamps = new long[horizon];
    }

    /**
     * Shifts the i'th timestamp (and its bounds) by the given amount, clamping
     * the bounds so the lower/upper invariant survives precision issues.
     */
    public void shiftTime(int i, long shift) {
        checkArgument(i >= 0 && i < timeStamps.length, "incorrect index");
        timeStamps[i] += shift;
        // managing precision
        upperTimeStamps[i] = max(timeStamps[i], upperTimeStamps[i] + shift);
        lowerTimeStamps[i] = min(timeStamps[i], lowerTimeStamps[i] + shift);
    }

    /**
     * Scales the i'th timestamp (and its bounds) by a positive weight, clamping
     * the bounds so the lower/upper invariant survives truncation.
     */
    public void scaleTime(int i, double weight) {
        checkArgument(i >= 0 && i < timeStamps.length, "incorrect index");
        checkArgument(weight > 0, " negative weight not permitted");
        timeStamps[i] = (long) (timeStamps[i] * weight);
        // managing precision
        upperTimeStamps[i] = max((long) (upperTimeStamps[i] * weight), timeStamps[i]);
        lowerTimeStamps[i] = min((long) (lowerTimeStamps[i] * weight), timeStamps[i]);
    }
}
| 609 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/ImputePreprocessor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;
import static com.amazon.randomcutforest.config.ImputationMethod.LINEAR;
import static com.amazon.randomcutforest.config.ImputationMethod.NEXT;
import static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;
import static com.amazon.randomcutforest.config.ImputationMethod.RCF;
import static com.amazon.randomcutforest.config.ImputationMethod.ZERO;
import static com.amazon.randomcutforest.config.TransformMethod.DIFFERENCE;
import static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE_DIFFERENCE;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
@Getter
@Setter
public class ImputePreprocessor extends InitialSegmentPreprocessor {

    // default interpolation used while discharging the initial buffered segment
    public static ImputationMethod DEFAULT_INITIAL = LINEAR;

    // default fallback used on the fly when RCF-based imputation is not reliable
    public static ImputationMethod DEFAULT_DYNAMIC = PREVIOUS;

    ThresholdedRandomCutForest thresholdedRandomCutForest;

    /**
     * the builder initializes the numberOfImputed, which is not used in the other
     * classes
     *
     * @param builder a builder for Preprocessor
     */
    public ImputePreprocessor(Builder<?> builder) {
        super(builder);
        thresholdedRandomCutForest = builder.thresholdedRandomCutForest;
        numberOfImputed = shingleSize;
    }

    /**
     * stores initial data for normalization; the indices of missing values (if
     * any) are appended after the raw input so they can be recovered in
     * prepareInitialInput()
     *
     * @param inputPoint    input data
     * @param timestamp     timestamp
     * @param missingValues indices of missing entries in inputPoint, or null
     */
    protected void storeInitial(double[] inputPoint, long timestamp, int[] missingValues) {
        initialTimeStamps[valuesSeen] = timestamp;
        checkArgument(inputPoint.length == inputLength, "incorrect length");
        checkArgument(missingValues == null || missingValues.length <= inputLength, "unusual missing values list");
        int length = inputLength + ((missingValues == null) ? 0 : missingValues.length);
        double[] temp = new double[length];
        System.arraycopy(inputPoint, 0, temp, 0, inputLength);
        if (missingValues != null) {
            for (int i = 0; i < length - inputLength; i++) {
                temp[inputLength + i] = missingValues[i];
            }
        }
        initialValues[valuesSeen] = temp;
    }

    /**
     * prepare initial values which can have missing entries in individual tuples.
     * We use a simple interpolation strategy. At some level, lack of data simply
     * cannot be solved easily without data. This is run as one of the initial steps
     * in dischargeInitial() If all the entries corresponding to some variables are
     * missing -- there is no good starting point; we assume the value is 0, unless
     * there is a defaultFill()
     */
    void prepareInitialInput() {
        // missing[i][j] is true when variable j of tuple i was reported missing
        boolean[][] missing = new boolean[initialValues.length][inputLength];
        for (int i = 0; i < initialValues.length; i++) {
            Arrays.fill(missing[i], false);
            int length = initialValues[i].length - inputLength;
            for (int j = 0; j < length; j++) {
                missing[i][(int) initialValues[i][inputLength + j]] = true;
            }
        }
        boolean[] startingValuesSet = new boolean[inputLength];
        if (imputationMethod == ZERO) {
            // bug fix: the ternary was inverted (it zeroed the *observed* values and
            // kept the stale entries at missing positions); the loop also stopped at
            // length - 1, leaving the last buffered tuple untouched
            for (int i = 0; i < initialValues.length; i++) {
                for (int j = 0; j < inputLength; j++) {
                    initialValues[i][j] = (missing[i][j]) ? 0 : initialValues[i][j];
                }
            }
        } else if (imputationMethod == FIXED_VALUES || defaultFill != null) {
            // bug fix: same inversion as above -- only missing entries take the fill value
            for (int i = 0; i < initialValues.length; i++) {
                for (int j = 0; j < inputLength; j++) {
                    initialValues[i][j] = (missing[i][j]) ? defaultFill[j] : initialValues[i][j];
                }
            }
        } else { // no simple alternative other than linear interpolation
            for (int j = 0; j < inputLength; j++) {
                int next = 0;
                while (next < initialValues.length && missing[next][j]) {
                    ++next;
                }
                startingValuesSet[j] = (next < initialValues.length);
                if (startingValuesSet[j]) {
                    initialValues[0][j] = initialValues[next][j];
                    missing[0][j] = false;
                    // note if the first value is present then i==0
                    int start = 0;
                    while (start < initialValues.length - 1) {
                        int end = start + 1;
                        while (end < initialValues.length && missing[end][j]) {
                            ++end;
                        }
                        if (end < initialValues.length && end > start + 1) {
                            for (int y = start + 1; y < end; y++) { // linear interpolation
                                double factor = (1.0 * initialTimeStamps[start] - initialTimeStamps[y])
                                        / (initialTimeStamps[start] - initialTimeStamps[end]);
                                initialValues[y][j] = factor * initialValues[start][j]
                                        + (1 - factor) * initialValues[end][j];
                            }
                        }
                        start = end;
                    }
                } else {
                    // set 0; note there is no value in the entire column.
                    for (int y = 0; y < initialValues.length; y++) {
                        initialValues[y][j] = 0;
                    }
                }
            }
        }
        // truncate to input length, since the missing values were stored as well
        for (int i = 0; i < initialValues.length; i++) {
            initialValues[i] = Arrays.copyOf(initialValues[i], inputLength);
        }
    }

    /**
     * preprocessor that can buffer the initial input as well as impute missing
     * values on the fly note that the forest should not be updated before the point
     * has been scored
     *
     * @param description           description of the input
     * @param lastAnomalyDescriptor the descriptor of the last anomaly
     * @param forest                RCF
     * @return an AnomalyDescriptor used in anomaly detection
     */
    @Override
    public AnomalyDescriptor preProcess(AnomalyDescriptor description, RCFComputeDescriptor lastAnomalyDescriptor,
            RandomCutForest forest) {
        initialSetup(description, lastAnomalyDescriptor, forest);
        if (valuesSeen < startNormalization) {
            // valuesSeen is advanced in postProcess (not in storeInitial here)
            storeInitial(description.getCurrentInput(), description.getInputTimestamp(),
                    description.getMissingValues());
            return description;
        }
        checkArgument(description.getInputTimestamp() > previousTimeStamps[shingleSize - 1],
                "incorrect ordering of time");
        // generate next tuple without changing the forest, these get modified in the
        // transform
        // a primary culprit is differencing, a secondary culprit is the numberOfImputed
        long[] savedTimestamps = Arrays.copyOf(previousTimeStamps, previousTimeStamps.length);
        double[] savedShingledInput = Arrays.copyOf(lastShingledInput, lastShingledInput.length);
        double[] savedShingle = Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);
        int savedNumberOfImputed = numberOfImputed;
        int lastActualInternal = internalTimeStamp;
        double[] point = generateShingle(description, getTimeFactor(timeStampDeviations[1]), false, forest);

        // restore state; the same shingle is regenerated (and committed) in postProcess
        internalTimeStamp = lastActualInternal;
        numberOfImputed = savedNumberOfImputed;
        previousTimeStamps = Arrays.copyOf(savedTimestamps, savedTimestamps.length);
        lastShingledInput = Arrays.copyOf(savedShingledInput, savedShingledInput.length);
        lastShingledPoint = Arrays.copyOf(savedShingle, savedShingle.length);

        if (point != null) {
            description.setRCFPoint(point);
        }
        description.setInternalTimeStamp(internalTimeStamp + description.getNumberOfNewImputes());
        return description;
    }

    /**
     * the timestamps are now used to calculate the number of imputed tuples in the
     * shingle
     *
     * @param timestamp the timestamp of the current input
     */
    @Override
    protected void updateTimestamps(long timestamp) {
        // equal oldest timestamps mark an imputed slot rotating out of the shingle
        if (previousTimeStamps[0] == previousTimeStamps[1]) {
            numberOfImputed = numberOfImputed - 1;
        }
        super.updateTimestamps(timestamp);
    }

    /**
     * decides if the forest should be updated, this is needed for imputation on the
     * fly. The main goal of this function is to avoid runaway sequences where a
     * single input changes the forest too much. But in some cases that behavior can
     * be warranted and then this function should be changed
     *
     * @return if the forest should be updated
     */
    protected boolean updateAllowed() {
        double fraction = numberOfImputed * 1.0 / (shingleSize);
        if (numberOfImputed == shingleSize - 1 && previousTimeStamps[0] != previousTimeStamps[1]
                && (transformMethod == DIFFERENCE || transformMethod == NORMALIZE_DIFFERENCE)) {
            // this shingle is disconnected from the previously seen values
            // these transformations will have little meaning
            // positions 0 and 1 corresponds to the oldest in the shingle -- if we admit
            // that case
            // then we would admit a shingle where impact of the most recent observation is
            // shingleSize - 1
            // and the oldest one is 1. It seemed conservative to not allow that --
            // primarily to stop a
            // "runaway" effect where a single value (and its imputations affect
            // everything).
            // A gap at positions 1 and 2 would correspond to a shingleSize - 2 and 2 (or
            // two different points).
            return false;
        }
        dataQuality[0].update(1 - fraction);
        return (fraction < useImputedFraction && internalTimeStamp >= shingleSize);
    }

    /**
     * the following function mutates the forest, the lastShingledPoint,
     * lastShingledInput as well as previousTimeStamps, and adds the shingled input
     * to the forest (provided it is allowed by the number of imputes and the
     * transformation function)
     *
     * @param changeForest boolean determining if the forest is actually updated
     * @param input        the input point (can be imputed)
     * @param timestamp    the input timestamp (will be the most recent timestamp
     *                     for imputes)
     * @param forest       the resident RCF
     * @param isImputed    is the current input imputed
     */
    void updateForest(boolean changeForest, double[] input, long timestamp, RandomCutForest forest, boolean isImputed) {
        double[] scaledInput = transformer.transformValues(internalTimeStamp, input, getShingledInput(shingleSize - 1),
                null, clipFactor);
        updateShingle(input, scaledInput);
        updateTimestamps(timestamp);
        if (isImputed) {
            numberOfImputed = numberOfImputed + 1;
        }
        if (changeForest && updateAllowed()) {
            forest.update(lastShingledPoint);
        }
    }

    /**
     * The postprocessing now has to handle imputation while changing the state;
     * note that the imputation is repeated to avoid storing potentially large
     * number of transient shingles (which would not be admitted to the forest
     * unless there is at least one actual value in the shingle)
     *
     * @param result                the descriptor of the evaluation on the current
     *                              point
     * @param lastAnomalyDescriptor the descriptor of the last known anomaly
     * @param forest                the resident RCF
     * @return the description with the explanation added and state updated
     */
    @Override
    public AnomalyDescriptor postProcess(AnomalyDescriptor result, RCFComputeDescriptor lastAnomalyDescriptor,
            RandomCutForest forest) {
        if (valuesSeen == startNormalization - 1) {
            dischargeInitial(forest);
        }
        double[] point = result.getRCFPoint();
        if (point != null) {
            if (result.getAnomalyGrade() > 0 && (numberOfImputed == 0 || (result.getTransformMethod() != DIFFERENCE)
                    && (result.getTransformMethod() != NORMALIZE_DIFFERENCE))) {
                // we cannot predict expected value easily if there are gaps in the shingle
                // this is doubly complicated for differenced transforms (if there are any
                // imputations in the shingle)
                populateAnomalyDescriptorDetails(result);
            }
            // this time the generated shingle is committed to the forest
            generateShingle(result, getTimeFactor(timeStampDeviations[1]), true, forest);
        }
        ++valuesSeen;
        return result;
    }

    // estimates the average timestamp gap; applies a small second-order
    // correction when the deviation is modest relative to the mean
    double getTimeFactor(Deviation deviation) {
        double timeFactor = deviation.getMean();
        double dev = deviation.getDeviation();
        if (dev > 0 && dev < timeFactor / 2) {
            // a correction
            timeFactor -= dev * dev / (2 * timeFactor);
        }
        return timeFactor;
    }

    /**
     * a block which is executed once. It first computes the multipliers for
     * normalization and then processes each of the stored inputs
     */
    protected void dischargeInitial(RandomCutForest forest) {
        Deviation tempTimeDeviation = new Deviation();
        for (int i = 0; i < initialTimeStamps.length - 1; i++) {
            tempTimeDeviation.update(initialTimeStamps[i + 1] - initialTimeStamps[i]);
        }
        double timeFactor = getTimeFactor(tempTimeDeviation);

        prepareInitialInput();
        Deviation[] deviations = getDeviations();
        Arrays.fill(previousTimeStamps, initialTimeStamps[0]);
        numberOfImputed = shingleSize;
        for (int i = 0; i < valuesSeen + 1; i++) {
            // initial imputation; not using the global dependency
            long lastInputTimeStamp = previousTimeStamps[shingleSize - 1];
            if (internalTimeStamp > 0) {
                double[] previous = new double[inputLength];
                System.arraycopy(lastShingledInput, lastShingledInput.length - inputLength, previous, 0, inputLength);
                int numberToImpute = determineGap(initialTimeStamps[i] - lastInputTimeStamp, timeFactor) - 1;
                if (numberToImpute > 0) {
                    double step = 1.0 / (numberToImpute + 1);
                    // the last impute corresponds to the current observed value
                    for (int j = 0; j < numberToImpute; j++) {
                        double[] result = basicImpute(step * (j + 1), previous, initialValues[i], DEFAULT_INITIAL);
                        double[] scaledInput = transformer.transformValues(internalTimeStamp, result,
                                getShingledInput(shingleSize - 1), deviations, clipFactor);
                        updateShingle(result, scaledInput);
                        updateTimestamps(initialTimeStamps[i]);
                        numberOfImputed = numberOfImputed + 1;
                        if (updateAllowed()) {
                            forest.update(lastShingledPoint);
                        }
                    }
                }
            }
            double[] scaledInput = transformer.transformValues(internalTimeStamp, initialValues[i],
                    getShingledInput(shingleSize - 1), deviations, clipFactor);
            updateState(initialValues[i], scaledInput, initialTimeStamps[i], lastInputTimeStamp);
            if (updateAllowed()) {
                forest.update(lastShingledPoint);
            }
        }
        initialTimeStamps = null;
        initialValues = null;
    }

    /**
     * determines the gap between the last known timestamp and the current timestamp
     *
     * @param timestampGap current gap
     * @param averageGap   the average gap (often determined by
     *                     timeStampDeviation.getMean()
     * @return the number of positions till timestamp
     */
    protected int determineGap(long timestampGap, double averageGap) {
        if (internalTimeStamp <= 1) {
            return 1;
        } else {
            double gap = timestampGap / averageGap;
            return (gap >= 1.5) ? (int) Math.ceil(gap) : 1;
        }
    }

    /**
     * a single function that constructs the next shingle, with the option of
     * committing them to the forest However the shingle needs to be generated
     * before we process a point; and can only be committed once the point has been
     * scored. Having the same deterministic transformation is essential
     *
     * @param descriptor   description of the current point
     * @param averageGap   the gap in timestamps
     * @param changeForest boolean determining if we commit to the forest or not
     * @param forest       the resident RCF
     * @return the next shingle
     */
    protected double[] generateShingle(AnomalyDescriptor descriptor, double averageGap, boolean changeForest,
            RandomCutForest forest) {
        double[] input = descriptor.getCurrentInput();
        long timestamp = descriptor.getInputTimestamp();
        long lastInputTimeStamp = previousTimeStamps[shingleSize - 1];
        int[] missingValues = descriptor.getMissingValues();
        checkArgument(missingValues == null || (imputationMethod != LINEAR && imputationMethod != NEXT),
                " cannot perform imputation on most recent missing value with this method");
        /*
         * Note only ZERO, FIXED_VALUES, PREVIOUS and RCF are reasonable options if
         * missing values are present.
         */
        checkArgument(internalTimeStamp > 0, "imputation should have forced normalization");

        double[] savedInput = getShingledInput(shingleSize - 1);
        // previous value should be defined
        double[] previous = new double[inputLength];
        System.arraycopy(lastShingledInput, lastShingledInput.length - inputLength, previous, 0, inputLength);
        // using the global dependency
        int numberToImpute = determineGap(timestamp - lastInputTimeStamp, averageGap) - 1;
        if (numberToImpute > 0) {
            descriptor.setNumberOfNewImputes(numberToImpute);
            double step = 1.0 / (numberToImpute + 1);
            // the last impute corresponds to the current observed value
            for (int i = 0; i < numberToImpute; i++) {
                double[] result = impute(false, descriptor, step * (i + 1), previous, forest);
                updateForest(changeForest, result, timestamp, forest, true);
            }
        }
        double[] newInput = (missingValues == null) ? input : impute(true, descriptor, 0, previous, forest);
        updateForest(changeForest, newInput, timestamp, forest, false);
        if (changeForest) {
            updateTimeStampDeviations(timestamp, lastInputTimeStamp);
            transformer.updateDeviation(newInput, savedInput);
        }
        return Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);
    }

    /**
     * The impute step which predicts the completion of a partial input or predicts
     * the entire input for that timestamp
     *
     * @param isPartial    a boolean indicating if the input is partial
     * @param descriptor   the state of the current evaluation (missing values
     *                     cannot be null for partial tuples)
     * @param stepFraction the (time) position of the point to impute (can also be
     *                     the final possibly incomplete point)
     * @param previous     the last input
     * @param forest       the RCF
     * @return the imputed tuple for the time position
     */
    protected double[] impute(boolean isPartial, AnomalyDescriptor descriptor, double stepFraction, double[] previous,
            RandomCutForest forest) {
        double[] input = descriptor.getCurrentInput();
        int[] missingValues = descriptor.getMissingValues();
        // we will pass partial input, which would be true for only one tuple
        double[] partialInput = (isPartial) ? Arrays.copyOf(input, inputLength) : null;
        // use a default for RCF if trees are unusable, as reflected in the
        // isReasonableForecast()
        ImputationMethod method = descriptor.getImputationMethod();
        if (method == RCF) {
            if (descriptor.isReasonableForecast()) {
                return imputeRCF(forest, partialInput, missingValues);
            } else {
                return basicImpute(stepFraction, previous, partialInput, DEFAULT_DYNAMIC);
            }
        } else {
            return basicImpute(stepFraction, previous, input, method);
        }
    }

    /**
     * a basic function that performs a single step imputation in the input space
     * the function has to be deterministic since it is run twice, first at scoring
     * and then at committing to the RCF
     *
     * @param stepFraction the interpolation fraction
     * @param previous     the previous input point
     * @param input        the current input point
     * @param method       the imputation method of choice
     * @return the imputed/interpolated result
     */
    protected double[] basicImpute(double stepFraction, double[] previous, double[] input, ImputationMethod method) {
        double[] result = new double[inputLength];
        if (method == FIXED_VALUES) {
            System.arraycopy(defaultFill, 0, result, 0, inputLength);
        } else if (method == LINEAR) {
            for (int z = 0; z < inputLength; z++) {
                result[z] = previous[z] + stepFraction * (input[z] - previous[z]);
            }
        } else if (method == PREVIOUS) {
            System.arraycopy(previous, 0, result, 0, inputLength);
        } else if (method == NEXT) {
            System.arraycopy(input, 0, result, 0, inputLength);
        }
        // note: for ZERO (and unhandled methods) the freshly allocated zero array
        // is returned as-is
        return result;
    }

    /**
     * Uses RCF to impute the missing values in the current input or impute the
     * entire set of values for that time step (based on partial input being null)
     *
     * @param forest        the RCF
     * @param partialInput  the information available about the most recent point
     * @param missingValues the array indicating missing values for the partial
     *                      input
     * @return the potential completion of the partial tuple or the predicted
     *         current value
     */
    protected double[] imputeRCF(RandomCutForest forest, double[] partialInput, int[] missingValues) {
        double[] temp = Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);
        shiftLeft(temp, inputLength);
        int startPosition = inputLength * (shingleSize - 1);
        int[] missingIndices;
        if (missingValues == null) {
            // the whole most-recent tuple is treated as missing
            missingIndices = new int[inputLength];
            for (int i = 0; i < inputLength; i++) {
                missingIndices[i] = startPosition + i;
            }
        } else {
            checkArgument(partialInput != null, "incorrect input");
            missingIndices = Arrays.copyOf(missingValues, missingValues.length);
            double[] scaledInput = transformer.transformValues(internalTimeStamp, partialInput,
                    getShingledInput(shingleSize - 1), null, clipFactor);
            copyAtEnd(temp, scaledInput);
        }
        double[] newPoint = forest.imputeMissingValues(temp, missingIndices.length, missingIndices);
        return invert(inputLength, startPosition, 0, newPoint);
    }
}
| 610 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/InitialSegmentPreprocessor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor;
import static com.amazon.randomcutforest.parkservices.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
@Getter
@Setter
public class InitialSegmentPreprocessor extends Preprocessor {
// allocates the buffers that hold the first startNormalization points, which
// are used to compute normalization statistics before streaming begins
public InitialSegmentPreprocessor(Builder<?> builder) {
    super(builder);
    initialValues = new double[startNormalization][];
    initialTimeStamps = new long[startNormalization];
}
/**
 * a modified preprocessing block which buffers the initial number of points
 * (startNormalization) and then switches to streaming transformation
 *
 * @param description           the description of the input point
 * @param lastAnomalyDescriptor the descriptor of the last anomaly
 * @param forest                RCF
 * @return an AnomalyDescriptor object to be used in anomaly detection
 */
@Override
public AnomalyDescriptor preProcess(AnomalyDescriptor description, RCFComputeDescriptor lastAnomalyDescriptor,
        RandomCutForest forest) {
    // once the initial segment is full, delegate to the streaming path
    if (valuesSeen >= startNormalization) {
        return super.preProcess(description, lastAnomalyDescriptor, forest);
    }
    initialSetup(description, lastAnomalyDescriptor, forest);
    storeInitial(description.getCurrentInput(), description.getInputTimestamp());
    return description;
}
// same for post process
@Override
public AnomalyDescriptor postProcess(AnomalyDescriptor description, RCFComputeDescriptor lastAnomalyDescriptor,
RandomCutForest forest) {
AnomalyDescriptor answer = super.postProcess(description, lastAnomalyDescriptor, forest);
if (valuesSeen == startNormalization) {
dischargeInitial(forest);
answer.setPostDeviations(getSmoothedDeviations());
}
return answer;
}
/**
* stores initial data for normalization
*
* @param inputPoint input data
* @param timestamp timestamp
*/
protected void storeInitial(double[] inputPoint, long timestamp) {
initialTimeStamps[valuesSeen] = timestamp;
initialValues[valuesSeen] = Arrays.copyOf(inputPoint, inputPoint.length);
++valuesSeen;
}
// computes the normalization statistics
protected Deviation[] getDeviations() {
if (requireInitialSegment(normalizeTime, transformMethod, mode)) {
Deviation[] tempList = new Deviation[NUMBER_OF_STATS * inputLength];
for (int j = 0; j < NUMBER_OF_STATS * inputLength; j++) {
tempList[j] = new Deviation(transformDecay);
}
for (int i = 0; i < initialValues.length; i++) {
for (int j = 0; j < inputLength; j++) {
tempList[j].update(initialValues[i][j]);
double value = (i == 0) ? 0 : initialValues[i][j] - initialValues[i - 1][j];
tempList[j + inputLength].update(value);
}
}
for (int i = 0; i < initialValues.length; i++) {
for (int j = 0; j < inputLength; j++) {
tempList[j + 2 * inputLength].update(tempList[j].getDeviation());
tempList[j + 3 * inputLength].update(tempList[j + inputLength].getMean());
tempList[j + 4 * inputLength].update(tempList[j + inputLength].getDeviation());
}
}
return tempList;
}
return null;
}
/**
* a block which executes once; it first computes the multipliers for
* normalization and then processes each of the stored inputs
*/
protected void dischargeInitial(RandomCutForest forest) {
Deviation tempTimeDeviation = new Deviation();
for (int i = 0; i < initialTimeStamps.length - 1; i++) {
tempTimeDeviation.update(initialTimeStamps[i + 1] - initialTimeStamps[i]);
}
// should agree with getTimeScale()
double timeFactor = 1.0 + tempTimeDeviation.getDeviation();
Deviation[] deviations = getDeviations();
for (int i = 0; i < valuesSeen; i++) {
double[] scaledInput = getScaledInput(initialValues[i], initialTimeStamps[i], deviations, timeFactor);
updateState(initialValues[i], scaledInput, initialTimeStamps[i], previousTimeStamps[shingleSize - 1]);
dataQuality[0].update(1.0);
forest.update(scaledInput);
}
initialTimeStamps = null;
initialValues = null;
}
}
| 611 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/IPreprocessor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * Interface of the preprocessing layer between raw input tuples and the
 * RandomCutForest: implementations transform inputs (and, depending on the
 * mode, timestamps) into the scaled/shingled points the forest consumes, and
 * invert forest outputs back to the input space.
 */
public interface IPreprocessor {

    // number of consecutive tuples composed into one RCF point
    int getShingleSize();

    // length of a single (untransformed) input tuple
    int getInputLength();

    // the most recent shingled point, in the transformed (RCF) space
    double[] getLastShingledPoint();

    // the additive component of the current transformation, per coordinate
    double[] getShift();

    // the multiplicative component of the current transformation, per coordinate
    double[] getScale();

    // the internal (update) clock; may differ from wall-clock input timestamps
    int getInternalTimeStamp();

    /**
     * Inverts a forecast expressed in the transformed (RCF) space back into the
     * input space, attaching predicted timestamps where applicable.
     *
     * @param ranges       forecast ranges in the RCF space
     * @param lastRelevant descriptor of the last relevant (typically anomalous)
     *                     computation
     * @return a timed range vector in the original input space
     */
    TimedRangeVector invertForecastRange(RangeVector ranges, RCFComputeDescriptor lastRelevant);

    // transforms the current input into an RCF point (stored in the descriptor)
    // prior to scoring
    <P extends AnomalyDescriptor> P preProcess(P current, RCFComputeDescriptor lastRelevant, RandomCutForest forest);

    // updates internal state (and the forest) after scoring; augments the
    // descriptor with values mapped back to the input space
    <P extends AnomalyDescriptor> P postProcess(P current, RCFComputeDescriptor lastRelevant, RandomCutForest forest);
}
| 612 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/Preprocessor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SHINGLE_SIZE;
import static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;
import static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;
import static com.amazon.randomcutforest.parkservices.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;
import static java.lang.Math.exp;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.Arrays;
import java.util.Optional;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.DifferenceTransformer;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.ITransformer;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.NormalizedDifferenceTransformer;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.NormalizedTransformer;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.SubtractMATransformer;
import com.amazon.randomcutforest.parkservices.preprocessor.transform.WeightedTransformer;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
@Getter
@Setter
public class Preprocessor implements IPreprocessor {
    // multiplier applied on top of normalization (widens the +/- one-deviation
    // band in the transformed space)
    public static double NORMALIZATION_SCALING_FACTOR = 2.0;

    // in case of normalization, uses this constant in denominator to ensure
    // smoothness near 0
    public static double DEFAULT_NORMALIZATION_PRECISION = 1e-3;

    // the number of points to buffer before starting to normalize/gather statistic
    public static int DEFAULT_START_NORMALIZATION = 10;

    // the number at which to stop normalization -- it may not be easy to imagine
    // why this is required
    // but this is comforting to those interested in "stopping" a model from
    // learning continuously
    public static int DEFAULT_STOP_NORMALIZATION = Integer.MAX_VALUE;

    // in case of normalization the deviations beyond 10 Sigma are likely measure 0
    // events
    public static int DEFAULT_CLIP_NORMALIZATION = 100;

    // normalization is not turned on by default
    public static boolean DEFAULT_NORMALIZATION = false;

    // differencing is not turned on by default
    // for some smooth predictable data differencing is helpful, but can have
    // unintended consequences
    public static boolean DEFAULT_DIFFERENCING = false;

    // the fraction of data points that can be imputed in a shingle before the
    // shingle is admitted in a forest
    public static double DEFAULT_USE_IMPUTED_FRACTION = 0.5;

    // minimum number of observations before using a model to predict any expected
    // behavior -- if we can score, we should predict
    public static int MINIMUM_OBSERVATIONS_FOR_EXPECTED = 100;

    // default number of Deviation objects tracking data quality
    public static int DEFAULT_DATA_QUALITY_STATES = 1;

    // the input corresponds to timestamp data and this statistic helps align input
    protected Deviation[] timeStampDeviations;

    // normalize time difference;
    protected boolean normalizeTime;

    // weight of the (augmented) time coordinate; 0 disables time in the forest
    protected double weightTime;

    // decay rate of the streaming statistics used by the transformations
    protected double transformDecay;

    // recording the last seen timestamp
    protected long[] previousTimeStamps;

    // this parameter is used as a clock if imputing missing values in the input
    // this is different from valuesSeen in STREAMING_IMPUTE
    protected int internalTimeStamp = 0;

    // initial values used for normalization
    protected double[][] initialValues;
    protected long[] initialTimeStamps;

    // initial values after which to start normalization
    protected int startNormalization;

    // sequence number to stop normalization at
    protected Integer stopNormalization;

    // a number indicating the actual values seen (not imputed)
    protected int valuesSeen = 0;

    // to use a set of default values for imputation
    protected double[] defaultFill;

    // fraction of data that should be actual input before they are added to RCF
    protected double useImputedFraction = DEFAULT_USE_IMPUTED_FRACTION;

    // number of imputed values in stored shingle
    protected int numberOfImputed;

    // particular strategy for impute
    protected ImputationMethod imputationMethod = PREVIOUS;

    // used in normalization
    protected double clipFactor = DEFAULT_CLIP_NORMALIZATION;

    // last shingled values (without normalization/change or augmentation by time)
    protected double[] lastShingledInput;

    // last point (in the transformed/RCF space)
    protected double[] lastShingledPoint;

    // method used to transform data in the preprocessor
    protected TransformMethod transformMethod;

    // shingle size in the forest
    protected int shingleSize;

    // actual dimension of the forest
    protected int dimension;

    // length of input to be seen, may depend on internal/external shingling
    protected int inputLength;

    // the mode of the forest used in this preprocessing
    protected ForestMode mode;

    // measures the data quality in imputed modes
    protected Deviation[] dataQuality;

    // the transformation (weighting/differencing/normalization) applied to inputs
    protected ITransformer transformer;
    /**
     * Constructs a preprocessor from the builder: validates the relationships
     * between input length, shingle size, dimensions and the chosen forest mode;
     * initializes the streaming statistics; and instantiates the transformer that
     * corresponds to the transform method.
     *
     * @param builder a configured builder
     * @throws IllegalArgumentException if the configuration is inconsistent
     */
    public Preprocessor(Builder<?> builder) {
        checkArgument(builder.transformMethod != null, "transform required");
        checkArgument(builder.forestMode != null, " forest mode is required");
        checkArgument(builder.inputLength > 0, "incorrect input length");
        checkArgument(builder.shingleSize > 0, "incorrect shingle size");
        checkArgument(builder.dimensions > 0, "incorrect dimensions");
        checkArgument(builder.shingleSize == 1 || builder.dimensions % builder.shingleSize == 0,
                " shingle size should divide the dimensions");
        checkArgument(builder.forestMode == ForestMode.TIME_AUGMENTED || builder.inputLength == builder.dimensions
                || builder.inputLength * builder.shingleSize == builder.dimensions, "incorrect input size");
        // TIME_AUGMENTED adds one extra (time) coordinate per shingle entry
        checkArgument(
                builder.forestMode != ForestMode.TIME_AUGMENTED
                        || (builder.inputLength + 1) * builder.shingleSize == builder.dimensions,
                "incorrect input size");
        checkArgument(builder.startNormalization <= builder.stopNormalization, "incorrect normalization parameters");
        checkArgument(builder.startNormalization > 0 || !builder.normalizeTime, " start of normalization cannot be 0");
        checkArgument(
                builder.startNormalization > 0 || !(builder.transformMethod == TransformMethod.NORMALIZE
                        || builder.transformMethod == TransformMethod.NORMALIZE_DIFFERENCE),
                " start of normalization cannot be 0 for these transformations");
        checkArgument(builder.weights == null || builder.weights.length >= builder.inputLength, " incorrect weights");
        inputLength = builder.inputLength;
        dimension = builder.dimensions;
        shingleSize = builder.shingleSize;
        mode = builder.forestMode;
        lastShingledPoint = new double[dimension];
        this.transformMethod = builder.transformMethod;
        this.startNormalization = builder.startNormalization;
        this.stopNormalization = builder.stopNormalization;
        this.normalizeTime = builder.normalizeTime;
        // one weight per input coordinate (default 1.0); a longer builder array
        // carries the time weight in the extra (last) slot
        double[] weights = new double[inputLength];
        Arrays.fill(weights, 1.0);
        if (builder.weights != null) {
            if (builder.weights.length == inputLength) {
                System.arraycopy(builder.weights, 0, weights, 0, inputLength);
                weightTime = builder.weightTime;
            } else {
                System.arraycopy(builder.weights, 0, weights, 0, inputLength);
                weightTime = builder.weights[inputLength];
            }
        } else {
            weightTime = builder.weightTime;
        }
        previousTimeStamps = new long[shingleSize];
        if (inputLength == dimension) {
            // externally shingled input: the stored shingle is the input itself
            lastShingledInput = new double[dimension];
        } else {
            lastShingledInput = new double[shingleSize * inputLength];
        }
        transformDecay = builder.timeDecay;
        dataQuality = builder.dataQuality.orElse(new Deviation[] { new Deviation(transformDecay) });
        // NUMBER_OF_STATS statistics per input coordinate; possibly restored from a
        // serialized model via the builder
        Deviation[] deviationList = new Deviation[NUMBER_OF_STATS * inputLength];
        manageDeviations(deviationList, builder.deviations, transformDecay);
        timeStampDeviations = new Deviation[NUMBER_OF_STATS];
        manageDeviations(timeStampDeviations, builder.timeDeviations, transformDecay);
        // NONE behaves like WEIGHTED restricted to unit weights
        if (transformMethod == TransformMethod.NONE) {
            for (int i = 0; i < inputLength; i++) {
                checkArgument(weights[i] == 1.0, "incorrect weights");
            }
            transformer = new WeightedTransformer(weights, deviationList);
        } else if (transformMethod == TransformMethod.WEIGHTED) {
            transformer = new WeightedTransformer(weights, deviationList);
        } else if (transformMethod == TransformMethod.DIFFERENCE) {
            transformer = new DifferenceTransformer(weights, deviationList);
        } else if (transformMethod == TransformMethod.SUBTRACT_MA) {
            transformer = new SubtractMATransformer(weights, deviationList);
        } else if (transformMethod == TransformMethod.NORMALIZE) {
            transformer = new NormalizedTransformer(weights, deviationList);
        } else {
            transformer = new NormalizedDifferenceTransformer(weights, deviationList);
        }
        if (mode == ForestMode.STREAMING_IMPUTE) {
            imputationMethod = builder.imputationMethod;
            // imputation needs a notion of elapsed time
            normalizeTime = true;
            if (imputationMethod == FIXED_VALUES) {
                int baseDimension = builder.dimensions / builder.shingleSize;
                // shingling will be performed in this layer and not in forest
                // so that we control admittance of imputed shingles
                checkArgument(builder.fillValues != null && builder.fillValues.length == baseDimension,
                        " the number of values should match the shingled input");
                this.defaultFill = Arrays.copyOf(builder.fillValues, builder.fillValues.length);
            }
            this.useImputedFraction = builder.useImputedFraction.orElse(0.5);
        }
    }
// the following fills the first argument as copies of the original
// but if the original is null or otherwise then new deviations are created; the
// last third
// are filled with 0.1 * transformDecay and are reserved for smoothing
void manageDeviations(Deviation[] deviationList, Optional<Deviation[]> original, double timeDecay) {
checkArgument(deviationList.length % NUMBER_OF_STATS == 0, " has to be a multiple of five");
int usedDeviations = 0;
if (original.isPresent()) {
Deviation[] list = original.get();
usedDeviations = min(list.length, deviationList.length);
// note the lengths can be different based on a different version of the model
// we will convert the model; and rely on RCF's ability to adjust to new data
for (int i = 0; i < usedDeviations; i++) {
deviationList[i] = list[i].copy();
}
}
for (int i = usedDeviations; i < deviationList.length - 2 * deviationList.length / 5; i++) {
deviationList[i] = new Deviation(timeDecay);
}
usedDeviations = max(usedDeviations, deviationList.length - 2 * deviationList.length / 5);
for (int i = usedDeviations; i < deviationList.length; i++) {
deviationList[i] = new Deviation(0.1 * timeDecay);
}
}
/**
* decides if normalization is required, and then is used to store and discharge
* an initial segment
*
* @return a boolean indicating th need to store initial values
*/
public static boolean requireInitialSegment(boolean normalizeTime, TransformMethod transformMethod,
ForestMode mode) {
return (normalizeTime || transformMethod == TransformMethod.NORMALIZE
|| transformMethod == TransformMethod.NORMALIZE_DIFFERENCE)
|| transformMethod == TransformMethod.SUBTRACT_MA;
}
    /**
     * sets up the AnomalyDescriptor object with the configuration and the current
     * transformation state (scale, shift, deviations) so that downstream scoring
     * and correction can interpret the RCF point
     *
     * @param description           description of the input point
     * @param lastAnomalyDescriptor the descriptor of the last anomaly
     * @param forest                the RCF
     * @return the descriptor to be used for anomaly scoring
     */
    <P extends AnomalyDescriptor> P initialSetup(P description, RCFComputeDescriptor lastAnomalyDescriptor,
            RandomCutForest forest) {
        description.setForestMode(mode);
        description.setTransformMethod(transformMethod);
        description.setImputationMethod(imputationMethod);
        description.setNumberOfTrees(forest.getNumberOfTrees());
        description.setTotalUpdates(forest.getTotalUpdates());
        description.setLastAnomalyInternalTimestamp(lastAnomalyDescriptor.getInternalTimeStamp());
        description.setLastExpectedRCFPoint(lastAnomalyDescriptor.getExpectedRCFPoint());
        description.setDataConfidence(forest.getTimeDecay(), valuesSeen, forest.getOutputAfter(),
                dataQuality[0].getMean());
        description.setShingleSize(shingleSize);
        description.setInputLength(inputLength);
        description.setDimension(dimension);
        // the adjustments ensure that external and internal shingling always follow the
        // same path note that the preprocessor performs the shingling for
        // STREAMING_IMPUTE
        long adjustedInternal = internalTimeStamp + (forest.isInternalShinglingEnabled() ? 0 : shingleSize - 1);
        int dataDimension = forest.isInternalShinglingEnabled() || mode == ForestMode.STREAMING_IMPUTE
                ? inputLength * shingleSize
                : inputLength;
        // forecasts require both enough observations and enough dimensions to be
        // meaningful
        description
                .setReasonableForecast((adjustedInternal > MINIMUM_OBSERVATIONS_FOR_EXPECTED) && (dataDimension >= 4));
        description.setScale(getScale());
        description.setShift(getShift());
        description.setDeviations(getSmoothedDeviations());
        return description;
    }
/**
* a generic preprocessing invoked by ThresholdedRandomCutForest
*
* @param description description of the input point (so far)
* @param lastAnomalyDescriptor the descriptor of the last anomaly
* @param forest RCF
* @return the initialized AnomalyDescriptor with the actual RCF point filled in
* (could be a result of multiple transformations)
*/
public <P extends AnomalyDescriptor> P preProcess(P description, RCFComputeDescriptor lastAnomalyDescriptor,
RandomCutForest forest) {
initialSetup(description, lastAnomalyDescriptor, forest);
double[] inputPoint = description.getCurrentInput();
long timestamp = description.getInputTimestamp();
double[] scaledInput = getScaledInput(inputPoint, timestamp, null, getTimeShift());
if (scaledInput == null) {
return description;
}
double[] point;
if (forest.isInternalShinglingEnabled()) {
point = toDoubleArray(forest.transformToShingledPoint(toFloatArray(scaledInput)));
} else {
int dimension = forest.getDimensions();
if (scaledInput.length == dimension) {
point = scaledInput;
} else {
point = new double[dimension];
System.arraycopy(getLastShingledPoint(), scaledInput.length, point, 0, dimension - scaledInput.length);
System.arraycopy(scaledInput, 0, point, dimension - scaledInput.length, scaledInput.length);
}
}
if (description.getMissingValues() != null) {
int[] missing = new int[description.getMissingValues().length];
for (int i = 0; i < missing.length; i++) {
missing[i] = description.getMissingValues()[i] + dimension - scaledInput.length + i;
}
point = forest.imputeMissingValues(point, missing.length, missing);
}
description.setRCFPoint(point);
description.setInternalTimeStamp(internalTimeStamp); // no impute
description.setNumberOfNewImputes(0);
return description;
}
public double[] getScale() {
if (mode != ForestMode.TIME_AUGMENTED) {
return transformer.getScale();
} else {
double[] scale = new double[inputLength + 1];
System.arraycopy(transformer.getScale(), 0, scale, 0, inputLength);
scale[inputLength] = (weightTime == 0) ? 0 : 1.0 / weightTime;
if (normalizeTime) {
scale[inputLength] *= NORMALIZATION_SCALING_FACTOR
* (getTimeGapDifference() + DEFAULT_NORMALIZATION_PRECISION);
}
return scale;
}
}
public double[] getShift() {
double[] previous = (inputLength == lastShingledInput.length) ? lastShingledInput
: getShingledInput(shingleSize - 1);
if (mode != ForestMode.TIME_AUGMENTED) {
return transformer.getShift(previous);
} else {
double[] shift = new double[inputLength + 1];
System.arraycopy(transformer.getShift(previous), 0, shift, 0, inputLength);
// time is always differenced
shift[inputLength] = ((normalizeTime) ? getTimeShift() : 0) + previousTimeStamps[shingleSize - 1];
return shift;
}
}
public double[] getSmoothedDeviations() {
if (mode != ForestMode.TIME_AUGMENTED) {
double[] deviations = new double[2 * inputLength];
System.arraycopy(transformer.getSmoothedDeviations(), 0, deviations, 0, inputLength);
System.arraycopy(transformer.getSmoothedDifferenceDeviations(), 0, deviations, inputLength, inputLength);
return deviations;
} else {
double[] deviations = new double[2 * inputLength + 2];
System.arraycopy(transformer.getSmoothedDeviations(), 0, deviations, 0, inputLength);
System.arraycopy(transformer.getSmoothedDifferenceDeviations(), 0, deviations, inputLength + 1,
inputLength);
// time is differenced (for now) or unchanged
deviations[inputLength + 1] = timeStampDeviations[4].getMean();
deviations[2 * inputLength + 1] = timeStampDeviations[4].getMean();
return deviations;
}
}
    /**
     * a generic postprocessor which updates all the state: the shingle, the
     * streaming statistics, and the forest itself
     *
     * @param result the descriptor of the evaluation on the current point
     * @param forest the resident RCF
     * @return the descriptor (mutated and augmented appropriately)
     */
    public <P extends AnomalyDescriptor> P postProcess(P result, RCFComputeDescriptor lastAnomalyDescriptor,
            RandomCutForest forest) {
        double[] point = result.getRCFPoint();
        if (point == null) {
            // nothing was scored; there is no state to update
            return result;
        }
        if (result.getAnomalyGrade() > 0) {
            populateAnomalyDescriptorDetails(result);
        }
        double[] inputPoint = result.getCurrentInput();
        long timestamp = result.getInputTimestamp();
        updateState(inputPoint, point, timestamp, previousTimeStamps[shingleSize - 1]);
        ++valuesSeen;
        dataQuality[0].update(1.0);
        if (forest.isInternalShinglingEnabled()) {
            // feed only the newest (unshingled) block; the forest maintains its own
            // shingle
            int length = inputLength + ((mode == ForestMode.TIME_AUGMENTED) ? 1 : 0);
            double[] scaledInput = new double[length];
            System.arraycopy(point, point.length - length, scaledInput, 0, length);
            forest.update(scaledInput);
        } else {
            forest.update(point);
        }
        if (result.getAnomalyGrade() > 0) {
            // shift recomputed after the update; used downstream to correct forecasts
            double[] postShift = getShift();
            result.setPostShift(postShift);
            result.setTransformDecay(transformDecay);
        }
        // after the insertions
        result.setPostDeviations(getSmoothedDeviations());
        return result;
    }
    /**
     * adds information of expected point to the result descriptor (provided it is
     * marked anomalous) Note that is uses relativeIndex; that is, it can determine
     * that the anomaly occurred in the past (but within the shingle) and not at the
     * current point -- even though the detection has triggered now While this may
     * appear to be improper, information theoretically we may have a situation
     * where an anomaly is only discoverable after the "horse has bolted" -- suppose
     * that we see a random mixture of the triples { 1, 2, 3} and {2, 4, 5}
     * corresponding to "slow weeks" and "busy weeks". For example 1, 2, 3, 1, 2, 3,
     * 2, 4, 5, 1, 2, 3, 2, 4, 5, ... etc. If we see { 2, 2, X } (at positions 0 and
     * 1 (mod 3)) and are yet to see X, then we can infer that the pattern is
     * anomalous -- but we cannot determine which of the 2's are to blame. If it
     * were the first 2, then the detection is late. If X = 3 then we know it is the
     * first 2 in that unfinished triple; and if X = 5 then it is the second 2. In a
     * sense we are only truly wiser once the bolted horse has returned! But if we
     * were to say that the anomaly was always at the second 2 then that appears to
     * be suboptimal -- one natural path can be based on the ratio of the triples {
     * 1, 2, 3} and {2, 4, 5} seen before. Even better, we can attempt to estimate a
     * dynamic time dependent ratio -- and that is what RCF would do.
     *
     * @param result the description of the current point
     */
    protected void populateAnomalyDescriptorDetails(AnomalyDescriptor result) {
        // base is the number of coordinates per shingle entry (includes time in
        // TIME_AUGMENTED mode until decremented below)
        int base = dimension / shingleSize;
        double[] reference = result.getCurrentInput();
        double[] point = result.getRCFPoint();
        double[] newPoint = result.getExpectedRCFPoint();
        int index = result.getRelativeIndex();
        if (index < 0) {
            // the anomaly is attributed to an older entry within the shingle
            reference = getShingledInput(shingleSize + index);
            result.setPastTimeStamp(getTimeStamp(shingleSize + index));
        }
        result.setPastValues(reference);
        if (newPoint != null) {
            if (mode == ForestMode.TIME_AUGMENTED) {
                // the last coordinate of the relevant block is the (transformed) time
                int endPosition = (shingleSize - 1 + index + 1) * dimension / shingleSize;
                double timeGap = (newPoint[endPosition - 1] - point[endPosition - 1]);
                long expectedTimestamp = (timeGap == 0) ? getTimeStamp(shingleSize - 1 + index)
                        : inverseMapTime(timeGap, index);
                result.setExpectedTimeStamp(expectedTimestamp);
            }
            double[] values = getExpectedValue(index, reference, point, newPoint);
            result.setExpectedValues(0, values, 1.0);
        }
        int startPosition = (shingleSize - 1 + result.getRelativeIndex()) * base;
        DiVector attribution = result.getAttribution();
        if (mode == ForestMode.TIME_AUGMENTED) {
            // report the time attribution separately from the input coordinates
            --base;
        }
        double[] flattenedAttribution = new double[base];
        for (int i = 0; i < base; i++) {
            flattenedAttribution[i] = attribution.getHighLowSum(startPosition + i);
        }
        result.setRelevantAttribution(flattenedAttribution);
        if (mode == ForestMode.TIME_AUGMENTED) {
            result.setTimeAttribution(attribution.getHighLowSum(startPosition + base));
        }
    }
/**
* maps the time back. The returned value is an approximation for
* relativePosition less than 0 which corresponds to an anomaly in the past.
* Since the state of the statistic is now changed based on more recent values
*
* @param gap estimated value
* @param relativePosition how far back in the shingle
* @return transform of the time value to original input space
*/
protected long inverseMapTime(double gap, int relativePosition) {
// note this corresponds to differencing being always on
checkArgument(shingleSize + relativePosition >= 0, " error");
return inverseMapTimeValue(gap, previousTimeStamps[shingleSize - 1 + relativePosition]);
}
// same as inverseMapTime, using explicit value also useful in forecast
protected long inverseMapTimeValue(double gap, long timestamp) {
double factor = (weightTime == 0) ? 0 : 1.0 / weightTime;
if (factor == 0) {
return 0;
}
if (normalizeTime) {
return (long) Math
.round(timestamp + getTimeShift() + NORMALIZATION_SCALING_FACTOR * gap * getTimeScale() * factor);
} else {
return (long) Math.round(gap * factor + timestamp);
}
}
/**
* returns the input values corresponding to a position in the shingle; this is
* needed in the corrector steps; and avoids the need for replicating this
* information downstream
*
* @param index position in the shingle
* @return the input values for those positions in the shingle
*/
public double[] getShingledInput(int index) {
int base = lastShingledInput.length / shingleSize;
double[] values = new double[base];
System.arraycopy(lastShingledInput, index * base, values, 0, base);
return values;
}
/**
* produces the expected value given location of the anomaly -- being aware that
* the nearest anomaly may be behind us in time.
*
* @param relativeBlockIndex the relative index of the anomaly
* @param reference the reference input (so that we do not generate
* arbitrary rounding errors of transformations which
* can be indistinguishable from true expected values)
* @param point the point (in the RCF shingled space)
* @param newPoint the expected point (in the RCF shingled space) --
* where only the most egregiously offending entries
* corresponding to the shingleSize - 1 +
* relativeBlockIndex are changed.
* @return the set of values (in the input space) that would have produced
* newPoint
*/
protected double[] getExpectedValue(int relativeBlockIndex, double[] reference, double[] point, double[] newPoint) {
int base = dimension / shingleSize;
int startPosition = (shingleSize - 1 + relativeBlockIndex) * base;
if (mode == ForestMode.TIME_AUGMENTED) {
--base;
}
double[] values = invert(base, startPosition, relativeBlockIndex, newPoint);
for (int i = 0; i < base; i++) {
double currentValue = (reference.length == base) ? reference[i] : reference[startPosition + i];
values[i] = (point[startPosition + i] == newPoint[startPosition + i]) ? currentValue : values[i];
}
return values;
}
/**
* inverts the values to the input space from the RCF space
*
* @param base the number of baseDimensions (often inputLength,
* unless time augmented)
* @param startPosition the position in the vector to invert
* @param relativeBlockIndex the relative blockIndex (related to the position)
* @param newPoint the vector
* @return the values [startPosition, startPosition + base -1] which would
* (approximately) produce newPoint
*/
protected double[] invert(int base, int startPosition, int relativeBlockIndex, double[] newPoint) {
double[] values = new double[base];
System.arraycopy(newPoint, startPosition, values, 0, base);
return transformer.invert(values, getShingledInput(shingleSize - 1 + relativeBlockIndex));
}
    /**
     *
     * The function inverts the forecast generated by a TRCF model; however as is
     * clear from below, the inversion mandates that the different transformations
     * be handled by different and separate classes. At the same time, different
     * parts of TRCF refer to the same information and thus it makes sense to add
     * this testable function, and refactor with the newer tests present.
     *
     * @param ranges               the forecast with ranges
     * @param lastAnomalyDescriptor description of last anomaly
     * @return a timed range vector that also contains the time information
     *         corresponding to the forecasts; note that the time values would be 0
     *         for STREAMING_IMPUTE mode
     */
    public TimedRangeVector invertForecastRange(RangeVector ranges, RCFComputeDescriptor lastAnomalyDescriptor) {
        int baseDimension = inputLength + (mode == ForestMode.TIME_AUGMENTED ? 1 : 0);
        checkArgument(ranges.values.length % baseDimension == 0, " incorrect length of ranges");
        int horizon = ranges.values.length / baseDimension;
        int gap = (int) (internalTimeStamp - lastAnomalyDescriptor.getInternalTimeStamp());
        double[] correction = lastAnomalyDescriptor.getDeltaShift();
        if (correction != null) {
            // decay the post-anomaly shift correction with elapsed time
            // NOTE(review): this scales the array returned by getDeltaShift() in
            // place -- if that is the descriptor's internal array (not a copy),
            // repeated calls compound the decay; confirm getDeltaShift's contract
            double decay = max(lastAnomalyDescriptor.getTransformDecay(), 1.0 / (3 * shingleSize));
            double factor = exp(-gap * decay);
            for (int i = 0; i < correction.length; i++) {
                correction[i] *= factor;
            }
        } else {
            correction = new double[baseDimension];
        }
        long localTimeStamp = previousTimeStamps[shingleSize - 1];
        TimedRangeVector timedRangeVector;
        if (mode != ForestMode.TIME_AUGMENTED) {
            timedRangeVector = new TimedRangeVector(ranges, horizon);
            // Note that STREAMING_IMPUTE we are already using the time values
            // to fill in values -- moreover such missing values can be large in number
            // predicting next timestamps in the future in such a scenario would correspond
            // to a joint prediction and TIME_AUGMENTED mode may be more suitable.
            // therefore for STREAMING_IMPUTE the timestamps values are not predicted
            if (mode != ForestMode.STREAMING_IMPUTE) {
                double timeGap = getTimeDrift();
                double timeBound = 1.3 * getTimeGapDifference();
                for (int i = 0; i < horizon; i++) {
                    // chain each predicted timestamp off the previous one
                    timedRangeVector.timeStamps[i] = inverseMapTimeValue(timeGap, localTimeStamp);
                    timedRangeVector.upperTimeStamps[i] = max(timedRangeVector.timeStamps[i],
                            inverseMapTimeValue(timeGap + timeBound, localTimeStamp));
                    timedRangeVector.lowerTimeStamps[i] = min(timedRangeVector.timeStamps[i],
                            inverseMapTimeValue(max(0, timeGap - timeBound), localTimeStamp));
                    localTimeStamp = timedRangeVector.timeStamps[i];
                }
            }
        } else {
            if (gap <= shingleSize && lastAnomalyDescriptor.getExpectedRCFPoint() != null && gap == 1) {
                // immediately after an anomaly, anchor on the corrected timestamp
                localTimeStamp = lastAnomalyDescriptor.getExpectedTimeStamp();
            }
            timedRangeVector = new TimedRangeVector(inputLength * horizon, horizon);
            for (int i = 0; i < horizon; i++) {
                // strip the (last) time coordinate out of each forecast block
                for (int j = 0; j < inputLength; j++) {
                    timedRangeVector.rangeVector.values[i * inputLength + j] = ranges.values[i * baseDimension + j];
                    timedRangeVector.rangeVector.upper[i * inputLength + j] = ranges.upper[i * baseDimension + j];
                    timedRangeVector.rangeVector.lower[i * inputLength + j] = ranges.lower[i * baseDimension + j];
                }
                // time gaps are clamped at 0 (time cannot run backwards)
                timedRangeVector.timeStamps[i] = inverseMapTimeValue(
                        max(ranges.values[i * baseDimension + inputLength], 0), localTimeStamp);
                timedRangeVector.upperTimeStamps[i] = max(timedRangeVector.timeStamps[i],
                        inverseMapTimeValue(max(ranges.upper[i * baseDimension + inputLength], 0), localTimeStamp));
                timedRangeVector.lowerTimeStamps[i] = min(timedRangeVector.timeStamps[i],
                        inverseMapTimeValue(max(ranges.lower[i * baseDimension + inputLength], 0), localTimeStamp));
                // NOTE(review): this branch advances the anchor by the upper
                // timestamp, whereas the non-augmented branch uses timeStamps[i];
                // confirm the asymmetry is intentional
                localTimeStamp = timedRangeVector.upperTimeStamps[i];
            }
        }
        // the following is the post-anomaly transformation, can be impacted by
        // anomalies
        transformer.invertForecastRange(timedRangeVector.rangeVector, inputLength, getShingledInput(shingleSize - 1),
                correction);
        return timedRangeVector;
    }
/**
* given an input produces a scaled transform to be used in the forest
*
* @param input the actual input seen
* @param timestamp timestamp of said input
* @param defaults default statistics, potentially used in
* initialization
* @param defaultTimeFactor default time statistic
* @return a scaled/transformed input which can be used in the forest
*/
protected double[] getScaledInput(double[] input, long timestamp, Deviation[] defaults, double defaultTimeFactor) {
double[] previous = (input.length == lastShingledInput.length) ? lastShingledInput
: getShingledInput(shingleSize - 1);
double[] scaledInput = transformer.transformValues(internalTimeStamp, input, previous, defaults, clipFactor);
if (mode == ForestMode.TIME_AUGMENTED) {
scaledInput = augmentTime(scaledInput, timestamp, defaultTimeFactor);
}
return scaledInput;
}
/**
* updates the various shingles
*
* @param inputPoint the input point
* @param scaledPoint the scaled/transformed point which is used in the RCF
*/
protected void updateShingle(double[] inputPoint, double[] scaledPoint) {
if (inputPoint.length == lastShingledInput.length) {
lastShingledInput = Arrays.copyOf(inputPoint, inputPoint.length);
} else {
shiftLeft(lastShingledInput, inputPoint.length);
copyAtEnd(lastShingledInput, inputPoint);
}
if (scaledPoint.length == lastShingledPoint.length) {
lastShingledPoint = Arrays.copyOf(scaledPoint, scaledPoint.length);
} else {
shiftLeft(lastShingledPoint, scaledPoint.length);
copyAtEnd(lastShingledPoint, scaledPoint);
}
}
/**
* updates timestamps
*
* @param timestamp the timestamp of the current input
*/
protected void updateTimestamps(long timestamp) {
for (int i = 0; i < shingleSize - 1; i++) {
previousTimeStamps[i] = previousTimeStamps[i + 1];
}
previousTimeStamps[shingleSize - 1] = timestamp;
++internalTimeStamp;
}
protected void updateTimeStampDeviations(long timestamp, long previous) {
if (timeStampDeviations != null) {
timeStampDeviations[0].update(timestamp);
timeStampDeviations[1].update(timestamp - previous);
// smoothing - not used currently
timeStampDeviations[2].update(timeStampDeviations[0].getDeviation());
timeStampDeviations[3].update(timeStampDeviations[1].getMean());
timeStampDeviations[4].update(timeStampDeviations[1].getDeviation());
}
}
    // scale used when normalizing time gaps; bounded away from 0 by the additive 1.0
    double getTimeScale() {
        return 1.0 + getTimeGapDifference();
    }
    // smoothed deviation of the inter-arrival gaps (slot 4 is updated with the
    // deviation of slot 1 in updateTimeStampDeviations)
    double getTimeGapDifference() {
        return Math.abs(timeStampDeviations[4].getMean());
    }
    // discounted average of the inter-arrival gaps (slot 1)
    double getTimeShift() {
        return timeStampDeviations[1].getMean();
    }
    // smoothed average of the inter-arrival gaps (slot 3 is updated with the mean
    // of slot 1); used as the drift estimate in forecasts
    double getTimeDrift() {
        return timeStampDeviations[3].getMean();
    }
/**
* updates the state of the preprocessor
*
* @param inputPoint the actual input
* @param scaledInput the transformed input
* @param timestamp the timestamp of the input
* @param previous the previous timestamp
*/
protected void updateState(double[] inputPoint, double[] scaledInput, long timestamp, long previous) {
updateTimeStampDeviations(timestamp, previous);
updateTimestamps(timestamp);
double[] previousInput = (inputLength == lastShingledInput.length) ? lastShingledInput
: getShingledInput(shingleSize - 1);
transformer.updateDeviation(inputPoint, previousInput);
updateShingle(inputPoint, scaledInput);
}
/**
* copies at the end for a shingle
*
* @param array shingled array
* @param small new small array
*/
protected void copyAtEnd(double[] array, double[] small) {
checkArgument(array.length > small.length, " incorrect operation ");
System.arraycopy(small, 0, array, array.length - small.length, small.length);
}
// a utility function
protected double[] copyIfNotnull(double[] array) {
return array == null ? null : Arrays.copyOf(array, array.length);
}
// left shifting used for the shingles
protected void shiftLeft(double[] array, int baseDimension) {
for (int i = 0; i < array.length - baseDimension; i++) {
array[i] = array[i + baseDimension];
}
}
/**
* maps a value shifted to the current mean or to a relative space for time
*
* @return the normalized value
*/
protected double normalize(double value, double factor) {
double currentFactor = (factor != 0) ? factor : getTimeScale();
if (value - getTimeShift() >= NORMALIZATION_SCALING_FACTOR * clipFactor
* (currentFactor + DEFAULT_NORMALIZATION_PRECISION)) {
return clipFactor;
}
if (value - getTimeShift() <= -NORMALIZATION_SCALING_FACTOR * clipFactor
* (currentFactor + DEFAULT_NORMALIZATION_PRECISION)) {
return -clipFactor;
} else {
// deviation cannot be 0
return (value - getTimeShift())
/ (NORMALIZATION_SCALING_FACTOR * (currentFactor + DEFAULT_NORMALIZATION_PRECISION));
}
}
/**
* augments (potentially normalized) input with time (which is always
* differenced)
*
* @param normalized (potentially normalized) input point
* @param timestamp timestamp of current point
* @param timeFactor a factor used in normalizing time
* @return a tuple with one extra field
*/
protected double[] augmentTime(double[] normalized, long timestamp, double timeFactor) {
double[] scaledInput = new double[normalized.length + 1];
System.arraycopy(normalized, 0, scaledInput, 0, normalized.length);
if (valuesSeen <= 1) {
scaledInput[normalized.length] = 0;
} else {
double timeShift = timestamp - previousTimeStamps[shingleSize - 1];
scaledInput[normalized.length] = weightTime
* ((normalizeTime) ? normalize(timeShift, timeFactor) : timeShift);
}
return scaledInput;
}
// mapper
public long[] getInitialTimeStamps() {
return (initialTimeStamps == null) ? null : Arrays.copyOf(initialTimeStamps, initialTimeStamps.length);
}
// mapper
public void setInitialTimeStamps(long[] values) {
initialTimeStamps = (values == null) ? null : Arrays.copyOf(values, values.length);
}
// mapper
public double[][] getInitialValues() {
if (initialValues == null) {
return null;
} else {
double[][] result = new double[initialValues.length][];
for (int i = 0; i < initialValues.length; i++) {
result[i] = copyIfNotnull(initialValues[i]);
}
return result;
}
}
// mapper
public void setInitialValues(double[][] values) {
if (values == null) {
initialValues = null;
} else {
initialValues = new double[values.length][];
for (int i = 0; i < values.length; i++) {
initialValues[i] = copyIfNotnull(values[i]);
}
}
}
// mapper
public double[] getLastShingledInput() {
return copyIfNotnull(lastShingledInput);
}
// mapper
public void setLastShingledInput(double[] point) {
lastShingledInput = copyIfNotnull(point);
}
// mapper
public void setPreviousTimeStamps(long[] values) {
if (values == null) {
numberOfImputed = shingleSize;
previousTimeStamps = null;
} else {
checkArgument(values.length == shingleSize, " incorrect length ");
previousTimeStamps = Arrays.copyOf(values, values.length);
numberOfImputed = 0;
for (int i = 0; i < previousTimeStamps.length - 1; i++) {
if (previousTimeStamps[i] == previousTimeStamps[i + 1]) {
++numberOfImputed;
}
}
}
}
// mapper
public Deviation[] getTimeStampDeviations() {
return timeStampDeviations;
}
// mapper
public long[] getPreviousTimeStamps() {
return (previousTimeStamps == null) ? null : Arrays.copyOf(previousTimeStamps, previousTimeStamps.length);
}
public Deviation[] getDeviationList() {
return transformer.getDeviations();
}
public double getTimeDecay() {
return transformDecay;
}
/**
* used in mapper; augments weightTime to the weights array to produce a single
* array of length inputLength + 1
*/
public double[] getWeights() {
double[] basic = transformer.getWeights();
double[] answer = new double[inputLength + 1];
Arrays.fill(answer, 1.0);
if (basic != null) {
checkArgument(basic.length == inputLength, " incorrect length returned");
System.arraycopy(basic, 0, answer, 0, inputLength);
}
answer[inputLength] = weightTime;
return answer;
}
// mapper
public double[] getDefaultFill() {
return copyIfNotnull(defaultFill);
}
// mapper
public void setDefaultFill(double[] values) {
defaultFill = copyIfNotnull(values);
}
// mapper
public long getTimeStamp(int index) {
return previousTimeStamps[index];
}
    /**
     * Static factory for the fluent builder.
     *
     * @return a new builder with default settings.
     */
    public static Builder<?> builder() {
        return new Builder<>();
    }
public static class Builder<T extends Builder<T>> {
// We use Optional types for optional primitive fields when it doesn't make
// sense to use a constant default.
protected int dimensions;
protected int startNormalization = DEFAULT_START_NORMALIZATION;
protected Integer stopNormalization = DEFAULT_STOP_NORMALIZATION;
protected double timeDecay;
protected Optional<Long> randomSeed = Optional.empty();
protected int shingleSize = DEFAULT_SHINGLE_SIZE;
protected double anomalyRate = 0.01;
protected TransformMethod transformMethod = TransformMethod.NONE;
protected ImputationMethod imputationMethod = PREVIOUS;
protected ForestMode forestMode = ForestMode.STANDARD;
protected int inputLength;
protected boolean normalizeTime = false;
protected double[] fillValues = null;
protected double[] weights = null;
protected double weightTime = 1.0;
protected ThresholdedRandomCutForest thresholdedRandomCutForest = null;
protected Optional<Double> useImputedFraction = Optional.empty();
protected Optional<Deviation[]> deviations = Optional.empty();
protected Optional<Deviation[]> timeDeviations = Optional.empty();
protected Optional<Deviation[]> dataQuality = Optional.empty();
public Preprocessor build() {
if (forestMode == ForestMode.STREAMING_IMPUTE) {
return new ImputePreprocessor(this);
} else if (requireInitialSegment(normalizeTime, transformMethod, forestMode)) {
return new InitialSegmentPreprocessor(this);
}
return new Preprocessor(this);
}
public T dimensions(int dimensions) {
this.dimensions = dimensions;
return (T) this;
}
public T inputLength(int inputLength) {
this.inputLength = inputLength;
return (T) this;
}
public T startNormalization(int startNormalization) {
this.startNormalization = startNormalization;
return (T) this;
}
public T stopNormalization(Integer stopNormalization) {
this.stopNormalization = stopNormalization;
return (T) this;
}
public T shingleSize(int shingleSize) {
this.shingleSize = shingleSize;
return (T) this;
}
public T timeDecay(double timeDecay) {
this.timeDecay = timeDecay;
return (T) this;
}
public T useImputedFraction(double fraction) {
this.useImputedFraction = Optional.of(fraction);
return (T) this;
}
public T randomSeed(long randomSeed) {
this.randomSeed = Optional.of(randomSeed);
return (T) this;
}
public T imputationMethod(ImputationMethod imputationMethod) {
this.imputationMethod = imputationMethod;
return (T) this;
}
public T fillValues(double[] values) {
// values can be null
this.fillValues = (values == null) ? null : Arrays.copyOf(values, values.length);
return (T) this;
}
public T weights(double[] values) {
// values can be null
this.weights = (values == null) ? null : Arrays.copyOf(values, values.length);
return (T) this;
}
public T weightTime(double value) {
this.weightTime = value;
return (T) this;
}
public T normalizeTime(boolean normalizeTime) {
this.normalizeTime = normalizeTime;
return (T) this;
}
public T transformMethod(TransformMethod method) {
this.transformMethod = method;
return (T) this;
}
public T forestMode(ForestMode forestMode) {
this.forestMode = forestMode;
return (T) this;
}
// mapper
public T deviations(Deviation[] deviations) {
this.deviations = Optional.ofNullable(deviations);
return (T) this;
}
// mapper
public T dataQuality(Deviation[] dataQuality) {
this.dataQuality = Optional.ofNullable(dataQuality);
return (T) this;
}
// mapper
public T timeDeviations(Deviation[] timeDeviations) {
this.timeDeviations = Optional.ofNullable(timeDeviations);
return (T) this;
}
}
}
| 613 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/NormalizedDifferenceTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.RangeVector;
@Getter
@Setter
public class NormalizedDifferenceTransformer extends NormalizedTransformer {
    /**
     * Creates a transformer that first differences consecutive inputs and then
     * normalizes the differences using the supplied deviation statistics.
     *
     * @param weights   per-variable weights applied after normalization
     * @param deviation the discounted statistics used for shift and scale
     */
    public NormalizedDifferenceTransformer(double[] weights, Deviation[] deviation) {
        super(weights, deviation);
    }
    /**
     * Inverts the normalization (via the superclass) and then the differencing:
     * the previous input is added back to each coordinate.
     *
     * @param values        what the RCF would like to observe
     * @param previousInput the previously observed input used to undo differencing
     * @return the observations that would (approximately) transform to values[]
     */
    @Override
    public double[] invert(double[] values, double[] previousInput) {
        double[] output = super.invert(values, previousInput);
        for (int i = 0; i < values.length; i++) {
            output[i] += previousInput[i];
        }
        return output;
    }
    /**
     * inverts a forecast (and upper and lower limits) provided by RangeVector range
     * the values are scaled by the factor used in the transformation note that the
     * expected difference maintained in deviation[j + inputLength] is added for
     * each attribute j, once for each iteration; and the resulting value is added
     * back as an inverse of the differencing operation.
     *
     * @param ranges        provides p50 values with upper and lower estimates
     * @param baseDimension the number of variables being forecast (often 1)
     * @param previousInput the last input of length baseDimension
     */
    @Override
    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,
            double[] correction) {
        int inputLength = weights.length;
        int horizon = ranges.values.length / baseDimension;
        // running base for undoing the differencing, seeded by the last observation;
        // each horizon step chains on the inverted p50 value of the previous step,
        // so the statement order inside the loop matters
        double[] last = Arrays.copyOf(previousInput, previousInput.length);
        checkArgument(correction.length >= inputLength, " incorrect length ");
        for (int i = 0; i < horizon; i++) {
            for (int j = 0; j < inputLength; j++) {
                // undo the weighting/normalization first (weight 0 collapses to 0)
                double weight = (weights[j] == 0) ? 0 : getScale(j, deviations) / weights[j];
                ranges.scale(i * baseDimension + j, (float) weight);
                // then undo the differencing by shifting with the running base
                double shift = last[j] + getShift(j, deviations);
                ranges.shift(i * baseDimension + j, (float) shift);
                last[j] = ranges.values[i * baseDimension + j];
            }
        }
    }
    /**
     * a transformation that differences and then normalizes the results of
     * multivariate values
     *
     * @param internalTimeStamp timestamp corresponding to this operation; used to
     *                          ensure smoothness at 0
     * @param inputPoint        the actual input
     * @param previousInput     the previous input
     * @param initials          an array containing normalization statistics, used
     *                          only for the initial segment; otherwise it is null
     * @param clipFactor        the factor used in clipping the normalized values
     * @return the transformed values to be shingled and used in RCF
     */
    @Override
    public double[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,
            Deviation[] initials, double clipFactor) {
        double[] input = new double[inputPoint.length];
        for (int i = 0; i < input.length; i++) {
            // the very first point has no predecessor; difference is defined as 0
            input[i] = (internalTimeStamp == 0) ? 0 : inputPoint[i] - previousInput[i];
        }
        // previousInput is passed as null since the differencing is already applied
        return super.transformValues(internalTimeStamp, input, null, initials, clipFactor);
    }
    /**
     * Shift for differenced data includes the previous observation, since the
     * transform is relative to it.
     */
    @Override
    public double[] getShift(double[] previous) {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = getShift(i, deviations) + previous[i];
        }
        return answer;
    }
    // shift i: discounted mean of the single step differences (slot weights.length + i)
    @Override
    protected double getShift(int i, Deviation[] devs) {
        return devs[i + weights.length].getMean();
    }
    // scale i: deviation of the single step differences, bounded away from 0
    @Override
    protected double getScale(int i, Deviation[] devs) {
        return (devs[i + weights.length].getDeviation() + 1.0);
    }
}
| 614 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/ITransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * ThresholdedRCF allows transformers that transform the data in a streaming
 * manner; invoke RCF on the transformed data; and invert the results to the
 * original input space. Typical examples are differencing,
 * (streaming/stochastic) normalization, etc.
 *
 * This interface class spells out the operations required from such
 * transformers. Some operations below are specific to the existing
 * implementation and required by the mappers to produce state classes.
 */
public interface ITransformer {
    /**
     * Required by the mapper; corresponds to giving each input column/attribute a
     * weight different from 1.0 -- changing these weights can alter the RCF
     * predictions significantly. These weights should be informed by the domain
     * and the intent of the overall computation.
     *
     * @return the per-variable weights
     */
    double[] getWeights();
    /**
     * Reverse of {@link #getWeights()}, used in mappers.
     *
     * @param weights the per-variable weights
     */
    void setWeights(double[] weights);
    /**
     * Used in mappers; stores basic discounted averages and discounted (single
     * step) differenced averages.
     *
     * @return the discounted statistics maintained by the transformer
     */
    Deviation[] getDeviations();
    /**
     * If the RCF expects the given values[] after the corresponding previous
     * input, then what alternative input would have been transformed into
     * values[]?
     *
     * @param values        values in the transformed (RCF) space
     * @param previousInput the previously observed (or imputed) input
     * @return the input-space observations corresponding to values[]
     */
    double[] invert(double[] values, double[] previousInput);
    /**
     * Similar to {@link #invert} but applies to a forecast provided by RangeVector
     * over an input length (number of variables in a multivariate analysis)
     * baseDimension; previousInput[] corresponds to the last observed values of
     * those inputs, and correction is the effect of the last anomaly.
     */
    void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput, double[] correction);
    /**
     * Updates the internal data structures based on the current (multivariate)
     * input inputPoint; previousInput[] holds the last observed values.
     */
    void updateDeviation(double[] inputPoint, double[] previousInput);
    /**
     * Transforms inputPoint[] to RCF space. Non-null values of initials[] are used
     * in normalization and are specific to this implementation; internalTimeStamp
     * corresponds to the sequence number of the input and clipFactor is a
     * parameter that clips any normalization.
     */
    double[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput, Deviation[] initials,
            double clipFactor);
    /**
     * Used for converting RCF representations to actuals, used in
     * predictor-corrector.
     */
    double[] getShift(double[] previous);
    /**
     * Used for converting RCF representations to actuals, used in
     * predictor-corrector.
     */
    double[] getScale();
    /** Used for computing errors in RCFcaster before the model is calibrated. */
    double[] getSmoothedDeviations();
    /** Used for determining noise. */
    double[] getSmoothedDifferenceDeviations();
}
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/NormalizedTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
/**
 * A transformer that normalizes each input variable with a discounted mean
 * (shift) and a smoothed deviation (scale), and clips the normalized value.
 */
@Getter
@Setter
public class NormalizedTransformer extends WeightedTransformer {
    public NormalizedTransformer(double[] weights, Deviation[] deviation) {
        super(weights, deviation);
    }
    // normalization is clipped: pass the caller's clip factor through, instead of
    // Double.MAX_VALUE used by WeightedTransformer (which effectively disables clipping)
    protected double clipValue(double clipfactor) {
        return clipfactor;
    }
    // scale i: smoothed deviation of variable i (slot 2 * weights.length + i),
    // bounded away from 0 by the additive 1.0
    protected double getScale(int i, Deviation[] devs) {
        return (Math.abs(devs[i + 2 * weights.length].getMean()) + 1.0);
    }
    // shift i: discounted mean of variable i
    protected double getShift(int i, Deviation[] devs) {
        return devs[i].getMean();
    }
}
| 616 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/WeightedTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * A weighted transformer maintains NUMBER_OF_STATS (currently 5) discounted
 * statistics for each of the X input variables, packed into a single array of
 * Deviations of length 5 * X. For variable i the slots are:
 *
 * i : discounted average of the variable; X + i : discounted average of the
 * single step differences; 2X + i : discounted average of the deviation of the
 * variable (second order information); 3X + i : discounted average of the mean
 * of the single step differences (used as a drift estimate); 4X + i :
 * discounted average of the deviation of the single step differences.
 *
 * These quantities together can help answer a number of estimation questions of
 * a time series, and in particular help solve for simple linear drifts. Even
 * though the discounted averages are not obviously required -- they are useful
 * in forecasts.
 */
@Getter
@Setter
public class WeightedTransformer implements ITransformer {
    // number of Deviation slots maintained per input variable; final because the
    // layout of the deviations array is fixed by the constructor check below
    public static final int NUMBER_OF_STATS = 5;
    double[] weights;
    Deviation[] deviations;
    /**
     * Creates a transformer; both arrays are defensively copied.
     *
     * @param weights    the per-variable weights (length X)
     * @param deviations the NUMBER_OF_STATS * X discounted statistics
     */
    public WeightedTransformer(double[] weights, Deviation[] deviations) {
        checkArgument(NUMBER_OF_STATS * weights.length == deviations.length, "incorrect lengths");
        this.weights = Arrays.copyOf(weights, weights.length);
        this.deviations = new Deviation[deviations.length];
        for (int i = 0; i < deviations.length; i++) {
            checkArgument(deviations[i] != null, "cannot be null");
            this.deviations[i] = deviations[i].copy();
        }
    }
    /**
     * the inversion does not require previousInput; note that weight == 0, would
     * produce 0 values in the inversion
     *
     * @param values        what the RCF would like to observe
     * @param previousInput what was the real (or previously imputed) observation
     * @return the observations that would (approximately) transform to values[]
     */
    @Override
    public double[] invert(double[] values, double[] previousInput) {
        double[] output = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            output[i] = (weights[i] == 0) ? 0 : values[i] * getScale(i, deviations) / weights[i];
            output[i] += getShift(i, deviations);
        }
        return output;
    }
    /**
     * inverts a forecast (and upper and lower limits) provided by RangeVector
     * range; each horizon step i is shifted by (i + 1) times the drift estimate
     *
     * @param ranges        provides p50 values with upper and lower estimates
     * @param baseDimension the number of variables being forecast (often 1)
     * @param previousInput the last input of length baseDimension
     * @param correction    correction due to last anomaly; NOTE(review): only
     *                      length-checked here -- confirm whether subclasses or
     *                      future revisions are expected to apply it
     */
    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,
            double[] correction) {
        int horizon = ranges.values.length / baseDimension;
        int inputLength = weights.length;
        checkArgument(correction.length >= inputLength, " incorrect length ");
        for (int i = 0; i < horizon; i++) {
            for (int j = 0; j < inputLength; j++) {
                double weight = (weights[j] == 0) ? 0 : getScale(j, deviations) / weights[j];
                ranges.scale(i * baseDimension + j, (float) weight);
                ranges.shift(i * baseDimension + j,
                        (float) (getShift(j, deviations) + (i + 1) * getDrift(j, deviations)));
            }
        }
    }
    /**
     * updates the NUMBER_OF_STATS * inputPoint.length statistics; see the class
     * documentation for the meaning of each slot
     *
     * @param inputPoint    the input seen by TRCF
     * @param previousInput the previous input
     */
    public void updateDeviation(double[] inputPoint, double[] previousInput) {
        checkArgument(inputPoint.length * NUMBER_OF_STATS == deviations.length, "incorrect lengths");
        checkArgument(inputPoint.length == previousInput.length, " lengths must match");
        for (int i = 0; i < inputPoint.length; i++) {
            deviations[i].update(inputPoint[i]);
            // the very first update has no meaningful predecessor; difference is 0
            if (deviations[i + inputPoint.length].getCount() == 0) {
                deviations[i + inputPoint.length].update(0);
            } else {
                deviations[i + inputPoint.length].update(inputPoint[i] - previousInput[i]);
            }
            deviations[i + 2 * inputPoint.length].update(deviations[i].getDeviation());
            deviations[i + 3 * inputPoint.length].update(deviations[i + inputPoint.length].getMean());
            deviations[i + 4 * inputPoint.length].update(deviations[i + inputPoint.length].getDeviation());
        }
    }
    /**
     * a normalization function
     *
     * @param value      argument to be normalized
     * @param shift      the shift in the value
     * @param scale      the scaling factor; must be strictly positive
     * @param clipFactor the output value is bound is in [-clipFactor,clipFactor]
     * @return the normalized value
     */
    protected double normalize(double value, double shift, double scale, double clipFactor) {
        // message fixed: the check is for strictly positive scale
        checkArgument(scale > 0, " scale must be positive");
        double t = (value - shift) / (scale);
        if (t >= clipFactor) {
            return clipFactor;
        }
        if (t < -clipFactor) {
            return -clipFactor;
        }
        return t;
    }
    /**
     * a transformation that normalizes the multivariate values
     *
     * @param internalTimeStamp timestamp corresponding to this operation; used to
     *                          ensure smoothness at 0
     * @param inputPoint        the actual input
     * @param previousInput     the previous input
     * @param initials          an array containing normalization statistics, used
     *                          only for the initial segment; otherwise it is null
     * @param clipFactor        the factor used in clipping the normalized values
     * @return the transformed values to be shingled and used in RCF
     */
    @Override
    public double[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,
            Deviation[] initials, double clipFactor) {
        double[] output = new double[inputPoint.length];
        for (int i = 0; i < inputPoint.length; i++) {
            Deviation[] devs = (initials == null) ? deviations : initials;
            output[i] = weights[i]
                    * normalize(inputPoint[i], getShift(i, devs), getScale(i, devs), clipValue(clipFactor));
        }
        return output;
    }
    // base class effectively disables clipping; subclasses narrow this
    protected double clipValue(double clipfactor) {
        return Double.MAX_VALUE;
    }
    // defensive copies of all the discounted statistics
    public Deviation[] getDeviations() {
        Deviation[] answer = new Deviation[deviations.length];
        for (int i = 0; i < deviations.length; i++) {
            answer[i] = deviations[i].copy();
        }
        return answer;
    }
    public double[] getWeights() {
        return Arrays.copyOf(weights, weights.length);
    }
    public void setWeights(double[] weights) {
        checkArgument(weights.length == this.weights.length, " incorrect length");
        this.weights = Arrays.copyOf(weights, weights.length);
    }
    // base class applies no scaling; subclasses override
    protected double getScale(int i, Deviation[] devs) {
        return (1.0);
    }
    // base class applies no shift; subclasses override
    protected double getShift(int i, Deviation[] devs) {
        return 0;
    }
    // drift estimate: smoothed mean of the single step differences (slot 3X + i)
    protected double getDrift(int i, Deviation[] devs) {
        return devs[i + 3 * weights.length].getMean();
    }
    @Override
    public double[] getScale() {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = (weights[i] == 0) ? 0 : getScale(i, deviations) / weights[i];
        }
        return answer;
    }
    @Override
    public double[] getShift(double[] previous) {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = getShift(i, deviations);
        }
        return answer;
    }
    // smoothed per-variable deviations (slot 2X + i)
    public double[] getSmoothedDeviations() {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = Math.abs(deviations[i + 2 * weights.length].getMean());
        }
        return answer;
    }
    // smoothed deviations of the single step differences (slot 4X + i)
    public double[] getSmoothedDifferenceDeviations() {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = Math.abs(deviations[i + 4 * weights.length].getMean());
        }
        return answer;
    }
}
| 617 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/SubtractMATransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
/**
 * A {@link WeightedTransformer} that additionally subtracts a moving average:
 * the per-dimension shift is the running mean tracked by the deviation
 * statistics, so transformed values are centered around the recent mean.
 */
@Getter
@Setter
public class SubtractMATransformer extends WeightedTransformer {

    /**
     * @param weights    per-dimension weights applied by the base transformer
     * @param deviations running deviation statistics shared with the base class
     */
    public SubtractMATransformer(double[] weights, Deviation[] deviations) {
        super(weights, deviations);
    }

    /**
     * The shift for dimension i is the current running mean of that dimension
     * (first group of the deviation array).
     */
    @Override
    protected double getShift(int i, Deviation[] devs) {
        return devs[i].getMean();
    }
}
| 618 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/DifferenceTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * A {@link WeightedTransformer} over first differences: each point is stored as
 * the (weighted) difference from the previous input, and inversion adds the
 * previous input back.
 */
@Getter
@Setter
public class DifferenceTransformer extends WeightedTransformer {

    public DifferenceTransformer(double[] weights, Deviation[] deviation) {
        super(weights, deviation);
    }

    /**
     * Inverts the differencing: after the base-class inversion, the previous
     * input is added back to recover the original value.
     *
     * @param values        transformed values to invert
     * @param previousInput the input observed immediately before this point
     * @return the reconstructed input values
     */
    @Override
    public double[] invert(double[] values, double[] previousInput) {
        double[] output = super.invert(values, previousInput);
        for (int i = 0; i < values.length; i++) {
            output[i] += previousInput[i];
        }
        return output;
    }

    /**
     * inverts a forecast (and upper and lower limits) provided by RangeVector range
     * the values are scaled by the factor used in the transformation for each
     * iteration; and the resulting value is added back as an inverse of the
     * differencing operation.
     *
     * @param ranges        provides p50 values with upper and lower estimates
     * @param baseDimension the number of variables being forecast (often 1)
     * @param previousInput the last input of length baseDimension
     * @param correction    per-dimension correction terms; must have at least
     *                      weights.length entries
     */
    @Override
    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,
            double[] correction) {
        int inputLength = weights.length;
        int horizon = ranges.values.length / baseDimension;
        double[] last = Arrays.copyOf(previousInput, previousInput.length);
        checkArgument(correction.length >= inputLength, " incorrect length ");
        for (int i = 0; i < horizon; i++) {
            for (int j = 0; j < inputLength; j++) {
                float weight = (weights[j] == 0) ? 0f : 1.0f / (float) weights[j];
                ranges.scale(i * baseDimension + j, weight);
                ranges.shift(i * baseDimension + j, (float) (getShift(j, deviations) + last[j]));
                // BUG FIX: carry forward the value just reconstructed for this
                // horizon step. The previous code read ranges.values[j], which
                // left `last` pinned to the first step's value and corrupted
                // the cumulative inversion for horizon >= 3.
                last[j] = ranges.values[i * baseDimension + j];
            }
        }
    }

    /**
     * Transforms a point by first differencing it against the previous input
     * (zero for the very first point) and then applying the base weighted
     * transform.
     */
    @Override
    public double[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,
            Deviation[] initials, double clipFactor) {
        double[] input = new double[inputPoint.length];
        for (int i = 0; i < input.length; i++) {
            input[i] = (internalTimeStamp == 0) ? 0 : (inputPoint[i] - previousInput[i]);
        }
        // the difference has already been taken above; the base transform does
        // not need the previous input again
        return super.transformValues(internalTimeStamp, input, null, initials, clipFactor);
    }

    /**
     * The shift of the inverse transformation includes the previous input,
     * because the forward transformation subtracted it.
     */
    @Override
    public double[] getShift(double[] previous) {
        double[] answer = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            answer[i] = getShift(i, deviations) + previous[i];
        }
        return answer;
    }
}
| 619 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/Example.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples;
/**
 * Contract for a runnable, self-describing example. {@code Main} registers
 * implementations and selects one on the command line by the token returned
 * from {@link #command()}.
 */
public interface Example {

    /** @return the command-line token used to select this example */
    String command();

    /** @return a one-line human-readable description for the usage message */
    String description();

    /**
     * Executes the example.
     *
     * @throws Exception if the example fails for any reason
     */
    void run() throws Exception;
}
| 620 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/Main.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples;
import java.util.Map;
import java.util.TreeMap;
import com.amazon.randomcutforest.examples.dynamicinference.DynamicDensity;
import com.amazon.randomcutforest.examples.dynamicinference.DynamicNearNeighbor;
import com.amazon.randomcutforest.examples.serialization.JsonExample;
import com.amazon.randomcutforest.examples.serialization.ProtostuffExample;
/**
 * Command-line dispatcher: registers the known examples and runs the one named
 * by the first program argument, or prints a usage listing.
 */
public class Main {

    public static final String ARCHIVE_NAME = "randomcutforest-examples-1.0.jar";

    /** Registered examples keyed (and sorted) by their command token. */
    private final Map<String, Example> examples;

    /** Length of the longest command token, used to align the usage listing. */
    private int maxCommandLength;

    public static void main(String[] args) throws Exception {
        new Main().run(args);
    }

    public Main() {
        examples = new TreeMap<>();
        maxCommandLength = 0;
        add(new JsonExample());
        add(new ProtostuffExample());
        add(new DynamicDensity());
        add(new DynamicNearNeighbor());
    }

    /** Registers an example and keeps track of the widest command token. */
    private void add(Example example) {
        String key = example.command();
        examples.put(key, example);
        maxCommandLength = Math.max(maxCommandLength, key.length());
    }

    /**
     * Dispatches to the example named by {@code args[0]}, or prints usage when
     * no argument (or a help flag) is given.
     *
     * @throws IllegalArgumentException if the named example is not registered
     */
    public void run(String[] args) throws Exception {
        if (args == null || args.length < 1 || args[0].equals("-h") || args[0].equals("--help")) {
            printUsage();
            return;
        }
        Example chosen = examples.get(args[0]);
        if (chosen == null) {
            throw new IllegalArgumentException("No such example: " + args[0]);
        }
        chosen.run();
    }

    /** Prints the invocation line followed by one aligned row per example. */
    public void printUsage() {
        System.out.printf("Usage: java -cp %s [example]%n", ARCHIVE_NAME);
        System.out.println("Examples:");
        // same result as String.format("\t %%%ds - %%s%%n", maxCommandLength)
        String rowFormat = "\t %" + maxCommandLength + "s - %s%n";
        for (Example example : examples.values()) {
            System.out.printf(rowFormat, example.command(), example.description());
        }
    }
}
| 621 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicinference/DynamicNearNeighbor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.dynamicinference;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.generate;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;
import static java.lang.Math.PI;
import java.io.BufferedWriter;
import java.io.FileWriter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.examples.Example;
public class DynamicNearNeighbor implements Example {

    public static void main(String[] args) throws Exception {
        new DynamicNearNeighbor().run();
    }

    @Override
    public String command() {
        return "dynamic_near_neighbor";
    }

    @Override
    public String description() {
        return "shows an example of dynamic near neighbor computation where both the data and query are "
                + "evolving in time";
    }

    /**
     * Streams a rotating point cloud into the forest and, once per rotation
     * step, writes the moving query point together with the vector towards its
     * nearest sampled neighbor.
     *
     * @throws Exception if writing the output file fails
     */
    @Override
    public void run() throws Exception {
        int newDimensions = 2;
        long randomSeed = 123;
        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)
                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)
                .storeSequenceIndexesEnabled(true).build();

        String name = "dynamic_near_neighbor_example";
        // BUG FIX: the writer was never closed before, leaking the file handle
        // and potentially losing buffered output; try-with-resources guarantees
        // flush + close on every exit path.
        try (BufferedWriter file = new BufferedWriter(new FileWriter(name))) {
            double[][] data = generate(1000);
            double[] queryPoint = new double[] { 0.5, 0.6 };
            for (int degree = 0; degree < 360; degree += 2) {
                for (double[] datum : data) {
                    double[] transformed = rotateClockWise(datum, -2 * PI * degree / 360);
                    file.append(transformed[0] + " " + transformed[1] + "\n");
                    newForest.update(transformed);
                }
                file.append("\n");
                file.append("\n");
                // the query rotates faster than the data (3x vs 2x)
                double[] movingQuery = rotateClockWise(queryPoint, -3 * PI * degree / 360);
                float[] neighbor = newForest.getNearNeighborsInSample(movingQuery, 1).get(0).point;
                file.append(movingQuery[0] + " " + movingQuery[1] + " " + (neighbor[0] - movingQuery[0]) + " "
                        + (neighbor[1] - movingQuery[1]) + "\n");
                file.append("\n");
                file.append("\n");
            }
        }
    }
}
| 622 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicinference/DynamicDensity.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.dynamicinference;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.generate;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;
import static java.lang.Math.PI;
import java.io.BufferedWriter;
import java.io.FileWriter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.returntypes.DensityOutput;
public class DynamicDensity implements Example {

    public static void main(String[] args) throws Exception {
        new DynamicDensity().run();
    }

    @Override
    public String command() {
        return "dynamic_sampling";
    }

    @Override
    public String description() {
        return "shows two potential use of dynamic density computations; estimating density as well "
                + "as its directional components";
    }

    /**
     * plot the dynamic_density_example using any tool in gnuplot one can plot the
     * directions to higher density via do for [i=0:358:2] {plot
     * "dynamic_density_example" index (i+1) u 1:2:3:4 w vectors t ""} or the raw
     * density at the points via do for [i=0:358:2] {plot "dynamic_density_example"
     * index i w p pt 7 palette t ""}
     *
     * @throws Exception if writing the output file fails
     */
    @Override
    public void run() throws Exception {
        int newDimensions = 2;
        long randomSeed = 123;
        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)
                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)
                .build();

        String name = "dynamic_density_example";
        // ROBUSTNESS: try-with-resources replaces the trailing explicit close()
        // so the writer is also closed when an exception escapes the loop.
        try (BufferedWriter file = new BufferedWriter(new FileWriter(name))) {
            double[][] data = generate(1000);
            double[] queryPoint;
            for (int degree = 0; degree < 360; degree += 2) {
                for (double[] datum : data) {
                    newForest.update(rotateClockWise(datum, -2 * PI * degree / 360));
                }
                // raw density at (rotated) data points
                for (double[] datum : data) {
                    queryPoint = rotateClockWise(datum, -2 * PI * degree / 360);
                    DensityOutput density = newForest.getSimpleDensity(queryPoint);
                    double value = density.getDensity(0.001, 2);
                    file.append(queryPoint[0] + " " + queryPoint[1] + " " + value + "\n");
                }
                file.append("\n");
                file.append("\n");
                // normalized direction of increasing density on a grid
                for (double x = -0.95; x < 1; x += 0.1) {
                    for (double y = -0.95; y < 1; y += 0.1) {
                        DensityOutput density = newForest.getSimpleDensity(new double[] { x, y });
                        double aboveInY = density.getDirectionalDensity(0.001, 2).low[1];
                        double belowInY = density.getDirectionalDensity(0.001, 2).high[1];
                        double toTheLeft = density.getDirectionalDensity(0.001, 2).high[0];
                        double toTheRight = density.getDirectionalDensity(0.001, 2).low[0];
                        double len = Math.sqrt(aboveInY * aboveInY + belowInY * belowInY + toTheLeft * toTheLeft
                                + toTheRight * toTheRight);
                        file.append(x + " " + y + " " + ((toTheRight - toTheLeft) * 0.05 / len) + " "
                                + ((aboveInY - belowInY) * 0.05 / len) + "\n");
                    }
                }
                file.append("\n");
                file.append("\n");
            }
        }
    }
}
| 623 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFStringSummarizeExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.summarization;
import static java.lang.Math.min;
import java.util.List;
import java.util.Random;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.util.Weighted;
/**
* the following example showcases the use of RCF multi-summarization on generic
* types R, when provided with a distance function from (R,R) into double. In
* this example R correpsonds to Strings and the distance is EditDistance The
* srings are genrated from two clusters one where character A (or '-' for viz)
* occurs with probability 2/3 and anothewr where it occurs with probability 1/3
* (and the character B or '_' occurs with probability 2/3)
*
* Clearly, and the following example makes it visual, multicentroid approach is
* necessary.
*
* All the strings do not have the same length. Note that the summarization is
* asked with a maximum of 10 clusters but the algorithm self-adjusts to 2
* clusters.
*/
public class RCFStringSummarizeExample implements Example {
public static void main(String[] args) throws Exception {
new com.amazon.randomcutforest.examples.summarization.RCFStringSummarizeExample().run();
}
@Override
public String command() {
return "RCF_String_Summarize_Example";
}
@Override
public String description() {
return "Example of using RCF String Summarization, uses multi-centroid approach";
}
@Override
public void run() throws Exception {
long seed = -8436172895711381300L;
new Random().nextLong();
System.out.println("String summarization seed : " + seed);
Random random = new Random(seed);
int size = 100;
int numberOfStrings = 20000;
String[] points = new String[numberOfStrings];
for (int i = 0; i < numberOfStrings; i++) {
if (random.nextDouble() < 0.5) {
points[i] = getABString(size, 0.8, random);
} else {
points[i] = getABString(size, 0.2, random);
}
}
int nextSeed = random.nextInt();
List<ICluster<String>> summary = Summarizer.multiSummarize(points, 5, 10, 1, false, 0.8,
RCFStringSummarizeExample::toyDistance, nextSeed, true, 0.1, 5);
System.out.println();
for (int i = 0; i < summary.size(); i++) {
double weight = summary.get(i).getWeight();
System.out.println(
"Cluster " + i + " representatives, weight " + ((float) Math.round(1000 * weight) * 0.001));
List<Weighted<String>> representatives = summary.get(i).getRepresentatives();
for (int j = 0; j < representatives.size(); j++) {
double t = representatives.get(j).weight;
t = Math.round(1000.0 * t / weight) * 0.001;
System.out.print(
"relative weight " + (float) t + " length " + representatives.get(j).index.length() + " ");
printString(representatives.get(j).index);
System.out.println();
}
System.out.println();
}
}
public static double toyDistance(String a, String b) {
if (a.length() > b.length()) {
return toyDistance(b, a);
}
double[][] dist = new double[2][b.length() + 1];
for (int j = 0; j < b.length() + 1; j++) {
dist[0][j] = j;
}
for (int i = 1; i < a.length() + 1; i++) {
dist[1][0] = i;
for (int j = 1; j < b.length() + 1; j++) {
double t = dist[0][j - 1] + ((a.charAt(i - 1) == b.charAt(j - 1)) ? 0 : 1);
dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);
}
for (int j = 0; j < b.length() + 1; j++) {
dist[0][j] = dist[1][j];
}
}
return dist[1][b.length()];
}
// colors
public static final String ANSI_RESET = "\u001B[0m";
public static final String ANSI_RED = "\u001B[31m";
public static final String ANSI_BLUE = "\u001B[34m";
public static void printString(String a) {
for (int i = 0; i < a.length(); i++) {
if (a.charAt(i) == '-') {
System.out.print(ANSI_RED + a.charAt(i) + ANSI_RESET);
} else {
System.out.print(ANSI_BLUE + a.charAt(i) + ANSI_RESET);
}
}
}
public String getABString(int size, double probabilityOfA, Random random) {
StringBuilder stringBuilder = new StringBuilder();
int newSize = size + random.nextInt(size / 5);
for (int i = 0; i < newSize; i++) {
if (random.nextDouble() < probabilityOfA) {
stringBuilder.append("-");
} else {
stringBuilder.append("_");
}
}
return stringBuilder.toString();
}
} | 624 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/DynamicSummarization.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.summarization;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;
import static java.lang.Math.PI;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import com.amazon.randomcutforest.util.Weighted;
/**
* Summarized representation of the stored points provide a convenient view into
* the "current state" of the stream seen/sampled by an RCF. However since RCFs
* provide a generic sketch for multple different scenrios
* https://opensearch.org/blog/odfe-updates/2019/11/random-cut-forests/ the
* summarization can be used repeatedly to provide a dynamic clustering a
* numeric data stream as shown in the example below.
*
* The summarization is based on a well-scattered multi-centroid representation
* as in CURE https://en.wikipedia.org/wiki/CURE_algorithm and distance based
* clustering as in https://en.wikipedia.org/wiki/Data_stream_clustering
*
* The example corresponds to a wheel like arrangement -- where numberOfBlades
* determine the number of spokes. For many settings of the parameter the spokes
* are closer to each other near the center than the extremity at the rim. Thus
* a centroidal representation cannot conceptually capture each spoke as a
* cluster, and multi-centroid approach is necessary. Note that the input to the
* summarization is not the same as the numberOfBladed; the maxAllowed number
* corresponds to the maximum number of clusters which can be much larger. In a
* clustering application, the number of clusters are typically not known
* apriori.
*
* The pointset is generated once and are input to RCF with rotations. As the
* "blades are running", the output clusters can be colored and we can visualize
* the clusters produced. For the parameters below, simplistic plotting
* functions such as gnuplot using do for [i = 0:359] { plot [-15:15][-15:15]
* "sum" index i u 1:2:3:4 w circles fill solid noborder fc palette z t "" }
* would show the rotating clusters where the representatives corresponding to
* the same cluster has the same color. We note that the visualizations is
* neither polished nor complete, since the goal is to highlight the
* functionality of summarization in RCFs.
*/
public class DynamicSummarization implements Example {

    public static void main(String[] args) throws Exception {
        new DynamicSummarization().run();
    }

    @Override
    public String command() {
        return "dynamic_summarization";
    }

    @Override
    public String description() {
        return "shows a potential use of dynamic clustering/summarization";
    }

    /**
     * Streams a rotating "wheel" of numberOfBlades spokes into the forest and,
     * once per degree of rotation, writes the multi-centroid summary (colored so
     * that clusters keep their color across steps) and reports how often the
     * true number of spokes was recovered.
     *
     * @throws Exception if writing the output file fails
     */
    @Override
    public void run() throws Exception {
        int newDimensions = 2;
        long randomSeed = 123;
        int dataSize = 1350;
        int numberOfBlades = 9;
        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)
                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)
                .build();
        String name = "dynamic_summarization_example";
        BufferedWriter file = new BufferedWriter(new FileWriter(name));
        double[][] data = getData(dataSize, 0, numberOfBlades);
        // toggles for which sections (raw data / clusters) are written out
        boolean printData = false;
        boolean printClusters = true;
        List<ICluster<float[]>> oldSummary = null;
        int[] oldColors = null;
        // count = rotation steps where exactly numberOfBlades clusters were found;
        // sum = total clusters across all 360 steps
        int count = 0;
        int sum = 0;
        for (int degree = 0; degree < 360; degree += 1) {
            for (double[] datum : data) {
                double[] vec = rotateClockWise(datum, -2 * PI * degree / 360);
                if (printData) {
                    file.append(vec[0] + " " + vec[1] + "\n");
                }
                newForest.update(vec);
            }
            if (printData) {
                file.append("\n");
                file.append("\n");
            }
            // maxAllowed is deliberately larger than the true cluster count; the
            // previous summary seeds the next step for stability
            List<ICluster<float[]>> summary = newForest.summarize(2 * numberOfBlades + 2, 0.05, 5, 0.8,
                    Summarizer::L2distance, oldSummary);
            sum += summary.size();
            System.out.println(degree + " " + summary.size());
            if (summary.size() == numberOfBlades) {
                ++count;
            }
            // keep cluster colors consistent with the previous step
            int[] colors = align(summary, oldSummary, oldColors);
            for (int i = 0; i < summary.size(); i++) {
                double weight = summary.get(i).getWeight();
                for (Weighted<float[]> representative : summary.get(i).getRepresentatives()) {
                    double t = representative.weight / weight;
                    // skip representatives with negligible relative weight
                    if (t > 0.05 && printClusters) {
                        file.append(representative.index[0] + " " + representative.index[1] + " " + t + " " + colors[i]
                                + "\n");
                    }
                }
            }
            // only carry the summary forward when the expected count was found
            if (summary.size() == numberOfBlades) {
                oldSummary = summary;
                oldColors = colors;
            }
            if (printClusters) {
                file.append("\n");
                file.append("\n");
            }
        }
        // 3.6 converts a count over 360 steps into a percentage-like fraction
        System.out.println("Exact detection :" + ((float) Math.round(count / 3.6) * 0.01)
                + " fraction, average number of clusters " + ((float) Math.round(sum / 3.6) * 0.01));
        file.close();
    }

    /**
     * Generates the "wheel" dataset: Gaussian blobs stretched into ellipses,
     * shifted off-center and rotated onto one of {@code fans} spokes.
     *
     * @param dataSize number of points
     * @param seed     seed passed to the Gaussian generator (note the spoke
     *                 assignment uses a fixed Random(0) -- presumably intentional
     *                 for reproducibility, confirm if varying seeds is desired)
     * @param fans     number of spokes
     * @return the generated points
     */
    public double[][] getData(int dataSize, int seed, int fans) {
        Random prg = new Random(0);
        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
        int newDimensions = 2;
        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);
        for (int i = 0; i < dataSize; i++) {
            int nextFan = prg.nextInt(fans);
            // scale, make an ellipse
            data[i][1] *= 1.0 / fans;
            data[i][0] *= 2.0;
            // shift
            data[i][0] += 5.0 + fans / 2;
            data[i] = rotateClockWise(data[i], 2 * PI * nextFan / fans);
        }
        return data;
    }

    /**
     * Assigns each current cluster the color of its nearest previous cluster
     * (L1 distance), so colors remain stable across rotation steps; with no
     * previous summary the clusters are colored by index.
     *
     * @param current   clusters from this step
     * @param previous  clusters from the last step with the expected count, or null
     * @param oldColors colors assigned to {@code previous}
     * @return one color per current cluster
     */
    int[] align(List<ICluster<float[]>> current, List<ICluster<float[]>> previous, int[] oldColors) {
        int[] nearest = new int[current.size()];
        if (previous == null || previous.size() == 0) {
            for (int i = 0; i < current.size(); i++) {
                nearest[i] = i;
            }
        } else {
            Arrays.fill(nearest, previous.size() + 1);
            for (int i = 0; i < current.size(); i++) {
                double dist = previous.get(0).distance(current.get(i), Summarizer::L1distance);
                nearest[i] = oldColors[0];
                for (int j = 1; j < previous.size(); j++) {
                    double t = previous.get(j).distance(current.get(i), Summarizer::L1distance);
                    if (t < dist) {
                        dist = t;
                        nearest[i] = oldColors[j];
                    }
                }
            }
        }
        return nearest;
    }
}
| 625 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFMultiSummarizeExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.summarization;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static java.lang.Math.abs;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import com.amazon.randomcutforest.util.Weighted;
/**
* centroidal clustering fails in many scenarios; primarily because a single
* point in combination with a distance metric can only represent a sphere. A
* reasonable solution is to use multiple well scattered centroids to represent
* a cluster and has been long in use, see CURE
* https://en.wikipedia.org/wiki/CURE_algorithm
*
* The following example demonstrates the use of a multicentroid clustering; the
* data corresponds to 2*d clusters in d dimensions (d chosen randomly) such
* that the clusters almost touch, but remain separable. Note that the knowledge
* of the true number of clusters is not required -- the clustering is invoked
* with a maximum of 5*d potential clusters, and yet the example often finds the
* true 2*d clusters.
*/
public class RCFMultiSummarizeExample implements Example {
public static void main(String[] args) throws Exception {
new com.amazon.randomcutforest.examples.summarization.RCFMultiSummarizeExample().run();
}
@Override
public String command() {
return "RCF_Multi_Summarize_Example";
}
@Override
public String description() {
return "Example of using RCF Multi Summarization";
}
@Override
public void run() throws Exception {
long seed = new Random().nextLong();
Random random = new Random(seed);
int newDimensions = random.nextInt(10) + 3;
int dataSize = 200000;
float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);
double epsilon = 0.01;
List<ICluster<float[]>> summary = Summarizer.multiSummarize(points, 5 * newDimensions, 0.1, 5,
random.nextLong());
System.out.println(summary.size() + " clusters for " + newDimensions + " dimensions, seed : " + seed);
double weight = summary.stream().map(e -> e.getWeight()).reduce(Double::sum).get();
System.out.println(
"Total weight " + ((float) Math.round(weight * 1000) * 0.001) + " rounding to multiples of " + epsilon);
System.out.println();
for (int i = 0; i < summary.size(); i++) {
double clusterWeight = summary.get(i).getWeight();
System.out.println(
"Cluster " + i + " representatives, weight " + ((float) Math.round(1000 * clusterWeight) * 0.001));
List<Weighted<float[]>> representatives = summary.get(i).getRepresentatives();
for (int j = 0; j < representatives.size(); j++) {
double t = representatives.get(j).weight;
t = Math.round(1000.0 * t / clusterWeight) * 0.001;
System.out.print("relative weight " + (float) t + " center (approx) ");
printArray(representatives.get(j).index, epsilon);
System.out.println();
}
System.out.println();
}
}
void printArray(float[] values, double epsilon) {
System.out.print(" [");
if (abs(values[0]) < epsilon) {
System.out.print("0");
} else {
if (epsilon <= 0) {
System.out.print(values[0]);
} else {
long t = (int) Math.round(values[0] / epsilon);
System.out.print(t * epsilon);
}
}
for (int i = 1; i < values.length; i++) {
if (abs(values[i]) < epsilon) {
System.out.print(", 0");
} else {
if (epsilon <= 0) {
System.out.print(", " + values[i]);
} else {
long t = Math.round(values[i] / epsilon);
System.out.print(", " + t * epsilon);
}
}
}
System.out.print("]");
}
public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {
double baseMu = 0.0;
double baseSigma = 1.0;
double anomalyMu = 0.0;
double anomalySigma = 1.0;
double transitionToAnomalyProbability = 0.0;
// ignoring anomaly cluster for now
double transitionToBaseProbability = 1.0;
Random prg = new Random(0);
NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,
transitionToAnomalyProbability, transitionToBaseProbability);
double[][] data = generator.generateTestData(dataSize, newDimensions, seed);
float[][] floatData = new float[dataSize][];
float[] allZero = new float[newDimensions];
float[] sigma = new float[newDimensions];
Arrays.fill(sigma, 1f);
double scale = distance.apply(allZero, sigma);
for (int i = 0; i < dataSize; i++) {
// shrink, shift at random
int nextD = prg.nextInt(newDimensions);
for (int j = 0; j < newDimensions; j++) {
data[i][j] *= 1.0 / (3.0);
// standard deviation adds up across dimension; taking square root
// and using s 3 sigma ball
if (j == nextD) {
if (prg.nextDouble() < 0.5)
data[i][j] += 2.0 * scale;
else
data[i][j] -= 2.0 * scale;
}
}
floatData[i] = toFloatArray(data[i]);
}
return floatData;
}
} | 626 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFSummarizeExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.summarization;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static java.lang.Math.abs;
import java.util.Arrays;
import java.util.Random;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.returntypes.SampleSummary;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
* The following example is based off a test of summarization and provides an
* example use of summarization based on centroidal representation. The
* clustering takes a distance function from (float[],float []) into double as
* input, along with a maximum number of allowed clusters and provides a summary
* which contains the list of cluster centers as "typical points" along with
* relative likelihood.
*
* The specific example below corresponds to 2*d clusters (one each in +ve and
* -ve axis for each of the d dimensions) where d is chosen at random between 3
* and 13. The clusters are designed to almost touch -- but are separable (with
* high probability) and should be discoverable separately. Note that the
* algorithm does not require the knowledge of the true number of clusters (2*d)
* but is run with a maximum allowed number 5*d.
*/
public class RCFSummarizeExample implements Example {
public static void main(String[] args) throws Exception {
new com.amazon.randomcutforest.examples.summarization.RCFSummarizeExample().run();
}
@Override
public String command() {
return "RCF_Summarize_Example";
}
@Override
public String description() {
return "Example of using RCF Summarization";
}
@Override
public void run() throws Exception {
long seed = new Random().nextLong();
Random random = new Random(seed);
int newDimensions = random.nextInt(10) + 3;
int dataSize = 200000;
float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);
SampleSummary summary = Summarizer.l2summarize(points, 5 * newDimensions, 42);
System.out.println(
summary.summaryPoints.length + " clusters for " + newDimensions + " dimensions, seed : " + seed);
double epsilon = 0.01;
System.out.println("Total weight " + summary.weightOfSamples + " rounding to multiples of " + epsilon);
System.out.println();
for (int i = 0; i < summary.summaryPoints.length; i++) {
long t = Math.round(summary.relativeWeight[i] / epsilon);
System.out.print("Cluster " + i + " relative weight " + ((float) t * epsilon) + " center (approx): ");
printArray(summary.summaryPoints[i], epsilon);
System.out.println();
}
}
void printArray(float[] values, double epsilon) {
System.out.print(" [");
if (abs(values[0]) < epsilon) {
System.out.print("0");
} else {
if (epsilon <= 0) {
System.out.print(values[0]);
} else {
long t = (int) Math.round(values[0] / epsilon);
System.out.print((float) t * epsilon);
}
}
for (int i = 1; i < values.length; i++) {
if (abs(values[i]) < epsilon) {
System.out.print(", 0");
} else {
if (epsilon <= 0) {
System.out.print(", " + values[i]);
} else {
long t = Math.round(values[i] / epsilon);
System.out.print(", " + ((float) t * epsilon));
}
}
}
System.out.print("]");
}
public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {
double baseMu = 0.0;
double baseSigma = 1.0;
double anomalyMu = 0.0;
double anomalySigma = 1.0;
double transitionToAnomalyProbability = 0.0;
// ignoring anomaly cluster for now
double transitionToBaseProbability = 1.0;
Random prg = new Random(0);
NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,
transitionToAnomalyProbability, transitionToBaseProbability);
double[][] data = generator.generateTestData(dataSize, newDimensions, seed);
float[][] floatData = new float[dataSize][];
float[] allZero = new float[newDimensions];
float[] sigma = new float[newDimensions];
Arrays.fill(sigma, 1f);
double scale = distance.apply(allZero, sigma);
for (int i = 0; i < dataSize; i++) {
// shrink, shift at random
int nextD = prg.nextInt(newDimensions);
for (int j = 0; j < newDimensions; j++) {
data[i][j] *= 1.0 / (3.0);
// standard deviation adds up across dimension; taking square root
// and using s 3 sigma ball
if (j == nextD) {
if (prg.nextDouble() < 0.5)
data[i][j] += 2.0 * scale;
else
data[i][j] -= 2.0 * scale;
}
}
floatData[i] = toFloatArray(data[i]);
}
return floatData;
}
} | 627 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ObjectStreamExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.serialization;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
public class ObjectStreamExample implements Example {

    public static void main(String[] args) throws Exception {
        new ObjectStreamExample().run();
    }

    @Override
    public String command() {
        return "object_stream";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest with object stream";
    }

    /**
     * Trains a forest, round-trips its state through Java object-stream
     * serialization, and verifies the restored forest scores anomalies
     * consistently with the original.
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int dimensions = 10;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;

        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();

        int dataSize = 1000 * sampleSize;
        NormalMixtureTestData testData = new NormalMixtureTestData();
        for (double[] point : testData.generateTestData(dataSize, dimensions)) {
            forest.update(point);
        }

        // Convert to an array of bytes and print the size
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);

        System.out.printf("dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n", dimensions,
                numberOfTrees, sampleSize, precision);
        byte[] bytes = serialize(mapper.toState(forest));
        System.out.printf("Object output stream size = %d bytes%n", bytes.length);

        // Restore from object stream and compare anomaly scores produced by the two
        // forests
        RandomCutForestState state2 = (RandomCutForestState) deserialize(bytes);
        RandomCutForest forest2 = mapper.toModel(state2);

        int testSize = 100;
        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;
        int differences = 0;
        int anomalies = 0;
        for (double[] point : testData.generateTestData(testSize, dimensions)) {
            double score = forest.getAnomalyScore(point);
            double score2 = forest2.getAnomalyScore(point);

            // we mostly care that points that are scored as an anomaly by one forest are
            // also scored as an anomaly by the other forest
            if (score > 1 || score2 > 1) {
                anomalies++;
                if (Math.abs(score - score2) > delta) {
                    differences++;
                }
            }

            forest.update(point);
            forest2.update(point);
        }

        // first validate that this was a nontrivial test
        if (anomalies == 0) {
            throw new IllegalStateException("test data did not produce any anomalies");
        }

        // validate that the two forests agree on anomaly scores
        if (differences >= 0.01 * testSize) {
            throw new IllegalStateException("restored forest does not agree with original forest");
        }

        System.out.println("Looks good!");
    }

    /**
     * Serializes {@code model} to a byte array with Java object streams.
     *
     * @param model a serializable object (here, a forest state)
     * @return the serialized bytes
     */
    private byte[] serialize(Object model) {
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {
            objectOutputStream.writeObject(model);
            objectOutputStream.flush();
            return byteArrayOutputStream.toByteArray();
        } catch (IOException e) {
            // chain e itself: the previous e.getCause() dropped the real failure,
            // since an IOException's cause is frequently null
            throw new RuntimeException("Failed to serialize model.", e);
        }
    }

    /**
     * Deserializes an object previously produced by {@link #serialize(Object)}.
     *
     * @param modelBin serialized bytes
     * @return the restored object
     */
    private Object deserialize(byte[] modelBin) {
        try (ObjectInputStream objectInputStream = new ObjectInputStream(new ByteArrayInputStream(modelBin))) {
            return objectInputStream.readObject();
        } catch (IOException | ClassNotFoundException e) {
            // chain e itself rather than its (often null) cause
            throw new RuntimeException("Failed to deserialize model.", e);
        }
    }
}
| 628 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/JsonExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.serialization;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* Serialize a Random Cut Forest to JSON using
* <a href="https://github.com/FasterXML/jackson">Jackson</a>.
*/
public class JsonExample implements Example {

    public static void main(String[] args) throws Exception {
        new JsonExample().run();
    }

    @Override
    public String command() {
        return "json";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest as a JSON string";
    }

    /**
     * Trains a forest, serializes its state to JSON with Jackson, restores a
     * second forest from that JSON, and checks that both forests score anomalies
     * consistently.
     */
    @Override
    public void run() throws Exception {
        // build a forest and train it on mixture data
        int dimensions = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_64;

        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();

        NormalMixtureTestData testData = new NormalMixtureTestData();
        int trainingSize = 4 * sampleSize;
        for (double[] point : testData.generateTestData(trainingSize, dimensions)) {
            forest.update(point);
        }

        // capture the forest state as JSON and report its size
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);
        ObjectMapper jsonMapper = new ObjectMapper();
        String json = jsonMapper.writeValueAsString(mapper.toState(forest));
        System.out.printf("dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n", dimensions,
                numberOfTrees, sampleSize, precision);
        System.out.printf("JSON size = %d bytes%n", json.getBytes().length);

        // rebuild a forest from the JSON and compare scoring behavior
        RandomCutForest restored = mapper.toModel(jsonMapper.readValue(json, RandomCutForestState.class));

        int testSize = 100;
        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;
        int anomalyCount = 0;
        int disagreements = 0;
        for (double[] point : testData.generateTestData(testSize, dimensions)) {
            double originalScore = forest.getAnomalyScore(point);
            double restoredScore = restored.getAnomalyScore(point);
            // a point flagged by either forest should get a similar score from both
            if (originalScore > 1 || restoredScore > 1) {
                anomalyCount++;
                if (Math.abs(originalScore - restoredScore) > delta) {
                    disagreements++;
                }
            }
            forest.update(point);
            restored.update(point);
        }

        // the comparison is only meaningful if some anomalies were observed
        if (anomalyCount == 0) {
            throw new IllegalStateException("test data did not produce any anomalies");
        }
        // the restored forest must agree with the original on nearly all points
        if (disagreements >= 0.01 * testSize) {
            throw new IllegalStateException("restored forest does not agree with original forest");
        }
        System.out.println("Looks good!");
    }
}
| 629 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExampleWithDynamicLambda.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.serialization;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.executor.SamplerPlusTree;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import io.protostuff.LinkedBuffer;
import io.protostuff.ProtostuffIOUtil;
import io.protostuff.Schema;
import io.protostuff.runtime.RuntimeSchema;
/**
* Serialize a Random Cut Forest using the
* <a href="https://github.com/protostuff/protostuff">protostuff</a> library.
*/
public class ProtostuffExampleWithDynamicLambda implements Example {

    public static void main(String[] args) throws Exception {
        new ProtostuffExampleWithDynamicLambda().run();
    }

    @Override
    public String command() {
        return "protostuff_dynamic";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest with the protostuff library";
    }

    /**
     * Trains a forest, round-trips it through protostuff, changes the time decay
     * (lambda) dynamically on both copies, and verifies that the restored
     * samplers carry identical decay bookkeeping and that both forests agree on
     * anomaly scores.
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int dimensions = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_64;

        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();

        int dataSize = 4 * sampleSize;
        NormalMixtureTestData testData = new NormalMixtureTestData();
        for (double[] point : testData.generateTestData(dataSize, dimensions)) {
            forest.update(point);
        }

        // Convert to an array of bytes and print the size
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);

        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);
        LinkedBuffer buffer = LinkedBuffer.allocate(512);
        byte[] bytes;
        try {
            RandomCutForestState state = mapper.toState(forest);
            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);
        } finally {
            buffer.clear();
        }

        System.out.printf("dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n", dimensions,
                numberOfTrees, sampleSize, precision);
        System.out.printf("protostuff size = %d bytes%n", bytes.length);

        // Restore from protostuff and compare anomaly scores produced by the two
        // forests
        RandomCutForestState state2 = schema.newMessage();
        ProtostuffIOUtil.mergeFrom(bytes, state2, schema);
        RandomCutForest forest2 = mapper.toModel(state2);

        // change the decay dynamically on both forests; the restored samplers must
        // have preserved the bookkeeping required for this to behave identically
        // (an unused local that saved the old decay has been removed)
        forest.setTimeDecay(10 * forest.getTimeDecay());
        forest2.setTimeDecay(10 * forest2.getTimeDecay());

        // each restored sampler must match its original on sequence-index and
        // decay-update bookkeeping
        for (int i = 0; i < numberOfTrees; i++) {
            CompactSampler sampler = (CompactSampler) ((SamplerPlusTree) forest.getComponents().get(i)).getSampler();
            CompactSampler sampler2 = (CompactSampler) ((SamplerPlusTree) forest2.getComponents().get(i)).getSampler();
            if (sampler.getMaxSequenceIndex() != sampler2.getMaxSequenceIndex()) {
                throw new IllegalStateException("Incorrect sampler state");
            }
            if (sampler.getMostRecentTimeDecayUpdate() != sampler2.getMostRecentTimeDecayUpdate()) {
                throw new IllegalStateException("Incorrect sampler state");
            }
            if (sampler2.getMostRecentTimeDecayUpdate() != dataSize - 1) {
                throw new IllegalStateException("Incorrect sampler state");
            }
        }

        int testSize = 100;
        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;
        int differences = 0;
        int anomalies = 0;
        for (double[] point : testData.generateTestData(testSize, dimensions)) {
            double score = forest.getAnomalyScore(point);
            double score2 = forest2.getAnomalyScore(point);

            // we mostly care that points that are scored as an anomaly by one forest are
            // also scored as an anomaly by the other forest
            if (score > 1 || score2 > 1) {
                anomalies++;
                if (Math.abs(score - score2) > delta) {
                    differences++;
                }
            }

            forest.update(point);
            forest2.update(point);
        }

        // first validate that this was a nontrivial test
        if (anomalies == 0) {
            throw new IllegalStateException("test data did not produce any anomalies");
        }

        // validate that the two forests agree on anomaly scores
        if (differences >= 0.01 * testSize) {
            throw new IllegalStateException("restored forest does not agree with original forest");
        }

        System.out.println("Looks good!");
    }
}
| 630 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExampleWithShingles.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.serialization;
import static java.lang.Math.PI;
import java.util.Random;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import io.protostuff.LinkedBuffer;
import io.protostuff.ProtostuffIOUtil;
import io.protostuff.Schema;
import io.protostuff.runtime.RuntimeSchema;
/**
* Serialize a Random Cut Forest using the
* <a href="https://github.com/protostuff/protostuff">protostuff</a> library.
*/
public class ProtostuffExampleWithShingles implements Example {

    public static void main(String[] args) throws Exception {
        new ProtostuffExampleWithShingles().run();
    }

    @Override
    public String command() {
        return "protostuffWithShingles";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest with the protostuff library for shingled points";
    }

    /**
     * Trains a forest on shingled cosine data, round-trips it through protostuff
     * (without saving tree state), and verifies the restored forest scores
     * anomalies consistently with the original.
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int dimensions = 10;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_64;

        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).shingleSize(dimensions)
                .build();

        int dataSize = 1000 * sampleSize;
        for (double[] point : generateShingledData(dataSize, dimensions, 0)) {
            forest.update(point);
        }

        // Convert to an array of bytes and print the size
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);
        mapper.setSaveTreeStateEnabled(false);

        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);
        LinkedBuffer buffer = LinkedBuffer.allocate(512);
        byte[] bytes;
        try {
            RandomCutForestState state = mapper.toState(forest);
            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);
        } finally {
            buffer.clear();
        }

        System.out.printf("dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n", dimensions,
                numberOfTrees, sampleSize, precision);
        System.out.printf("protostuff size = %d bytes%n", bytes.length);

        // Restore from protostuff and compare anomaly scores produced by the two
        // forests
        RandomCutForestState state2 = schema.newMessage();
        ProtostuffIOUtil.mergeFrom(bytes, state2, schema);
        RandomCutForest forest2 = mapper.toModel(state2);

        int testSize = 10000;
        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;
        int differences = 0;
        int anomalies = 0;
        for (double[] point : generateShingledData(testSize, dimensions, 2)) {
            double score = forest.getAnomalyScore(point);
            double score2 = forest2.getAnomalyScore(point);

            // we mostly care that points that are scored as an anomaly by one forest are
            // also scored as an anomaly by the other forest
            if (score > 1 || score2 > 1) {
                anomalies++;
                if (Math.abs(score - score2) > delta) {
                    differences++;
                }
            }

            forest.update(point);
            forest2.update(point);
        }

        // first validate that this was a nontrivial test (consistent with the
        // sibling protostuff examples; previously 'anomalies' was counted but
        // never checked)
        if (anomalies == 0) {
            throw new IllegalStateException("test data did not produce any anomalies");
        }

        // validate that the two forests agree on anomaly scores
        if (differences >= 0.01 * testSize) {
            throw new IllegalStateException("restored forest does not agree with original forest");
        }

        System.out.println("Looks good!");
    }

    /**
     * Streams a noisy cosine wave through a rolling window of length
     * {@code dimensions}, emitting one shingled point per step once the window
     * has filled.
     *
     * @param size       number of shingled points to produce
     * @param dimensions shingle length
     * @param seed       seed for the noise generator
     * @return {@code size} shingled points
     */
    private double[][] generateShingledData(int size, int dimensions, long seed) {
        double[][] answer = new double[size][];
        int entryIndex = 0;
        boolean filledShingleAtLeastOnce = false;
        double[] history = new double[dimensions];
        int count = 0;
        double[] data = getDataD(size + dimensions - 1, 100, 5, seed);
        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....
            history[entryIndex] = data[j];
            entryIndex = (entryIndex + 1) % dimensions;
            if (entryIndex == 0) {
                filledShingleAtLeastOnce = true;
            }
            if (filledShingleAtLeastOnce) {
                answer[count++] = getShinglePoint(history, entryIndex, dimensions);
            }
        }
        return answer;
    }

    /**
     * Copies the circular buffer into a flat array ordered oldest-to-newest.
     *
     * @param recentPointsSeen   circular buffer of the most recent observations
     * @param indexOfOldestPoint index of the oldest entry in the buffer
     * @param shingleLength      length of the buffer / output
     * @return the flattened shingle
     */
    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {
        double[] shingledPoint = new double[shingleLength];
        for (int j = 0; j < shingleLength; ++j) {
            // walk the ring buffer starting at the oldest entry
            shingledPoint[j] = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];
        }
        return shingledPoint;
    }

    /**
     * Generates {@code num} samples of a cosine wave with additive uniform noise.
     *
     * @param num       number of samples
     * @param amplitude amplitude of the cosine wave
     * @param noise     magnitude of the uniform noise term
     * @param seed      seed for the noise generator
     * @return the sampled signal
     */
    double[] getDataD(int num, double amplitude, double noise, long seed) {
        double[] data = new double[num];
        Random noiseprg = new Random(seed);
        for (int i = 0; i < num; i++) {
            data[i] = amplitude * Math.cos(2 * PI * (i + 50) / 1000) + noise * noiseprg.nextDouble();
        }
        return data;
    }
}
| 631 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.serialization;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import io.protostuff.LinkedBuffer;
import io.protostuff.ProtostuffIOUtil;
import io.protostuff.Schema;
import io.protostuff.runtime.RuntimeSchema;
/**
* Serialize a Random Cut Forest using the
* <a href="https://github.com/protostuff/protostuff">protostuff</a> library.
*/
public class ProtostuffExample implements Example {

    public static void main(String[] args) throws Exception {
        new ProtostuffExample().run();
    }

    @Override
    public String command() {
        return "protostuff";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest with the protostuff library";
    }

    /**
     * Trains a forest, serializes its state with protostuff, restores a second
     * forest from the bytes, and checks that both forests score anomalies
     * consistently.
     */
    @Override
    public void run() throws Exception {
        // build a forest and train it on mixture data
        int dimensions = 10;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;

        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();

        NormalMixtureTestData testData = new NormalMixtureTestData();
        int trainingSize = 1000 * sampleSize;
        for (double[] point : testData.generateTestData(trainingSize, dimensions)) {
            forest.update(point);
        }

        // capture the forest state and serialize it with protostuff
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);
        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);
        LinkedBuffer buffer = LinkedBuffer.allocate(512);
        byte[] bytes;
        try {
            RandomCutForestState state = mapper.toState(forest);
            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);
        } finally {
            buffer.clear();
        }
        System.out.printf("dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n", dimensions,
                numberOfTrees, sampleSize, precision);
        System.out.printf("protostuff size = %d bytes%n", bytes.length);

        // rebuild a forest from the serialized bytes
        RandomCutForestState restoredState = schema.newMessage();
        ProtostuffIOUtil.mergeFrom(bytes, restoredState, schema);
        RandomCutForest restored = mapper.toModel(restoredState);

        int testSize = 100;
        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;
        int anomalyCount = 0;
        int disagreements = 0;
        for (double[] point : testData.generateTestData(testSize, dimensions)) {
            double originalScore = forest.getAnomalyScore(point);
            double restoredScore = restored.getAnomalyScore(point);
            // a point flagged by either forest should get a similar score from both
            if (originalScore > 1 || restoredScore > 1) {
                anomalyCount++;
                if (Math.abs(originalScore - restoredScore) > delta) {
                    disagreements++;
                }
            }
            forest.update(point);
            restored.update(point);
        }

        // the comparison is only meaningful if some anomalies were observed
        if (anomalyCount == 0) {
            throw new IllegalStateException("test data did not produce any anomalies");
        }
        // the restored forest must agree with the original on nearly all points
        if (disagreements >= 0.01 * testSize) {
            throw new IllegalStateException("restored forest does not agree with original forest");
        }
        System.out.println("Looks good!");
    }
}
| 632 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedInternalShinglingExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
public class ThresholdedInternalShinglingExample implements Example {
    /** Command-line entry point; runs this example once. */
    public static void main(String[] args) throws Exception {
        new ThresholdedInternalShinglingExample().run();
    }
    /** Name used to select this example from the command line. */
    @Override
    public String command() {
        return "Thresholded_Multi_Dim_example";
    }
    /** Short human-readable description of this example. */
    @Override
    public String description() {
        return "Thresholded Multi Dimensional Example";
    }
@Override
public void run() throws Exception {
// Create and populate a random cut forest
int shingleSize = 4;
int numberOfTrees = 50;
int sampleSize = 256;
Precision precision = Precision.FLOAT_32;
int dataSize = 4 * sampleSize;
// change this to try different number of attributes,
// this parameter is not expected to be larger than 5 for this example
int baseDimensions = 1;
long count = 0;
int dimensions = baseDimensions * shingleSize;
TransformMethod transformMethod = TransformMethod.NORMALIZE_DIFFERENCE;
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
.internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
.weightTime(0).transformMethod(transformMethod).normalizeTime(true).outputAfter(32)
.initialAcceptFraction(0.125).build();
ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
.internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
.forestMode(ForestMode.TIME_AUGMENTED).weightTime(0).transformMethod(transformMethod)
.normalizeTime(true).outputAfter(32).initialAcceptFraction(0.125).build();
// ensuring that the parameters are the same; otherwise the grades/scores cannot
// be the same
// weighTime has to be 0
forest.setLowerThreshold(1.1);
second.setLowerThreshold(1.1);
forest.setHorizon(0.75);
second.setHorizon(0.75);
long seed = new Random().nextLong();
Random noise = new Random(0);
System.out.println("seed = " + seed);
// change the last argument seed for a different run
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
100, 5, seed, baseDimensions);
int keyCounter = 0;
for (double[] point : dataWithKeys.data) {
// idea is that we expect the arrival order to be roughly 100 apart (say
// seconds)
// then the noise corresponds to a jitter; one can try TIME_AUGMENTED and
// .normalizeTime(true)
long timestamp = 100 * count + noise.nextInt(10) - 5;
AnomalyDescriptor result = forest.process(point, timestamp);
AnomalyDescriptor test = second.process(point, timestamp);
checkArgument(Math.abs(result.getRCFScore() - test.getRCFScore()) < 1e-10, " error");
checkArgument(Math.abs(result.getAnomalyGrade() - test.getAnomalyGrade()) < 1e-10, " error");
if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
System.out
.println("timestamp " + count + " CHANGE " + Arrays.toString(dataWithKeys.changes[keyCounter]));
++keyCounter;
}
if (result.getAnomalyGrade() != 0) {
System.out.print("timestamp " + count + " RESULT value " + result.getInternalTimeStamp() + " ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getCurrentInput()[i] + ", ");
}
System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
System.out.print(-result.getRelativeIndex() + " steps ago, ");
}
if (result.isExpectedValuesPresent()) {
if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
System.out.print("instead of ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getPastValues()[i] + ", ");
}
System.out.print("expected ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getExpectedValuesList()[0][i] + ", ");
if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
System.out.print("( "
+ (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
}
}
} else {
System.out.print("expected ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getExpectedValuesList()[0][i] + ", ");
if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
System.out.print("( "
+ (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
}
}
}
} else {
System.out.print("insufficient data to provide expected values");
}
System.out.println();
}
++count;
}
}
}
| 633 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ScoringStrategyExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Demonstrates the different {@link ScoringStrategy} options of a
 * {@link ThresholdedRandomCutForest}. Three forests are built with identical
 * parameters and the same random seed, differing only in scoring strategy;
 * the example verifies that all three produce the same raw RCF score on every
 * point and prints the anomalies each strategy reports.
 */
public class ScoringStrategyExample implements Example {

    public static void main(String[] args) throws Exception {
        new ScoringStrategyExample().run();
    }

    @Override
    public String command() {
        return "Scoring_strategy_example";
    }

    @Override
    public String description() {
        return "Scoring Strategy Example";
    }

    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 1;
        long seed = new Random().nextLong();
        long count = 0;
        int dimensions = baseDimensions * shingleSize;
        TransformMethod transformMethod = TransformMethod.NORMALIZE;
        // the three forests share seed and parameters so the underlying trees
        // -- and therefore the raw scores -- are identical; only the scoring
        // strategy differs
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.EXPECTED_INVERSE_DEPTH)
                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();
        ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.MULTI_MODE)
                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();
        ThresholdedRandomCutForest third = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.MULTI_MODE_RECALL)
                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                100, 5, seed, baseDimensions);
        int keyCounter = 0;
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor result = forest.process(point, 0L);
            // renamed from multi_mode / multi_mode_recall to follow Java
            // lowerCamelCase naming conventions
            AnomalyDescriptor multiMode = second.process(point, 0L);
            AnomalyDescriptor multiModeRecall = third.process(point, 0L);
            // the raw scores must agree across strategies
            checkArgument(Math.abs(result.getRCFScore() - multiMode.getRCFScore()) < 1e-10, " error");
            checkArgument(Math.abs(result.getRCFScore() - multiModeRecall.getRCFScore()) < 1e-10, " error");
            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
                System.out
                        .println("timestamp " + count + " CHANGE " + Arrays.toString(dataWithKeys.changes[keyCounter]));
                ++keyCounter;
            }
            printResult("MULTI_MODE_RECALL", multiModeRecall, count, baseDimensions);
            printResult("EXPECTED_INVERSE_DEPTH", result, count, baseDimensions);
            printResult("MULTI_MODE", multiMode, count, baseDimensions);
            ++count;
        }
    }

    /**
     * Prints a one-line summary of a descriptor when its anomaly grade is
     * nonzero: the triggering values, score, grade, and (when available) the
     * values the forest would have expected instead.
     *
     * @param description    label identifying the scoring strategy
     * @param result         descriptor returned by process()
     * @param count          sequence number of the input point
     * @param baseDimensions number of attributes per (unshingled) input point
     */
    void printResult(String description, AnomalyDescriptor result, long count, int baseDimensions) {
        if (result.getAnomalyGrade() != 0) {
            System.out.print(description + " timestamp " + count + " RESULT value ");
            for (int i = 0; i < baseDimensions; i++) {
                System.out.print(result.getCurrentInput()[i] + ", ");
            }
            System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
            // a nonzero relative index means the anomaly was flagged late
            if (result.getRelativeIndex() != 0) {
                System.out.print(-result.getRelativeIndex() + " steps ago, ");
            }
            if (result.isExpectedValuesPresent()) {
                if (result.getRelativeIndex() != 0) {
                    // report the past point that was retroactively anomalous
                    System.out.print("instead of ");
                    for (int i = 0; i < baseDimensions; i++) {
                        System.out.print(result.getPastValues()[i] + ", ");
                    }
                    System.out.print("expected ");
                    for (int i = 0; i < baseDimensions; i++) {
                        System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                        if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
                            System.out.print(
                                    "( " + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                        }
                    }
                } else {
                    System.out.print("expected ");
                    for (int i = 0; i < baseDimensions; i++) {
                        System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                        if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
                            System.out.print("( " + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i])
                                    + " ) ");
                        }
                    }
                }
            } else {
                System.out.print("insufficient data to provide expected values");
            }
            System.out.println();
        }
    }
}
| 634 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/SequentialAnomalyExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.SequentialAnalysis;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Demonstrates offline sequential analysis: the entire stream is handed to
 * {@link SequentialAnalysis#detectAnomalies} in one call and the returned list
 * of anomaly descriptors is printed alongside the injected change points of
 * the synthetic data.
 */
public class SequentialAnomalyExample implements Example {

    public static void main(String[] args) throws Exception {
        new SequentialAnomalyExample().run();
    }

    @Override
    public String command() {
        return "Sequential_analysis_example";
    }

    @Override
    public String description() {
        return "Sequential Analysis Example";
    }

    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 8;
        int sampleSize = 256;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 2;
        // (removed an unused local `numberOfTrees`; detectAnomalies below does
        // not take a tree count)
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                100, 5, seed, baseDimensions);
        // sample-retention decay; roughly averages over 10 sample windows
        double timeDecay = 1.0 / (10 * sampleSize);
        List<AnomalyDescriptor> anomalies = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,
                sampleSize, timeDecay, TransformMethod.NONE, seed);
        int keyCounter = 0;
        for (AnomalyDescriptor result : anomalies) {
            // first print the changes
            while (keyCounter < dataWithKeys.changeIndices.length
                    && dataWithKeys.changeIndices[keyCounter] <= result.getInternalTimeStamp()) {
                System.out.println("timestamp " + dataWithKeys.changeIndices[keyCounter] + " CHANGE "
                        + Arrays.toString(dataWithKeys.changes[keyCounter]));
                ++keyCounter;
            }
            if (result.getAnomalyGrade() != 0) {
                System.out.print("timestamp " + result.getInternalTimeStamp() + " RESULT value ");
                for (int i = 0; i < baseDimensions; i++) {
                    System.out.print(result.getCurrentInput()[i] + ", ");
                }
                System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
                if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                    System.out.print(-result.getRelativeIndex() + " step(s) ago, ");
                }
                if (result.isExpectedValuesPresent()) {
                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                        // the anomaly was attributed to an earlier point
                        System.out.print("instead of ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getPastValues()[i] + ", ");
                        }
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
                                System.out.print("( "
                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    } else {
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
                                System.out.print("( "
                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    }
                } else {
                    System.out.print("insufficient data to provide expected values");
                }
                System.out.println();
            }
        }
    }
}
| 635 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedTime.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Random;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
 * Demonstrates TIME_AUGMENTED mode where the anomaly signal is carried
 * entirely by the timestamps: the value stream is the constant {1.0} and the
 * inter-arrival times are jittered by samples from a Gaussian mixture.
 */
public class ThresholdedTime implements Example {

    public static void main(String[] args) throws Exception {
        new ThresholdedTime().run();
    }

    @Override
    public String command() {
        return "Thresholded_Time_example";
    }

    @Override
    public String description() {
        return "Thresholded Time Example";
    }

    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 1;
        int count = 0;
        int dimensions = baseDimensions * shingleSize;
        // TIME_AUGMENTED with normalizeTime(true): the timestamp passed to
        // process() is folded into the point representation and scored
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).build();
        long seed = new Random().nextLong();
        // the value channel is constant; all information is in the timestamps
        double[] data = new double[] { 1.0 };
        System.out.println("seed = " + seed);
        // NOTE(review): `seed` is printed but not used below -- the mixture data
        // is generated with seed 0; confirm whether it should be passed to
        // generateTestDataWithKey for reproducible variation
        NormalMixtureTestData normalMixtureTestData = new NormalMixtureTestData(10, 50);
        MultiDimDataWithKey dataWithKeys = normalMixtureTestData.generateTestDataWithKey(dataSize, 1, 0);
        /**
         * the anomalies will move from normal -> anomalous -> normal starts from normal
         */
        boolean anomalyState = false;
        int keyCounter = 0;
        for (double[] point : dataWithKeys.data) {
            // nominal arrival every 1000 units, jittered by the mixture sample
            long time = (long) (1000L * count + Math.floor(10 * point[0]));
            AnomalyDescriptor result = forest.process(data, time);
            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
                System.out.print("Sequence " + count + " stamp " + (result.getInternalTimeStamp()) + " CHANGE ");
                if (!anomalyState) {
                    System.out.println(" to Distribution 1 ");
                } else {
                    System.out.println(" to Distribution 0 ");
                }
                anomalyState = !anomalyState;
                ++keyCounter;
            }
            if (result.getAnomalyGrade() != 0) {
                System.out.print("Sequence " + count + " stamp " + (result.getInternalTimeStamp()) + " RESULT ");
                System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
                if (result.isExpectedValuesPresent()) {
                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                        // anomaly attributed to an earlier arrival
                        System.out.print(-result.getRelativeIndex() + " steps ago, instead of stamp "
                                + result.getPastTimeStamp());
                        System.out.print(", expected timestamp " + result.getExpectedTimeStamp() + " ( "
                                + (result.getPastTimeStamp() - result.getExpectedTimeStamp() + ")"));
                    } else {
                        System.out.print("expected " + result.getExpectedTimeStamp() + " ( "
                                + (result.getInternalTimeStamp() - result.getExpectedTimeStamp() + ")"));
                    }
                }
                System.out.println();
            }
            ++count;
        }
    }
}
| 636 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedRCFJsonExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Random;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;
import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestState;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* Serialize a Random Cut Forest to JSON using
* <a href="https://github.com/FasterXML/jackson">Jackson</a>.
*/
public class ThresholdedRCFJsonExample implements Example {

    public static void main(String[] args) throws Exception {
        new ThresholdedRCFJsonExample().run();
    }

    @Override
    public String command() {
        return "json";
    }

    @Override
    public String description() {
        return "serialize a Thresholded Random Cut Forest as a JSON string";
    }

    /**
     * Trains a forest, round-trips it through JSON, and verifies that the
     * restored forest produces (nearly) identical scores on held-out points.
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int baseDimension = 2;
        int shingleSize = 8;
        int numberOfTrees = 50;
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println("seed :" + seed);
        Random rng = new Random(seed);
        int dimensions = baseDimension * shingleSize;
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions)
                .shingleSize(shingleSize).transformMethod(TransformMethod.NORMALIZE).numberOfTrees(numberOfTrees)
                .sampleSize(sampleSize).build();
        int dataSize = 4 * sampleSize;
        int testSize = sampleSize;
        double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50, 100, 5,
                rng.nextLong(), baseDimension, 5.0, false).data;
        // train on everything except the last testSize points
        for (int i = 0; i < data.length - testSize; i++) {
            forest.process(data[i], 0L);
        }
        // Convert to JSON and print the number of bytes
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        ObjectMapper jsonMapper = new ObjectMapper();
        String json = jsonMapper.writeValueAsString(mapper.toState(forest));
        System.out.printf("JSON size = %d bytes%n", json.getBytes().length);
        // Restore from JSON and compare anomaly scores produced by the two forests
        ThresholdedRandomCutForest forest2 = mapper
                .toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));
        // BUG FIX: the original loop started at i = data.length, so it never
        // executed and the comparison was vacuous; run over the held-out tail
        for (int i = data.length - testSize; i < data.length; i++) {
            AnomalyDescriptor result = forest.process(data[i], 0L);
            AnomalyDescriptor shadow = forest2.process(data[i], 0L);
            // assert requires running with -ea
            assert (Math.abs(result.getRCFScore() - shadow.getRCFScore()) < 1e-6);
        }
        System.out.println("Looks good!");
    }
}
| 637 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/StringGLADexample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import static java.lang.Math.min;
import java.util.List;
import java.util.Random;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector;
import com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.util.Weighted;
/**
* A clustering based anomaly detection for strings for two characters using
* edit distance. Note that the algorithm does not have any inbuilt test for
* verifying if the distance is indeed a metric (other than checking for
* non-negative values.
*/
public class StringGLADexample implements Example {

    public static void main(String[] args) throws Exception {
        new StringGLADexample().run();
    }

    @Override
    public String command() {
        return "Clustering based Global-Local Anomaly Detection Example for strings";
    }

    @Override
    public String description() {
        return "Clustering based Global-Local Anomaly Detection Example for strings";
    }

    /**
     * Generates a stream of two-character strings with a biased character
     * distribution, injects ~5% longer unbiased strings as anomalies, and runs
     * a clustering-based detector with edit distance, reporting
     * precision/recall as it goes.
     */
    @Override
    public void run() throws Exception {
        long seed = new Random().nextLong();
        System.out.println("seed : " + seed);
        Random random = new Random(seed);
        int stringSize = 70;
        int numberOfStrings = 200000;
        int reservoirSize = 2000;
        boolean changeInMiddle = true;
        // the following should be away from 0.5 in [0.5,1]
        double gapProbOfA = 0.85;
        double anomalyRate = 0.05;
        char[][] points = new char[numberOfStrings][];
        boolean[] injected = new boolean[numberOfStrings];
        boolean printClusters = true;
        boolean printFalseNeg = false;
        boolean printFalsePos = false;
        int numberOfInjected = 0;
        for (int i = 0; i < numberOfStrings; i++) {
            if (random.nextDouble() < anomalyRate && i > reservoirSize / 2) {
                // anomalies: longer strings drawn with an unbiased coin
                injected[i] = true;
                ++numberOfInjected;
                points[i] = getABArray(stringSize + 10, 0.5, random, false, 0);
            } else {
                boolean flag = changeInMiddle && random.nextDouble() < 0.25;
                double prob = (random.nextDouble() < 0.5) ? gapProbOfA : (1 - gapProbOfA);
                points[i] = getABArray(stringSize, prob, random, flag, 0.25 * i / numberOfStrings);
            }
        }
        System.out.println("Injected " + numberOfInjected + " 'anomalies' in " + points.length);
        // (removed an unused local `recluster` that was computed but never read)
        BiFunction<char[], char[], Double> dist = (a, b) -> toyD(a, b, stringSize / 2.0);
        GlobalLocalAnomalyDetector<char[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)
                .numberOfRepresentatives(5).timeDecay(1.0 / reservoirSize).capacity(reservoirSize).build();
        reservoir.setGlobalDistance(dist);
        // for non-geometric bounded distances, such as for strings, keep the factor at
        // 3.0 or below
        // minimum is 2.5, set as default; uncomment to change
        // reservoir.setZfactor(DEFAULT_Z_FACTOR);
        int truePos = 0;
        int falsePos = 0;
        int falseNeg = 0;
        for (int y = 0; y < points.length; y++) {
            GenericAnomalyDescriptor<char[]> result = reservoir.process(points[y], 1.0f, null, true);
            if (result.getAnomalyGrade() > 0) {
                if (!injected[y]) {
                    ++falsePos;
                    List<Weighted<char[]>> list = result.getRepresentativeList();
                    if (printFalsePos) {
                        System.out.println(result.getScore() + " " + injected[y] + " at " + y + " dist "
                                + dist.apply(points[y], list.get(0).index) + " " + result.getThreshold());
                        printCharArray(list.get(0).index);
                        System.out.println();
                        printCharArray(points[y]);
                        System.out.println();
                    }
                } else {
                    ++truePos;
                }
            } else if (injected[y]) {
                ++falseNeg;
                if (printFalseNeg) {
                    System.out.println(" missed " + result.getScore() + " " + result.getThreshold());
                }
            }
            if (printClusters && y % 10000 == 0 && y > 0) {
                System.out.println(" at " + y);
                printClusters(reservoir.getClusters());
            }
            // print running precision/recall at every tenth of the stream
            if (10 * y % points.length == 0 && y > 0) {
                System.out.println(" at " + y);
                System.out.println("Precision = " + precision(truePos, falsePos));
                System.out.println("Recall = " + recall(truePos, falseNeg));
            }
        }
        System.out.println(" Final: ");
        System.out.println("Precision = " + precision(truePos, falsePos));
        System.out.println("Recall = " + recall(truePos, falseNeg));
    }

    /**
     * Levenshtein (edit) distance between two character arrays, computed with
     * the two-row Wagner-Fischer dynamic program.
     *
     * @param a first string
     * @param b second string
     * @param u currently unused; presumably a cap or normalization constant
     *          (callers pass stringSize / 2.0) -- TODO confirm intended use
     * @return the minimum number of insertions, deletions and substitutions
     *         needed to transform a into b
     */
    public static double toyD(char[] a, char[] b, double u) {
        if (a.length > b.length) {
            // distance is symmetric; iterate over the shorter string
            return toyD(b, a, u);
        }
        double[][] dist = new double[2][b.length + 1];
        for (int j = 0; j < b.length + 1; j++) {
            dist[0][j] = j;
        }
        for (int i = 1; i < a.length + 1; i++) {
            dist[1][0] = i;
            for (int j = 1; j < b.length + 1; j++) {
                double t = dist[0][j - 1] + ((a[i - 1] == b[j - 1]) ? 0 : 1);
                dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);
            }
            for (int j = 0; j < b.length + 1; j++) {
                dist[0][j] = dist[1][j];
            }
        }
        // BUG FIX: return the last completed row dist[0] (kept in sync by the
        // copy above); the original returned dist[1][b.length], which is 0
        // instead of the correct b.length when a is empty
        return dist[0][b.length];
    }

    // colors
    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_RED = "\u001B[31m";
    public static final String ANSI_BLUE = "\u001B[34m";

    /** Prints the array with '-' in red and every other character in blue. */
    public static void printCharArray(char[] a) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] == '-') {
                System.out.print(ANSI_RED + a[i] + ANSI_RESET);
            } else {
                System.out.print(ANSI_BLUE + a[i] + ANSI_RESET);
            }
        }
    }

    /** Prints each cluster's weight, average radius, and representatives. */
    public void printClusters(List<ICluster<char[]>> summary) {
        for (int i = 0; i < summary.size(); i++) {
            double weight = summary.get(i).getWeight();
            System.out.println("Cluster " + i + " representatives, weight "
                    + ((float) Math.round(1000 * weight) * 0.001) + " avg radius " + summary.get(i).averageRadius());
            List<Weighted<char[]>> representatives = summary.get(i).getRepresentatives();
            for (int j = 0; j < representatives.size(); j++) {
                double t = representatives.get(j).weight;
                // weight relative to the whole cluster, rounded to 3 decimals
                t = Math.round(1000.0 * t / weight) * 0.001;
                System.out
                        .print("relative weight " + (float) t + " length " + representatives.get(j).index.length + " ");
                printCharArray(representatives.get(j).index);
                System.out.println();
            }
            System.out.println();
        }
    }

    /**
     * Generates a random '-'/'_' string of length size plus up to 20% slack.
     *
     * @param size           nominal length; actual length is size + [0, size/5)
     * @param probabilityOfA probability of emitting '-'
     * @param random         randomness source
     * @param changeInMiddle if true, the bias is inverted near both ends
     * @param fraction       fraction of each end affected when changeInMiddle
     * @return the generated character array
     */
    public char[] getABArray(int size, double probabilityOfA, Random random, Boolean changeInMiddle, double fraction) {
        int newSize = size + random.nextInt(size / 5);
        char[] a = new char[newSize];
        for (int i = 0; i < newSize; i++) {
            double toss = (changeInMiddle && (i > (1 - fraction) * newSize || i < newSize * fraction))
                    ? (1 - probabilityOfA)
                    : probabilityOfA;
            if (random.nextDouble() < toss) {
                a[i] = '-';
            } else {
                a[i] = '_';
            }
        }
        return a;
    }

    /** Precision = TP / (TP + FP); defined as 1.0 when no positives reported. */
    double precision(int truePos, int falsePos) {
        return (truePos + falsePos > 0) ? 1.0 * truePos / (truePos + falsePos) : 1.0;
    }

    /** Recall = TP / (TP + FN); defined as 1.0 when no positives exist. */
    double recall(int truePos, int falseNeg) {
        return (truePos + falseNeg > 0) ? 1.0 * truePos / (truePos + falseNeg) : 1.0;
    }
}
| 638 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/Thresholded1DGaussianMix.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Random;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
 * Runs a ThresholdedRandomCutForest over a one-dimensional Gaussian mixture
 * stream and prints the detected anomalies alongside the injected
 * distribution changes.
 */
public class Thresholded1DGaussianMix implements Example {

    public static void main(String[] args) throws Exception {
        new Thresholded1DGaussianMix().run();
    }

    @Override
    public String command() {
        return "Thresholded_1D_Gaussian_example";
    }

    @Override
    public String description() {
        // typo fix: "gassian" -> "gaussian"
        return "Thresholded one dimensional gaussian mixture Example";
    }

    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 1;
        int count = 0;
        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.TIME_AUGMENTED)
                .build();
        long seed = new Random().nextLong();
        System.out.println("Anomalies would correspond to a run, based on a change of state.");
        System.out.println("Each change is normal <-> anomaly; so after the second change the data is normal");
        System.out.println("seed = " + seed);
        // NOTE(review): `seed` is printed but not used below -- the mixture data
        // uses seed 0; confirm whether generateTestDataWithKey should take it
        NormalMixtureTestData normalMixtureTestData = new NormalMixtureTestData(10, 1.0, 50, 2.0, 0.01, 0.1);
        MultiDimDataWithKey dataWithKeys = normalMixtureTestData.generateTestDataWithKey(dataSize, 1, 0);
        int keyCounter = 0;
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor result = forest.process(point, count);
            // NOTE(review): the two change checks below look redundant -- at most
            // one fires per iteration depending on whether getInternalTimeStamp()
            // coincides with count; confirm which is intended and drop the other
            if (keyCounter < dataWithKeys.changeIndices.length
                    && result.getInternalTimeStamp() == dataWithKeys.changeIndices[keyCounter]) {
                System.out.println("timestamp " + (result.getInputTimestamp()) + " CHANGE");
                ++keyCounter;
            }
            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
                System.out.println("timestamp " + (count) + " CHANGE ");
                ++keyCounter;
            }
            if (result.getAnomalyGrade() != 0) {
                System.out.print("timestamp " + (count) + " RESULT value ");
                for (int i = 0; i < baseDimensions; i++) {
                    System.out.print(result.getCurrentInput()[i] + ", ");
                }
                System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
                if (result.isExpectedValuesPresent()) {
                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                        // anomaly attributed to an earlier point in the shingle
                        System.out.print(-result.getRelativeIndex() + " steps ago, instead of ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getPastValues()[i] + ", ");
                        }
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
                                System.out.print("( "
                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    } else {
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
                                System.out.print("( "
                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    }
                }
                System.out.println();
            }
            ++count;
        }
    }
}
| 639 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedForecast.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import static java.lang.Math.min;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
public class ThresholdedForecast implements Example {
public static void main(String[] args) throws Exception {
new com.amazon.randomcutforest.examples.parkservices.ThresholdedForecast().run();
}
@Override
public String command() {
return "Thresholded_Forecast_example";
}
@Override
public String description() {
return "Example of Forecast using Thresholded RCF";
}
@Override
public void run() throws Exception {
int sampleSize = 256;
int baseDimensions = 1;
long seed = 100L;
int length = 4 * sampleSize;
int outputAfter = 128;
// as the ratio of amplitude (signal) to noise is changed, the estimation range
// in forecast
// (or any other inference) should increase
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,
baseDimensions, true);
System.out.println(dataWithKeys.changes.length + " anomalies injected ");
// horizon/lookahead can be larger than shingleSize for transformations that do
// not
// involve differencing -- but longer horizon would have larger error
int horizon = 60;
int shingleSize = 30;
// if the useSlope is set as true then it is recommended to use NORMALIZE or
// SUBTRACT_MA as
// transformation methods to adjust to the linear drift
ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(baseDimensions * shingleSize).precision(Precision.FLOAT_32).randomSeed(seed)
.internalShinglingEnabled(true).shingleSize(shingleSize).outputAfter(outputAfter)
.transformMethod(TransformMethod.NORMALIZE).build();
if (forest.getTransformMethod() == TransformMethod.NORMALIZE_DIFFERENCE
|| forest.getTransformMethod() == TransformMethod.DIFFERENCE) {
// single step differencing will not produce stable forecasts over long horizons
horizon = min(horizon, shingleSize / 2 + 1);
}
double[] error = new double[horizon];
double[] lowerError = new double[horizon];
double[] upperError = new double[horizon];
for (int j = 0; j < dataWithKeys.data.length; j++) {
// forecast first; change centrality to achieve a control over the sampling
// setting centrality = 0 would correspond to random sampling from the leaves
// reached by
// impute visitor
// the following prints
// <sequenceNo> <predicted_next_value> <likely_upper_bound> <likely_lower_bound>
// where the sequence number varies between next-to-be-read .. (next + horizon
// -1 )
//
// Every new element corresponds to a new set of horizon forecasts; we measure
// the
// errors keeping the leadtime fixed.
//
// verify that forecast is done before seeing the actual value (in the process()
// function)
//
TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
RangeVector forecast = extrapolate.rangeVector;
for (int i = 0; i < horizon; i++) {
System.out.println(
(j + i) + " " + forecast.values[i] + " " + forecast.upper[i] + " " + forecast.lower[i]);
// compute errors
if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {
double t = dataWithKeys.data[j + i][0] - forecast.values[i];
error[i] += t * t;
t = dataWithKeys.data[j + i][0] - forecast.lower[i];
lowerError[i] += t * t;
t = dataWithKeys.data[j + i][0] - forecast.upper[i];
upperError[i] += t * t;
}
}
System.out.println();
System.out.println();
forest.process(dataWithKeys.data[j], j);
}
System.out.println(forest.getTransformMethod().name() + " RMSE (as horizon increases) ");
for (int i = 0; i < horizon; i++) {
double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
System.out.print(Math.sqrt(t) + " ");
}
System.out.println();
System.out.println("RMSE Lower (as horizon increases)");
for (int i = 0; i < horizon; i++) {
double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
System.out.print(Math.sqrt(t) + " ");
}
System.out.println();
System.out.println("RMSE Upper (as horizon increases)");
for (int i = 0; i < horizon; i++) {
double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
System.out.print(Math.sqrt(t) + " ");
}
System.out.println();
}
} | 640 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedImpute.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Demonstrates anomaly detection with missing data: roughly 20% of the
 * observations are dropped at random and a {@link ThresholdedRandomCutForest}
 * in STREAMING_IMPUTE mode fills in the gaps from irregular timestamps.
 */
public class ThresholdedImpute implements Example {

    public static void main(String[] args) throws Exception {
        new ThresholdedImpute().run();
    }

    @Override
    public String command() {
        return "Thresholded_Imputation_example";
    }

    @Override
    public String description() {
        return "Thresholded Imputation Example";
    }

    /**
     * Streams a synthetic series, randomly dropping observations (never at an
     * injected change point, so every change remains observable) and prints the
     * anomalies flagged by the forest.
     *
     * @throws Exception if the underlying forest throws
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 4 * sampleSize;

        int baseDimensions = 1;
        long count = 0;
        int dropped = 0;

        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).imputationMethod(ImputationMethod.RCF)
                .forestMode(ForestMode.STREAMING_IMPUTE).transformMethod(TransformMethod.NORMALIZE_DIFFERENCE)
                .autoAdjust(true).build();

        long seed = new Random().nextLong();
        Random noisePRG = new Random(0);

        System.out.println("seed = " + seed);
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                100, 5, seed, baseDimensions);

        // as we loop over the data we drop observations with probability 0.2;
        // as a result the predictor-corrector method is likely more error-prone,
        // since the number of entries to be imputed is itself an estimate. The
        // overall method may have runaway effects if more values are dropped.
        int keyCounter = 0;
        for (double[] point : dataWithKeys.data) {
            // true exactly when this sequence index carries an injected change;
            // such points are never dropped (the original code contained an
            // unreachable "dropped change" print because of this guarantee)
            boolean atChangePoint = keyCounter < dataWithKeys.changeIndices.length
                    && count == dataWithKeys.changeIndices[keyCounter];
            if (noisePRG.nextDouble() < 0.2 && !atChangePoint) {
                // observation dropped; the forest will impute it from timestamps
                dropped++;
            } else {
                // timestamps are ~100 apart with a small jitter in [-5, 13]
                long newStamp = 100 * count + 2 * noisePRG.nextInt(10) - 5;
                AnomalyDescriptor result = forest.process(point, newStamp);

                if (atChangePoint) {
                    System.out.println("sequence " + (count) + " INPUT " + Arrays.toString(point) + " CHANGE "
                            + Arrays.toString(dataWithKeys.changes[keyCounter]));
                    ++keyCounter;
                }

                if (result.getAnomalyGrade() != 0) {
                    System.out.print("sequence " + (count) + " RESULT value ");
                    for (int i = 0; i < baseDimensions; i++) {
                        System.out.print(result.getCurrentInput()[i] + ", ");
                    }
                    System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");

                    if (result.isExpectedValuesPresent()) {
                        if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                            // the anomaly actually began a few steps in the past
                            System.out.print(-result.getRelativeIndex() + " steps ago, instead of ");
                            for (int i = 0; i < baseDimensions; i++) {
                                System.out.print(result.getPastValues()[i] + ", ");
                            }
                            System.out.print("expected ");
                            for (int i = 0; i < baseDimensions; i++) {
                                System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                                if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
                                    System.out.print(
                                            "( " + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i])
                                                    + " ) ");
                                }
                            }
                        } else {
                            System.out.print("expected ");
                            for (int i = 0; i < baseDimensions; i++) {
                                System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                                if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
                                    System.out.print(
                                            "( " + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i])
                                                    + " ) ");
                                }
                            }
                        }
                    }
                    System.out.println();
                }
            }
            ++count;
        }
        System.out.println("Dropped " + dropped + " out of " + count);
    }
}
| 641 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/NumericGLADexample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;
import static java.lang.Math.PI;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.Random;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
* The following example demonstrates clustering based anomaly detection for
* numeric vectors. The clustering can use an arbitrary distance metric (but it
* has no mechanism to verify if the function provided is a metric beyond
* checking that distances are non-negative; improper implementations of
* distances can produce uninterpretable results). The clustering corresponds to
* clustering a recency biased sample of points (using the exact same as RCF)
* and clustering using multi-centroid method (CURE algorithm).
*
* There is a natural question that given that this is the RCF library, how does
* this clustering based algorithm perform vis-a-vis RCF. First, RCF is
* preferred/natural for shingled/sequenced data, e.g., in analysis of time
* series. Simple clustering of shingles do not seem to provide similar benefit.
* In fact, even for shinglesize 1, which correponds to time dependent
* population analysis, the recursive decomposition provided by RCF can provide
* a richer detail (even though RCF naturally considers the L1/Manhattan
* metric). That recursive decomposition can be viewed as a (randomized) partion
* based clustering. That distance function is used to compute the DensityOutput
* in RCF. Multilevel clustering is known to be more useful than simple
* clustering in many applications. Here we show such an application which
*
* (i) shows an example use of GlobalLocalAnomalyDetector (GLAD) for dynamic
* data as well as
*
* (ii) a comparable use using a new ForestMode.DISTANCE exposed for RCF.
*
* RCF seems to perform better for this simple two dimensional dynamic case. At
* the same time, the new clusering based algorithm works for generic types with
* just a distance function. In applications where distances are meaningful and
* key, such geo etc., user-defined distance based anomalies can be extremely
* beneficial. If the data can be mapped to explicit vectors then perhaps RCF
* and its multi-level partitioning can provide more useful insights.
*
* Try the following in a visualizer. For example in vanilla gnuplot try
*
* set terminal gif transparent animate delay 5
*
* set size square
*
* set output "test.gif"
*
* do for [i = 0:359] { plot [-15:15][-15:15] "clustering_example" i i u 1:2:3 w
* p palette pt 7 t "" }
*
*
* Try the above/equivalent for setting printFlaggedGLAD = true (setting
* printFlaggedRCF = false), or to see the data, printData = true. Try changing
* the number of blades in the fan, the zFactor setting etc.
*/
public class NumericGLADexample implements Example {

    public static void main(String[] args) throws Exception {
        new NumericGLADexample().run();
    }

    @Override
    public String command() {
        return "An example of Global-Local Anomaly Detector on numeric vectors";
    }

    @Override
    public String description() {
        return "An example of Global-Local Anomaly Detector on numeric vectors";
    }

    /**
     * Rotates a multi-fan point cloud through 360 degrees, occasionally injecting
     * points placed decidedly away from any fan, and compares the detections of
     * (i) the clustering based GlobalLocalAnomalyDetector and (ii) a
     * ThresholdedRandomCutForest using DISTANCE scoring. Flagged points (and,
     * depending on the print flags, the raw data) are appended to the file
     * "clustering_example" for visualization.
     *
     * @throws Exception if the visualization file cannot be written
     */
    @Override
    public void run() throws Exception {
        long randomSeed = new Random().nextLong();
        System.out.println("Seed " + randomSeed);
        // we would be sending dataSize * 360 vectors overall
        int dataSize = 2000;
        double range = 10.0;
        // numberOfFans corresponds to the number of clusters
        int numberOfFans = 3;
        double[][] data = shiftedEllipse(dataSize, 7, range / 2, numberOfFans);

        // confusion-matrix counters for GLAD ...
        int truePos = 0;
        int falsePos = 0;
        int falseNeg = 0;
        // ... and for RCF distance mode
        int truePosRCF = 0;
        int falsePosRCF = 0;
        int falseNegRCF = 0;

        int reservoirSize = dataSize;
        // this ensures that the points are flushed out (albeit randomly) during the
        // rotation
        double timedecay = 1.0 / reservoirSize;
        GlobalLocalAnomalyDetector<float[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)
                .numberOfRepresentatives(3).timeDecay(timedecay).capacity(reservoirSize).build();
        reservoir.setGlobalDistance(Summarizer::L2distance);
        double zFactor = 6.0; // six sigma deviation; seems to work best
        reservoir.setZfactor(zFactor);

        ThresholdedRandomCutForest test = ThresholdedRandomCutForest.builder().dimensions(2).shingleSize(1)
                .randomSeed(77).timeDecay(timedecay).scoringStrategy(ScoringStrategy.DISTANCE).build();
        test.setZfactor(zFactor); // using the same zFactor for apples to apples comparison

        String name = "clustering_example";
        boolean printData = true;
        boolean printAnomalies = false;
        // use one or the other prints below
        boolean printFlaggedRCF = false;
        boolean printFlaggedGLAD = true;
        Random noiseGen = new Random(randomSeed + 1);

        // try-with-resources guarantees the writer is flushed and closed, even on
        // an exception; otherwise buffered output could be lost
        try (BufferedWriter file = new BufferedWriter(new FileWriter(name))) {
            for (int degree = 0; degree < 360; degree += 1) {
                int index = 0;
                while (index < data.length) {
                    boolean injected = false;
                    float[] vec;
                    if (noiseGen.nextDouble() < 0.005) {
                        injected = true;
                        double[] candAnomaly = new double[2];
                        // generate points along the x axis
                        candAnomaly[0] = (range / 2 * noiseGen.nextDouble() + range / 2);
                        candAnomaly[1] = 0.1 * (2.0 * noiseGen.nextDouble() - 1.0);
                        int antiFan = noiseGen.nextInt(numberOfFans);
                        // rotate to be 90-180 degrees away -- these are decidedly anomalous
                        vec = toFloatArray(rotateClockWise(candAnomaly,
                                -2 * PI * (degree + 180 * (1 + 2 * antiFan) / numberOfFans) / 360));
                        if (printAnomalies) {
                            file.append(vec[0] + " " + vec[1] + " " + 0.0 + "\n");
                        }
                    } else {
                        // a regular point, rotated to the current angle of the fan
                        vec = toFloatArray(rotateClockWise(data[index], -2 * PI * degree / 360));
                        if (printData) {
                            file.append(vec[0] + " " + vec[1] + " " + 0.0 + "\n");
                        }
                        ++index;
                    }
                    GenericAnomalyDescriptor<float[]> result = reservoir.process(vec, 1.0f, null, true);

                    AnomalyDescriptor res = test.process(toDoubleArray(vec), 0L);
                    double grade = res.getAnomalyGrade();
                    // tally both detectors against the ground truth ("injected")
                    if (injected) {
                        if (result.getAnomalyGrade() > 0) {
                            ++truePos;
                        } else {
                            ++falseNeg;
                        }
                        if (grade > 0) {
                            ++truePosRCF;
                        } else {
                            ++falseNegRCF;
                        }
                    } else {
                        if (result.getAnomalyGrade() > 0) {
                            ++falsePos;
                        }
                        if (grade > 0) {
                            ++falsePosRCF;
                        }
                    }
                    if (printFlaggedRCF && grade > 0) {
                        file.append(vec[0] + " " + vec[1] + " " + grade + "\n");
                    } else if (printFlaggedGLAD && result.getAnomalyGrade() > 0) {
                        file.append(vec[0] + " " + vec[1] + " " + result.getAnomalyGrade() + "\n");
                    }
                }
                if (printAnomalies || printData || printFlaggedRCF || printFlaggedGLAD) {
                    // blank records separate the per-degree frames for gnuplot
                    file.append("\n");
                    file.append("\n");
                }
                if (falsePos + truePos == 0) {
                    throw new IllegalStateException("GLAD flagged no points; accounting would be vacuous");
                }
                checkArgument(falseNeg + truePos == falseNegRCF + truePosRCF, " incorrect accounting");
                System.out.println(" at degree " + degree + " injected " + (truePos + falseNeg));
                System.out.print("Precision = " + precision(truePos, falsePos));
                System.out.println(" Recall = " + recall(truePos, falseNeg));
                System.out.print("RCF Distance Mode Precision = " + precision(truePosRCF, falsePosRCF));
                System.out.println(" RCF Distance Mode Recall = " + recall(truePosRCF, falseNegRCF));
            }
        }
    }

    /**
     * Generates {@code dataSize} points forming {@code fans} elongated blades
     * rotated uniformly around the origin, each shifted away from the center.
     *
     * @param dataSize number of points to generate
     * @param seed     seed for the Gaussian mixture generator
     * @param shift    distance of each blade from the origin
     * @param fans     number of blades
     * @return a dataSize x 2 array of points
     */
    public double[][] shiftedEllipse(int dataSize, int seed, double shift, int fans) {
        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
        double[][] data = generator.generateTestData(dataSize, 2, seed);
        Random prg = new Random(0);
        for (int i = 0; i < dataSize; i++) {
            int nextFan = prg.nextInt(fans);
            // scale: narrow in y, stretch in x
            data[i][1] *= 1.0 / fans;
            data[i][0] *= 2.0;
            // shift away from the origin
            data[i][0] += shift + 1.0 / fans;
            data[i] = rotateClockWise(data[i], 2 * PI * nextFan / fans);
        }
        return data;
    }

    /** Precision = TP / (TP + FP); defined as 1 when there are no positives. */
    double precision(int truePos, int falsePos) {
        return (truePos + falsePos > 0) ? 1.0 * truePos / (truePos + falsePos) : 1.0;
    }

    /** Recall = TP / (TP + FN); defined as 1 when there are no actual anomalies. */
    double recall(int truePos, int falseNeg) {
        return (truePos + falseNeg > 0) ? 1.0 * truePos / (truePos + falseNeg) : 1.0;
    }
}
| 642 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/LowNoisePeriodic.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
/**
 * Streams a highly periodic, low-noise signal with occasional injected
 * anomalies through a ThresholdedRandomCutForest and tallies how many
 * anomalies are caught on time, caught late, or incorrectly flagged.
 */
public class LowNoisePeriodic implements Example {

    public static void main(String[] args) throws Exception {
        new LowNoisePeriodic().run();
    }

    @Override
    public String command() {
        return "Thresholded_Multi_Dim_example with low noise";
    }

    @Override
    public String description() {
        return "Thresholded Multi Dimensional Example with Low Noise";
    }

    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 8;
        int numberOfTrees = 50;
        int sampleSize = 256;
        int dataSize = 100000;
        // no anomalies are injected during the first initialSegment points
        int initialSegment = 100;
        // one period of the repeating signal (24 levels)
        double[] reference = new double[] { 1.0f, 3.0f, 5.0f, 7.0f, 9.0f, 11.0f, 9.5f, 8.5f, 7.5f, 6.5f, 6.0f, 6.5f,
                7.0f, 7.5f, 9.5f, 11.0f, 12.5f, 10.5f, 8.5f, 7.0f, 5.0f, 3.0f, 2.0f, 1.0f };
        // the noise should leave sufficient gap between the consecutive levels
        double noise = 0.25;
        // the noise will be amplified by something within [factorRange, 2*factorRange]
        // increase should lead to increased precision--recall; likewise decrease must
        // also
        // lead to decreased precision recall; if the factor = 1, then the anomalies are
        // information theoretically almost non-existent
        double anomalyFactor = 10;
        double slope = 0.2 * sampleSize
                * (Arrays.stream(reference).max().getAsDouble() - Arrays.stream(reference).min().getAsDouble()) / 50000;
        // to analyse without linear shift; comment out the line below and change the
        // slope above as desired
        slope = 0;
        double anomalyRate = 0.005;
        long seed = new Random().nextLong();
        System.out.println(" Seed " + seed);
        Random rng = new Random(seed);
        // tallies: injected anomalies, and how the detector classified points
        int numAnomalies = 0;
        int incorrectlyFlagged = 0;
        int correct = 0;
        int late = 0;
        // change the transformation below to experiment;
        // if slope != 0 then NONE will have poor result
        // both of the difference operations also introduce many errors
        TransformMethod method = TransformMethod.NORMALIZE;
        int dimensions = shingleSize;
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(0)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD).startNormalization(32)
                .transformMethod(method).outputAfter(32).initialAcceptFraction(0.125)
                // for 1D data weights should not alter results significantly (if in reasonable
                // range say [0.1,10]
                // weights are not recommended for 1D, but retained here for illustration
                // as well as a mechanism to verify that results do not vary significantly
                .weights(new double[] { 1.0 })
                // change to transformDecay( 1.0/(desired interval length)) to perform
                // a moving average smoothing; the default is 1.0/sampleSize
                // .transformDecay(1.0/sampleSize)
                .build();

        // the following ignore anomalies that are shifted up or down by a fixed amount
        // from the internal prediction of RCF. Default is 0.001
        // the below will show results like
        // missed current value 3.0 (say X), intended 1.0 (equiv., X - noise), because
        // the shift up in the actual was not 2*noise
        // forest.setIgnoreNearExpectedFromAbove( new double [] {2*noise});
        // or to suppress all anomalies that are shifted up from predicted
        // for any sequence; using Double.MAX_VALUE may cause overflow
        // forest.setIgnoreNearExpectedFromAbove(new double [] {Float.MAX_VALUE});
        // the below will show results like
        // missed current value 5.5 (say Y), intended 7.5 (equiv., Y + noise) because
        // the shift down in the actual was not 2*noise, in effect we suppress all
        // anomalies
        // forest.setIgnoreNearExpectedFromBelow(new double [] {noise*2});
        // the following suppresses all anomalies that shifted down compared to
        // predicted
        // for any sequence
        // forest.setIgnoreNearExpectedFromBelow(new double [] {Float.MAX_VALUE});

        double[] value = new double[] { 0.0 };
        int lastAnomaly = 0;
        for (int count = 0; count < dataSize; count++) {
            boolean anomaly = false;
            double intendedValue = reference[(count + 4) % reference.length] + slope * count;
            // extremely periodic signal -- note that there is no periodicity detection
            value[0] = intendedValue;
            if (rng.nextDouble() < anomalyRate && count > initialSegment) {
                // inject an anomaly: shift by an amplified noise amount, either direction
                double anomalyValue = noise * anomalyFactor * (1 + rng.nextDouble());
                value[0] += (rng.nextDouble() < 0.5) ? -anomalyValue : anomalyValue;
                anomaly = true;
                ++numAnomalies;
            } else {
                // regular point: add uniform noise in [-noise, noise]
                value[0] += (2 * rng.nextDouble() - 1) * noise;
            }
            AnomalyDescriptor result = forest.process(new double[] { value[0] }, 0);
            if (result.getAnomalyGrade() > 0) {
                System.out.print(count + " " + result.getAnomalyGrade() + " ");
                if (result.getRelativeIndex() < 0) {
                    // the detector attributes the anomaly to a point a few steps back;
                    // it counts as "late" only if that point was the injected anomaly
                    System.out.print((lastAnomaly == count + result.getRelativeIndex()) + " "
                            + (-result.getRelativeIndex()) + " steps ago,");
                    if (lastAnomaly == count + result.getRelativeIndex()) {
                        late++;
                    } else {
                        incorrectlyFlagged++;
                    }
                } else {
                    System.out.print(anomaly);
                    if (anomaly) {
                        correct++;
                    } else {
                        incorrectlyFlagged++;
                    }
                }
                System.out.print(" current value " + value[0]);
                if (result.isExpectedValuesPresent()) {
                    System.out.print(" expected " + result.getExpectedValuesList()[0][0] + " instead of "
                            + result.getPastValues()[0]);
                }
                System.out.print(" score " + result.getRCFScore() + " threshold " + result.getThreshold());
                System.out.println();
            } else if (anomaly) {
                // injected anomaly that the detector did not flag
                System.out.println(count + " missed current value " + value[0] + ", intended " + intendedValue
                        + ", score " + result.getRCFScore() + ", threshold " + result.getThreshold());
            }
            if (anomaly) {
                lastAnomaly = count;
            }
        }
        System.out.println("Anomalies " + numAnomalies + ", correct " + correct + ", late " + late
                + ", incorrectly flagged " + incorrectlyFlagged);
    }
}
| 643 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/SequentialForecastExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.SequentialAnalysis;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Demonstrates SequentialAnalysis: a single batch pass over a synthetic series
 * that produces anomaly descriptors, printed alongside the injected changes.
 */
public class SequentialForecastExample implements Example {

    public static void main(String[] args) throws Exception {
        new SequentialForecastExample().run();
    }

    @Override
    public String command() {
        return "Sequential_analysis_example";
    }

    @Override
    public String description() {
        return "Sequential Analysis Example";
    }

    /**
     * Generates data with injected changes, runs the sequential forecast-based
     * analysis, and prints each flagged anomaly interleaved with the injected
     * change points that precede it.
     *
     * @throws Exception if the underlying analysis throws
     */
    @Override
    public void run() throws Exception {
        // model configuration
        int shingleSize = 8;
        int numberOfTrees = 50; // illustrative; not passed to SequentialAnalysis below
        int sampleSize = 256;
        int dataSize = 4 * sampleSize;

        // the code runs for larger values as well, but interpretations of
        // multivariate forecasting vary
        int baseDimensions = 1;

        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                100, 5, seed, baseDimensions);

        double timeDecay = 1.0 / (10 * sampleSize);
        int forecastHorizon = 2 * shingleSize;
        int errorHorizon = 10 * forecastHorizon;
        List<AnomalyDescriptor> anomalies = SequentialAnalysis.forecastWithAnomalies(dataWithKeys.data, shingleSize,
                sampleSize, timeDecay, TransformMethod.NONE, forecastHorizon, errorHorizon, 42L).getAnomalies();

        int changePointer = 0;
        for (AnomalyDescriptor descriptor : anomalies) {
            // first print every injected change at or before this anomaly's timestamp
            while (changePointer < dataWithKeys.changeIndices.length
                    && dataWithKeys.changeIndices[changePointer] <= descriptor.getInternalTimeStamp()) {
                System.out.println("timestamp " + dataWithKeys.changeIndices[changePointer] + " CHANGE "
                        + Arrays.toString(dataWithKeys.changes[changePointer]));
                ++changePointer;
            }
            if (descriptor.getAnomalyGrade() == 0) {
                continue;
            }
            System.out.print("timestamp " + descriptor.getInternalTimeStamp() + " RESULT value ");
            for (int d = 0; d < baseDimensions; d++) {
                System.out.print(descriptor.getCurrentInput()[d] + ", ");
            }
            System.out.print("score " + descriptor.getRCFScore() + ", grade " + descriptor.getAnomalyGrade() + ", ");
            // true when the anomaly is attributed to an earlier point in the shingle
            boolean pointsToPast = descriptor.getRelativeIndex() != 0 && descriptor.isStartOfAnomaly();
            if (pointsToPast) {
                System.out.print(-descriptor.getRelativeIndex() + " step(s) ago, ");
            }
            if (descriptor.isExpectedValuesPresent()) {
                if (pointsToPast) {
                    System.out.print("instead of ");
                    for (int d = 0; d < baseDimensions; d++) {
                        System.out.print(descriptor.getPastValues()[d] + ", ");
                    }
                    printExpected(descriptor.getPastValues(), descriptor.getExpectedValuesList()[0], baseDimensions);
                } else {
                    printExpected(descriptor.getCurrentInput(), descriptor.getExpectedValuesList()[0], baseDimensions);
                }
            } else {
                System.out.print("insufficient data to provide expected values");
            }
            System.out.println();
        }
    }

    /**
     * Prints "expected v, " per dimension, appending "( observed - expected ) "
     * whenever the observed value differs from the expectation.
     */
    private static void printExpected(double[] observed, double[] expected, int baseDimensions) {
        System.out.print("expected ");
        for (int d = 0; d < baseDimensions; d++) {
            System.out.print(expected[d] + ", ");
            if (observed[d] != expected[d]) {
                System.out.print("( " + (observed[d] - expected[d]) + " ) ");
            }
        }
    }
}
| 644 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedMultiDimensionalExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.config.CorrectionMode;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Example that runs a {@code ThresholdedRandomCutForest} over a synthetic
 * multi-dimensional stream with injected changes and prints, for each detected
 * anomaly, the observed values, score, grade, and the values the model
 * expected instead. Suppressed (corrected) candidate anomalies are also
 * reported when {@code verboseSupression} is on.
 */
public class ThresholdedMultiDimensionalExample implements Example {

    public static void main(String[] args) throws Exception {
        new ThresholdedMultiDimensionalExample().run();
    }

    @Override
    public String command() {
        return "Thresholded_Multi_Dim_example";
    }

    @Override
    public String description() {
        return "Thresholded Multi Dimensional Example";
    }

    /**
     * Builds the forest, generates the synthetic data (with known change
     * points), streams the data through the detector, and prints anomalies
     * alongside the ground-truth CHANGE events for comparison.
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int shingleSize = 8;
        int numberOfTrees = 50;
        int sampleSize = 256;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 3;
        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder()
                // dimensions is shingleSize x the number of base dimensions in input (in this
                // case 3)
                .dimensions(dimensions)
                // shingle size is the context (sliding) window of last contiguous observations
                .shingleSize(shingleSize)
                // fixed random seed would produce deterministic/reproducible results
                .randomSeed(0)
                // use about 50; more than 100 may not be useful
                .numberOfTrees(numberOfTrees)
                // samplesize should be large enough to cover the desired phenomenon; for a
                // 5-minute
                // interval reading if one is interested investigating anomalies over a weekly
                // pattern
                // there are 12 * 24 * 7 different
                // 5-minute intervals in a week. That being said, larger samplesize is a larger
                // model.
                .sampleSize(sampleSize)
                // shingling is now performed internally by default -- best not to change it
                // .internalShinglingEnabled(true)
                // change to different streaming transformations that are performed on the fly
                // note the transformation affects the characteristics of the anomaly that can
                // be
                // detected
                .transformMethod(TransformMethod.NORMALIZE)
                // the following would increase precision at the cost of recall
                // for the reverse, try ScoringStrategy.MULTI_MODE_RECALL
                // the default strategy is an attempted goldilocks version and may not work
                // for all data
                // .scoringStrategy(ScoringStrategy.MULTI_MODE)
                // the following will learn data (concept) drifts (also referred to as level
                // shifts) automatically and
                // stop repeated alarms. The reverse is also true -- to detect level shifts, set
                // the following to false
                // and test for continuous alarms
                .autoAdjust(true)
                // the following is a much coarser tool to eliminate repeated alarms
                // the descriptor below 'result' will contain information about different
                // correction/suppression modes
                // .alertOnce(true)
                .build();
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        // basic amplitude of the waves -- the parameter will be randomly scaled up
        // between 0-20 percent
        double amplitude = 100.0;
        // the amplitude of random noise it will be +ve/-ve uniformly at random
        double noise = 5.0;
        // the following controls the ratio of anomaly magnitude to noise
        // notice amplitude/noise would determine signal-to-noise ratio
        double anomalyFactor = 5;
        // the following determines if a random linear trend should be added
        boolean useSlope = false;
        // provide explanations and alternatives considered for non-anomalies
        boolean verboseSupression = true;
        // change the last argument seed for a different run
        // the extra (shingleSize - 1) observations are consumed warming up the shingle
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 24,
                amplitude, noise, seed, baseDimensions, anomalyFactor, useSlope);
        int keyCounter = 0;
        int count = 0;
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor result = forest.process(point, 0L);
            // print the ground-truth injected change (if any) at this timestamp so the
            // reader can compare it with the detector's output below
            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
                System.out.println(
                        "timestamp " + (count) + " CHANGE " + Arrays.toString(dataWithKeys.changes[keyCounter]));
                ++keyCounter;
            }
            if (result.getAnomalyGrade() != 0) {
                // a nonzero grade indicates an anomaly at (or shortly before) this timestamp
                System.out.print("timestamp " + (count) + " RESULT value ");
                for (int i = 0; i < baseDimensions; i++) {
                    System.out.print(point[i] + ", ");
                }
                System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
                if (result.isExpectedValuesPresent()) {
                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
                        // the anomaly is attributed to an earlier point in the shingle:
                        // report the past values and what was expected in their place
                        System.out.print(-result.getRelativeIndex() + " steps ago, instead of ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getPastValues()[i] + ", ");
                        }
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
                                // show the deviation only for the coordinates that differ
                                System.out.print("( "
                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    } else {
                        // the anomaly is at the current point; compare it with expectations
                        System.out.print("expected ");
                        for (int i = 0; i < baseDimensions; i++) {
                            System.out.print(result.getExpectedValuesList()[0][i] + ", ");
                            if (point[i] != result.getExpectedValuesList()[0][i]) {
                                System.out.print("( inferred change = "
                                        + (point[i] - result.getExpectedValuesList()[0][i]) + " ) ");
                            }
                        }
                    }
                }
                System.out.println();
            } else if (verboseSupression && result.getCorrectionMode() != CorrectionMode.NONE) {
                // not an anomaly, but only because a correction/suppression mode fired
                System.out.println(count + " corrected via " + result.getCorrectionMode().name());
            }
            ++count;
        }
    }
}
| 645 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/RCFCasterExample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.ForecastDescriptor;
import com.amazon.randomcutforest.parkservices.RCFCaster;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
* The following example demonstrates the self calibration of RCFCast. Change
* various parameters -- we recommend keeping baseDimension = 1 (for single
* variate forecasting -- multivariate forecasting can be a complicated
 * endeavour. The value shiftForViz is for easier visualization.
*
* Once the datafile calibration_example is produced consider plotting it. For
* example to use gnuplot, to generate a quick and dirty gif file, consider
* these commands set terminal gif transparent animate delay 5 set output
* "example.gif" do for [i = 0:3000:3] { (all the below in a single line) plot
* [0:1000][-100:500] "example" i 0 u 1:2 w l lc "black" t "Data (seen one at a
* time)", "example" index (i+3) u 1:2 w l lw 2 lc "blue" t " Online Forecast
* (future)", "example" i (i+2) u 1:(100*$8) w l lw 2 lc "magenta" t "Interval
* Accuracy %", "example" index (i+3) u 1:($4-$2):($3-$4) w filledcurves fc
* "blue" fs transparent solid 0.3 noborder t "Calibrated uncertainty range
* (future)", "example" index (i+2) u 1:7:6 w filledcurves fc "brown" fs
* transparent solid 0.5 noborder t "Observed error distribution range (past)",
* "example" i (i+1) u 1:2 w impulses t "", 0 lc "gray" t "", 100 lc "gray" t
* "", 80 lc "gray" t"" }
*
* Try different calibrations below to see the precision over the intervals. The
* struggle of past and new data would become obvious; however the algorithm
* would self-calibrate eventually. Changing the different values for
* transformDecay() would correspond to different moving average analysis.
*
*/
/**
 * Runs a self-calibrating {@code RCFCaster} over two concatenated synthetic
 * streams (with different period/amplitude) and writes the observations, the
 * observed error distributions, and the calibrated forecasts to a plain text
 * file named "example" suitable for plotting (see the class comment above).
 */
public class RCFCasterExample implements Example {

    public static void main(String[] args) throws Exception {
        new RCFCasterExample().run();
    }

    @Override
    public String command() {
        return "Calibrated RCFCast";
    }

    @Override
    public String description() {
        return "Calibrated RCFCast Example";
    }

    /**
     * Builds the caster, generates the data, and streams it through while
     * appending forecasts and error summaries to the output file.
     *
     * @throws Exception if forecasting or file I/O fails
     */
    @Override
    public void run() throws Exception {
        // Create and populate a random cut forest
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 2 * sampleSize;
        // Multi attribute forecasting is less understood than singe attribute
        // forecasting;
        // it is not always clear or easy to decide if multi-attribute forecasting is
        // reasonable
        // but the code below will run for multi-attribute case.
        int baseDimensions = 2;
        int forecastHorizon = 15;
        int shingleSize = 20;
        int outputAfter = 64;
        long seed = 2023L;
        double[][] fulldata = new double[2 * dataSize][];
        double shiftForViz = 200;
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 50, 50, 5, seed,
                baseDimensions, true);
        for (int i = 0; i < dataSize; i++) {
            fulldata[i] = Arrays.copyOf(dataWithKeys.data[i], baseDimensions);
            // shift the first coordinate upward so the plots do not overlap
            fulldata[i][0] += shiftForViz;
        }
        // changing both period and amplitude for fun; this forces the caster to
        // re-calibrate mid-stream
        MultiDimDataWithKey second = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 30, 5, seed + 1,
                baseDimensions, true);
        for (int i = 0; i < dataSize; i++) {
            fulldata[dataSize + i] = Arrays.copyOf(second.data[i], baseDimensions);
            fulldata[dataSize + i][0] += shiftForViz;
        }
        int dimensions = baseDimensions * shingleSize;
        // change this line to try other transforms; but the default is NORMALIZE
        // uncomment the transformMethod() below
        TransformMethod transformMethod = TransformMethod.NORMALIZE;
        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)
                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision)
                .anomalyRate(0.01).outputAfter(outputAfter).calibration(Calibration.MINIMAL)
                // the following affects the moving average in many of the transformations
                // the 0.02 corresponds to a half life of 1/0.02 = 50 observations
                // this is different from the timeDecay() of RCF; however it is a similar
                // concept
                .transformDecay(0.02).forecastHorizon(forecastHorizon).initialAcceptFraction(0.125).build();
        String name = "example";
        // try-with-resources guarantees the writer is flushed and closed even if
        // caster.process or printResult throws; the previous version leaked the
        // writer (and could lose buffered output) on any exception
        try (BufferedWriter file = new BufferedWriter(new FileWriter(name))) {
            // first block: the raw data, one "index value..." line per observation
            for (int j = 0; j < fulldata.length; j++) {
                file.append(j + " ");
                for (int k = 0; k < baseDimensions; k++) {
                    file.append(fulldata[j][k] + " ");
                }
                file.append("\n");
            }
            // blank lines delimit gnuplot data blocks
            file.append("\n");
            file.append("\n");
            // subsequent blocks: per-timestep forecasts and error summaries
            for (int j = 0; j < fulldata.length; j++) {
                ForecastDescriptor result = caster.process(fulldata[j], 0L);
                printResult(file, result, j, baseDimensions);
            }
        }
    }

    /**
     * Appends three gnuplot-style blocks for one timestep: a marker line, the
     * observed error statistics over the recent past, and the calibrated
     * forecast (with upper/lower bounds) over the horizon.
     *
     * @param file        the open writer for the output file
     * @param result      the descriptor returned by {@code RCFCaster.process}
     * @param current     the current timestep index
     * @param inputLength the number of base attributes per observation
     * @throws IOException if appending to the file fails
     */
    void printResult(BufferedWriter file, ForecastDescriptor result, int current, int inputLength) throws IOException {
        RangeVector forecast = result.getTimedForecast().rangeVector;
        float[] errorP50 = result.getObservedErrorDistribution().values;
        float[] upperError = result.getObservedErrorDistribution().upper;
        float[] lowerError = result.getObservedErrorDistribution().lower;
        DiVector rmse = result.getErrorRMSE();
        float[] mean = result.getErrorMean();
        float[] intervalPrecision = result.getIntervalPrecision();
        file.append(current + " " + 1000 + "\n");
        file.append("\n");
        file.append("\n");
        // block corresponding to the past; print the errors
        for (int i = forecast.values.length / inputLength - 1; i >= 0; i--) {
            file.append((current - i) + " ");
            for (int j = 0; j < inputLength; j++) {
                int k = i * inputLength + j;
                file.append(mean[k] + " " + rmse.high[k] + " " + rmse.low[k] + " " + errorP50[k] + " " + upperError[k]
                        + " " + lowerError[k] + " " + intervalPrecision[k] + " ");
            }
            file.append("\n");
        }
        file.append("\n");
        file.append("\n");
        // block corresponding to the future; the projections and the projected errors
        for (int i = 0; i < forecast.values.length / inputLength; i++) {
            file.append((current + i) + " ");
            for (int j = 0; j < inputLength; j++) {
                int k = i * inputLength + j;
                file.append(forecast.values[k] + " " + forecast.upper[k] + " " + forecast.lower[k] + " ");
            }
            file.append("\n");
        }
        file.append("\n");
        file.append("\n");
    }
}
| 646 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedPredictive.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.parkservices;
import java.util.Random;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Demonstrates predictive alerting over an aggregate of several independent
 * time series: each series gets its own {@code ThresholdedRandomCutForest},
 * the per-model forecasts are summed, and an alert fires when the summed
 * forecast is predicted to cross a fixed threshold. The predicted crossings
 * are then compared against the actual crossings of the summed observations.
 */
public class ThresholdedPredictive implements Example {

    public static void main(String[] args) throws Exception {
        new com.amazon.randomcutforest.examples.parkservices.ThresholdedPredictive().run();
    }

    @Override
    public String command() {
        return "Thresholded_Predictive_example";
    }

    @Override
    public String description() {
        return "Example of predictive forecast across multiple time series using ThresholdedRCF";
    }

    /**
     * Generates the synthetic series, trains one forest per series online, and
     * prints both predicted and actual threshold crossings of the summed signal.
     */
    @Override
    public void run() throws Exception {
        int sampleSize = 256;
        int baseDimensions = 1;
        int length = 4 * sampleSize;
        int outputAfter = 128;
        long seed = 2022L;
        Random random = new Random(seed);
        int numberOfModels = 10;
        MultiDimDataWithKey[] dataWithKeys = new MultiDimDataWithKey[numberOfModels];
        ThresholdedRandomCutForest[] forests = new ThresholdedRandomCutForest[numberOfModels];
        int[] period = new int[numberOfModels];
        // alert when the summed forecast/observation crosses this value
        double alertThreshold = 300;
        double lastActualSum = 0;
        int anomalies = 0;
        // each model gets its own series with a randomly chosen period in [40, 70]
        for (int k = 0; k < numberOfModels; k++) {
            period[k] = (int) Math.round(40 + 30 * random.nextDouble());
            dataWithKeys[k] = ShingledMultiDimDataWithKeys.getMultiDimData(length, period[k], 100, 10, seed,
                    baseDimensions, false);
            anomalies += dataWithKeys[k].changes.length;
        }
        System.out.println(anomalies + " anomalies injected ");
        int shingleSize = 10;
        int horizon = 20;
        // one forest per series; different random seeds, identical configuration
        for (int k = 0; k < numberOfModels; k++) {
            forests[k] = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(baseDimensions * shingleSize).precision(Precision.FLOAT_32).randomSeed(seed + k)
                    .internalShinglingEnabled(true).shingleSize(shingleSize).outputAfter(outputAfter)
                    .transformMethod(TransformMethod.NORMALIZE).build();
        }
        // latches that prevent repeating the same alert on consecutive steps
        boolean predictNextCrossing = true;
        boolean actualCrossingAlerted = false;
        boolean printPredictions = false;
        boolean printEvents = true;
        for (int i = 0; i < length; i++) {
            double[] prediction = new double[horizon];
            // any prediction needs sufficient data;
            // until then the forecast is best treated as 0
            if (i > sampleSize) {
                // sum the per-model forecasts for each step of the horizon
                for (int k = 0; k < numberOfModels; k++) {
                    RangeVector forecast = forests[k].extrapolate(horizon).rangeVector;
                    for (int t = 0; t < horizon; t++) {
                        prediction[t] += forecast.values[t];
                    }
                }
                // alert once per predicted crossing; re-arm when the forecast
                // at the end of the horizon drops back below the threshold
                if (prediction[horizon - 1] > alertThreshold && predictNextCrossing) {
                    if (printEvents) {
                        System.out.println("Currently at " + i + ", should cross " + alertThreshold + " at sequence "
                                + (i + horizon - 1));
                    }
                    predictNextCrossing = false;
                } else if (prediction[horizon - 1] < alertThreshold && !predictNextCrossing) {
                    predictNextCrossing = true;
                }
                if (printPredictions) {
                    for (int t = 0; t < horizon; t++) {
                        System.out.println((i + t) + " " + prediction[t]);
                    }
                    System.out.println();
                    System.out.println();
                }
            }
            // now look at actuals
            double sumValue = 0;
            for (int k = 0; k < numberOfModels; k++) {
                sumValue += dataWithKeys[k].data[i][0];
            }
            // a "real" crossing requires two consecutive sums above the threshold,
            // which filters out single-point spikes
            if (lastActualSum > alertThreshold && sumValue > alertThreshold) {
                if (!actualCrossingAlerted) {
                    if (printEvents) {
                        System.out.println(" Crossing " + alertThreshold + " at consecutive sequence indices " + (i - 1)
                                + " " + i);
                    }
                    actualCrossingAlerted = true;
                }
            } else if (sumValue < alertThreshold) {
                // re-arm the actual-crossing alert once the sum falls back
                actualCrossingAlerted = false;
            }
            lastActualSum = sumValue;
            // update model
            for (int k = 0; k < numberOfModels; k++) {
                forests[k].process(dataWithKeys[k].data[i], 0L);
            }
        }
    }
}
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicconfiguration/DynamicThroughput.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.dynamicconfiguration;
import java.time.Duration;
import java.time.Instant;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
 * Demonstrates the effect of the bounding-box cache fraction on throughput:
 * two identically seeded forests are fed the same stream, one with a varying
 * cache fraction, and the example verifies that their anomaly scores agree
 * exactly while reporting the wall-clock time of a timed scoring pass.
 */
public class DynamicThroughput implements Example {

    public static void main(String[] args) throws Exception {
        new DynamicThroughput().run();
    }

    @Override
    public String command() {
        return "dynamic_caching";
    }

    @Override
    public String description() {
        return "serialize a Random Cut Forest as a JSON string";
    }

    /**
     * For cache fractions 0, 0.25, ..., 1.0: build a fresh pair of identically
     * seeded forests, verify score agreement over a warm-up pass, then time a
     * second scoring pass.
     */
    @Override
    public void run() throws Exception {
        int dimensions = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_64;
        int dataSize = 10 * sampleSize;

        NormalMixtureTestData testData = new NormalMixtureTestData();
        // generate data once to eliminate caching issues
        testData.generateTestData(dataSize, dimensions);
        testData.generateTestData(sampleSize, dimensions);

        for (int i = 0; i < 5; i++) {
            RandomCutForest reference = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)
                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();
            RandomCutForest cached = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)
                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();
            // only the second forest's caching behavior varies across iterations
            cached.setBoundingBoxCacheFraction(i * 0.25);

            // warm-up pass: scores must agree since both forests share the seed
            int mismatches = feedAndCompare(testData.generateTestData(dataSize, dimensions), reference, cached);

            Instant start = Instant.now();
            mismatches += feedAndCompare(testData.generateTestData(sampleSize, dimensions), reference, cached);
            Instant finish = Instant.now();

            // validate that caching never changed a score before trusting the timing
            if (mismatches > 0) {
                throw new IllegalStateException("score mismatch");
            }
            System.out.println("So far so good! Caching fraction = " + (i * 0.25) + ", Time ="
                    + Duration.between(start, finish).toMillis() + " ms (note only one forest is changing)");
        }
    }

    /**
     * Scores each point with both forests, counts score disagreements beyond
     * floating-point tolerance, and updates both forests with the point.
     */
    private static int feedAndCompare(double[][] points, RandomCutForest first, RandomCutForest second) {
        int mismatches = 0;
        for (double[] point : points) {
            double firstScore = first.getAnomalyScore(point);
            double secondScore = second.getAnomalyScore(point);
            if (Math.abs(firstScore - secondScore) > 1e-10) {
                mismatches++;
            }
            first.update(point);
            second.update(point);
        }
        return mismatches;
    }
}
| 648 |
0 | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples | Create_ds/random-cut-forest-by-aws/Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicconfiguration/DynamicSampling.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.examples.dynamicconfiguration;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.examples.Example;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
 * Demonstrates dynamic adjustment of the time-decay (sampling) parameter:
 * a forest with a larger time decay adapts faster to distribution shifts,
 * and a serialized/deserialized copy of a forest can be given an even more
 * aggressive decay to force it to catch up.
 */
public class DynamicSampling implements Example {

    public static void main(String[] args) throws Exception {
        new DynamicSampling().run();
    }

    @Override
    public String command() {
        return "dynamic_sampling";
    }

    @Override
    public String description() {
        return "check dynamic sampling";
    }

    /**
     * Feeds three successive mixtures to identically seeded forests and prints
     * how many points scored above 1.0 for each, illustrating adaptation speed.
     */
    @Override
    public void run() throws Exception {
        int dimensions = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_64;
        int dataSize = 4 * sampleSize;

        RandomCutForest slowForest = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();
        RandomCutForest fastForest = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();
        // ten times the default decay: the second forest forgets history faster
        fastForest.setTimeDecay(10 * fastForest.getTimeDecay());

        NormalMixtureTestData generator = new NormalMixtureTestData();
        int slowHighScores = 0;
        int fastHighScores = 0;
        for (double[] point : generator.generateTestData(dataSize, dimensions)) {
            if (slowForest.getAnomalyScore(point) > 1.0) {
                slowHighScores++;
            }
            if (fastForest.getAnomalyScore(point) > 1.0) {
                fastHighScores++;
            }
            slowForest.update(point);
            fastForest.update(point);
        }
        // on stationary data the two counts should be roughly equal
        System.out.println("Unusual scores: forest one " + slowHighScores + ", second one " + fastHighScores);

        // shift the distribution; the faster-decaying forest should adapt sooner
        slowHighScores = fastHighScores = 0;
        generator = new NormalMixtureTestData(-3, 40);
        for (double[] point : generator.generateTestData(dataSize, dimensions)) {
            if (slowForest.getAnomalyScore(point) > 1.0) {
                slowHighScores++;
            }
            if (fastForest.getAnomalyScore(point) > 1.0) {
                fastHighScores++;
            }
            slowForest.update(point);
            fastForest.update(point);
        }
        System.out.println("Unusual scores: forest one " + slowHighScores + ", second one " + fastHighScores);

        // round-trip the slow forest through the mapper and crank its decay up
        // to force an adjustment to catch up
        slowHighScores = fastHighScores = 0;
        RandomCutForestMapper mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);
        RandomCutForest forcedCopy = mapper.toModel(mapper.toState(slowForest));
        forcedCopy.setTimeDecay(50 * slowForest.getTimeDecay());

        generator = new NormalMixtureTestData(-10, -40);
        int forcedHighScores = 0;
        for (double[] point : generator.generateTestData(dataSize, dimensions)) {
            if (slowForest.getAnomalyScore(point) > 1.0) {
                slowHighScores++;
            }
            if (fastForest.getAnomalyScore(point) > 1.0) {
                fastHighScores++;
            }
            if (forcedCopy.getAnomalyScore(point) > 1.0) {
                forcedHighScores++;
            }
            forcedCopy.update(point);
            slowForest.update(point);
            fastForest.update(point);
        }
        // both adjusted forests should show a similar rate of adjustment
        System.out.println("Unusual scores: forest one " + slowHighScores + ", second one " + fastHighScores
                + ", forced (first) " + forcedHighScores);
    }
}
| 649 |
0 | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/NormalMixtureTestData.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.testutils;
import java.util.Arrays;
import java.util.Random;
/**
* This class samples point from a mixture of 2 multi-variate normal
* distribution with covariance matrices of the form sigma * I. One of the
* normal distributions is considered the base distribution, the second is
* considered the anomaly distribution, and there are random transitions between
* the two.
*/
public class NormalMixtureTestData {

    // parameters of the "base" normal distribution (mean, std-dev per coordinate)
    private final double baseMu;
    private final double baseSigma;
    // parameters of the "anomaly" normal distribution
    private final double anomalyMu;
    private final double anomalySigma;
    // per-row probability of switching base -> anomaly and anomaly -> base
    private final double transitionToAnomalyProbability;
    private final double transitionToBaseProbability;

    /**
     * Fully parameterized constructor.
     *
     * @param baseMu                         mean of the base distribution
     * @param baseSigma                      std-dev of the base distribution
     * @param anomalyMu                      mean of the anomaly distribution
     * @param anomalySigma                   std-dev of the anomaly distribution
     * @param transitionToAnomalyProbability per-row probability of entering the
     *                                       anomaly state
     * @param transitionToBaseProbability    per-row probability of leaving the
     *                                       anomaly state
     */
    public NormalMixtureTestData(double baseMu, double baseSigma, double anomalyMu, double anomalySigma,
            double transitionToAnomalyProbability, double transitionToBaseProbability) {
        this.baseMu = baseMu;
        this.baseSigma = baseSigma;
        this.anomalyMu = anomalyMu;
        this.anomalySigma = anomalySigma;
        this.transitionToAnomalyProbability = transitionToAnomalyProbability;
        this.transitionToBaseProbability = transitionToBaseProbability;
    }

    /** Default mixture: base N(0, 1), anomaly N(4, 2), transitions 1% / 30%. */
    public NormalMixtureTestData() {
        this(0.0, 1.0, 4.0, 2.0, 0.01, 0.3);
    }

    /** Mixture with custom means and default sigmas/transition probabilities. */
    public NormalMixtureTestData(double baseMu, double anomalyMu) {
        this(baseMu, 1.0, anomalyMu, 2.0, 0.01, 0.3);
    }

    /** Generates data with a non-deterministic seed (delegates with seed 0). */
    public double[][] generateTestData(int numberOfRows, int numberOfColumns) {
        return generateTestData(numberOfRows, numberOfColumns, 0);
    }

    /**
     * Generates {@code numberOfRows} rows of mixture samples.
     *
     * <p>
     * Fix: the state transitions previously used {@code Math.random()}, which
     * ignored the seed and made even seeded runs nondeterministic. Transitions
     * now draw from the same generator as the variates, so a nonzero
     * {@code seed} yields fully reproducible output.
     *
     * @param seed nonzero for reproducible output; 0 for a random seed
     */
    public double[][] generateTestData(int numberOfRows, int numberOfColumns, int seed) {
        double[][] result = new double[numberOfRows][numberOfColumns];
        boolean anomaly = false;
        // one generator drives both the variates and the state transitions
        Random random = (seed != 0) ? new Random(seed) : new Random();
        NormalDistribution dist = new NormalDistribution(random);
        for (int i = 0; i < numberOfRows; i++) {
            if (!anomaly) {
                fillRow(result[i], dist, baseMu, baseSigma);
                if (random.nextDouble() < transitionToAnomalyProbability) {
                    anomaly = true;
                }
            } else {
                fillRow(result[i], dist, anomalyMu, anomalySigma);
                if (random.nextDouble() < transitionToBaseProbability) {
                    anomaly = false;
                }
            }
        }
        return result;
    }

    /**
     * Like {@link #generateTestData(int, int, int)} but also records the indices
     * where the generating distribution switches (in either direction).
     *
     * <p>
     * Same determinism fix as above: transitions draw from the seeded generator
     * instead of {@code Math.random()}.
     *
     * @return data plus change indices; the changes array is null because this
     *         generator does not track change magnitudes
     */
    public MultiDimDataWithKey generateTestDataWithKey(int numberOfRows, int numberOfColumns, int seed) {
        double[][] resultData = new double[numberOfRows][numberOfColumns];
        int[] change = new int[numberOfRows];
        int numberOfChanges = 0;
        boolean anomaly = false;
        Random random = (seed != 0) ? new Random(seed) : new Random();
        NormalDistribution dist = new NormalDistribution(random);
        for (int i = 0; i < numberOfRows; i++) {
            if (!anomaly) {
                fillRow(resultData[i], dist, baseMu, baseSigma);
                if (random.nextDouble() < transitionToAnomalyProbability) {
                    change[numberOfChanges++] = i + 1; // next item is different
                    anomaly = true;
                }
            } else {
                fillRow(resultData[i], dist, anomalyMu, anomalySigma);
                if (random.nextDouble() < transitionToBaseProbability) {
                    anomaly = false;
                    change[numberOfChanges++] = i + 1; // next item is different
                }
            }
        }
        // note: a recorded index can equal numberOfRows if a transition fires on
        // the last row (preexisting behavior, preserved)
        return new MultiDimDataWithKey(resultData, Arrays.copyOf(change, numberOfChanges), null);
    }

    /** Fills one row with i.i.d. N(mu, sigma) samples. */
    private void fillRow(double[] row, NormalDistribution dist, double mu, double sigma) {
        for (int j = 0; j < row.length; j++) {
            row[j] = dist.nextDouble(mu, sigma);
        }
    }

    /**
     * Minimal normal-variate generator using the Box-Muller transform; produces
     * variates in pairs and buffers the second one.
     */
    static class NormalDistribution {
        private final Random rng;
        private final double[] buffer;
        private int index;

        NormalDistribution(Random rng) {
            this.rng = rng;
            buffer = new double[2];
            index = 0;
        }

        /** Returns one standard normal variate, refilling the pair buffer as needed. */
        double nextDouble() {
            if (index == 0) {
                // apply the Box-Muller transform to produce Normal variates
                double u = rng.nextDouble();
                double v = rng.nextDouble();
                double r = Math.sqrt(-2 * Math.log(u));
                buffer[0] = r * Math.cos(2 * Math.PI * v);
                buffer[1] = r * Math.sin(2 * Math.PI * v);
            }
            double result = buffer[index];
            index = (index + 1) % 2;
            return result;
        }

        /** Returns a variate scaled and shifted to N(mu, sigma). */
        double nextDouble(double mu, double sigma) {
            return mu + sigma * nextDouble();
        }
    }
}
| 650 |
0 | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/MultiDimDataWithKey.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.testutils;
/**
 * Simple container pairing a synthetic multi-dimensional time series with the
 * locations (and, where available, the magnitudes) of its injected changes.
 */
public class MultiDimDataWithKey {

    // the generated series; data[i] is the i-th multi-dimensional observation
    public double[][] data;

    // indices into data at which the generating process changed
    public int[] changeIndices;

    // change magnitudes aligned with changeIndices; may be null when the
    // generator does not track them (e.g. NormalMixtureTestData)
    public double[][] changes;

    public MultiDimDataWithKey(double[][] data, int[] changeIndices, double[][] changes) {
        this.data = data;
        this.changeIndices = changeIndices;
        this.changes = changes;
    }
}
| 651 |
0 | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ShingledData.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.testutils;
import static java.lang.Math.PI;
import java.util.Random;
/**
 * Test-data helper that produces a one-dimensional noisy cosine series and
 * returns its shingled view: output row i is the window of {@code dimensions}
 * consecutive values ending at input index i + dimensions - 1, in
 * chronological order.
 */
public class ShingledData {

    /**
     * Generates {@code size} shingled points of dimension {@code dimensions}
     * from a cosine wave of the given period with injected noise/jumps.
     *
     * @param size       number of shingled points to produce
     * @param period     period of the underlying cosine wave
     * @param dimensions shingle length (number of consecutive raw values)
     * @param seed       seed for the noise generator, for reproducibility
     * @return size-by-dimensions array of shingles
     */
    public static double[][] generateShingledData(int size, int period, int dimensions, long seed) {
        double[][] shingles = new double[size][];
        // cyclic buffer holding the most recent `dimensions` raw values
        double[] window = new double[dimensions];
        int cursor = 0;
        int produced = 0;
        boolean windowFull = false;
        // size + dimensions - 1 raw points yield exactly `size` shingles
        double[] series = getData(size + dimensions - 1, period, 100, 5, seed);
        for (int j = 0; j < size + dimensions - 1; ++j) { // streaming consumption
            window[cursor] = series[j];
            cursor = (cursor + 1) % dimensions;
            // the buffer is usable once it has wrapped around at least once
            windowFull = windowFull || (cursor == 0);
            if (windowFull) {
                shingles[produced++] = getShinglePoint(window, cursor, dimensions);
            }
        }
        return shingles;
    }

    // Copies the cyclic buffer into a flat array, starting at the oldest entry
    // so the result is in chronological order.
    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {
        double[] shingledPoint = new double[shingleLength];
        for (int k = 0; k < shingleLength; ++k) {
            shingledPoint[k] = recentPointsSeen[(k + indexOfOldestPoint) % shingleLength];
        }
        return shingledPoint;
    }

    // Produces a noisy cosine series; with probability ~1% a point also gets a
    // large jump of +/- 10 * noise (the injected "anomaly"), which is logged.
    private static double[] getData(int num, int period, double amplitude, double noise, long seed) {
        double[] values = new double[num];
        Random rng = new Random(seed);
        for (int i = 0; i < num; i++) {
            values[i] = amplitude * Math.cos(2 * PI * (i + 50) / period) + noise * rng.nextDouble();
            if (rng.nextDouble() < 0.01) {
                double jump = rng.nextDouble() < 0.5 ? 10 * noise : -10 * noise;
                values[i] += jump;
                System.out.println(" timestamp " + i + " changing by " + jump);
            }
        }
        return values;
    }
}
| 652 |
0 | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ShingledMultiDimDataWithKeys.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.testutils;
import static java.lang.Math.PI;
import java.util.Arrays;
import java.util.Random;
/**
 * Test-data generator producing multi-dimensional cosine waves with randomly
 * injected anomalies, optionally shingled, together with the ground-truth
 * anomaly locations and magnitudes (the "keys").
 */
public class ShingledMultiDimDataWithKeys {

    /**
     * Generates a multi-dimensional series with injected anomalies and returns
     * its shingled view plus the injected-change bookkeeping.
     *
     * @param size          number of shingled points to produce
     * @param period        period of the underlying cosine waves
     * @param shingleSize   number of consecutive input points per shingle
     * @param baseDimension dimension of each underlying input point
     * @param seed          random seed, for reproducibility
     * @return shingled data together with anomaly indices and magnitudes
     */
    public static MultiDimDataWithKey generateShingledDataWithKey(int size, int period, int shingleSize,
            int baseDimension, long seed) {
        // size + shingleSize - 1 raw points are needed to produce `size` shingles
        MultiDimDataWithKey dataWithKeys = getMultiDimData(size + shingleSize - 1, period, 100, 5, seed, baseDimension);
        double[][] answer = generateShingledData(dataWithKeys.data, shingleSize, baseDimension, false);
        return new MultiDimDataWithKey(answer, dataWithKeys.changeIndices, dataWithKeys.changes);
    }

    /**
     * Shingles the given data: output row i is the concatenation of input rows
     * i .. i + shingleSize - 1. When {@code rotation} is true the cyclic buffer
     * is copied starting at index 0 (a "rotated" shingle); otherwise the copy
     * starts at the oldest element, producing chronological order.
     *
     * @param data          raw input points, one row per time step
     * @param shingleSize   number of consecutive rows per shingle
     * @param baseDimension dimension of each input row
     * @param rotation      whether to leave shingles in cyclic-buffer order
     * @return (data.length - shingleSize + 1) shingled rows
     */
    public static double[][] generateShingledData(double[][] data, int shingleSize, int baseDimension,
            boolean rotation) {
        int size = data.length - shingleSize + 1;
        double[][] answer = new double[size][];
        int entryIndex = 0;
        boolean filledShingleAtleastOnce = false;
        double[][] history = new double[shingleSize][];
        int count = 0;
        for (int j = 0; j < size + shingleSize - 1; ++j) { // we stream here ....
            history[entryIndex] = data[j];
            entryIndex = (entryIndex + 1) % shingleSize;
            if (entryIndex == 0) {
                filledShingleAtleastOnce = true;
            }
            if (filledShingleAtleastOnce) {
                int position = (rotation) ? 0 : entryIndex;
                answer[count++] = getShinglePoint(history, position, shingleSize, baseDimension);
            }
        }
        return answer;
    }

    // Flattens the cyclic buffer of points into one shingle, reading
    // shingleLength rows starting at indexOfOldestPoint (modulo wrap-around).
    private static double[] getShinglePoint(double[][] recentPointsSeen, int indexOfOldestPoint, int shingleLength,
            int baseDimension) {
        double[] shingledPoint = new double[shingleLength * baseDimension];
        int count = 0;
        for (int j = 0; j < shingleLength; ++j) {
            double[] point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];
            for (int i = 0; i < baseDimension; i++) {
                shingledPoint[count++] = point[i];
            }
        }
        return shingledPoint;
    }

    /** Convenience overload: no slope, default anomaly factor. */
    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,
            int baseDimension) {
        return getMultiDimData(num, period, amplitude, noise, seed, baseDimension, false);
    }

    /** Convenience overload: default anomaly factor of 5.0. */
    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,
            int baseDimension, boolean useSlope) {
        return getMultiDimData(num, period, amplitude, noise, seed, baseDimension, 5.0, useSlope);
    }

    /**
     * Generates {@code num} points of {@code baseDimension}-dimensional data.
     * Each coordinate follows a cosine wave with per-coordinate random phase
     * and amplitude (plus an optional linear slope/shift when {@code useSlope})
     * and additive uniform noise. With probability ~1% per point, individual
     * coordinates receive a large spike of magnitude proportional to
     * {@code anomalyFactor * noise}; the indices and magnitudes of these
     * injected changes are returned alongside the data.
     *
     * @param num           number of points to generate
     * @param period        period of the cosine waves
     * @param amplitude     base amplitude of the waves
     * @param noise         scale of the additive noise (and of anomalies)
     * @param seed          random seed, for reproducibility
     * @param baseDimension dimension of each point
     * @param anomalyFactor multiplier controlling injected-anomaly magnitude
     * @param useSlope      whether to add a per-coordinate linear trend
     * @return the data plus the indices/magnitudes of injected anomalies
     */
    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,
            int baseDimension, double anomalyFactor, boolean useSlope) {
        double[][] data = new double[num][];
        double[][] changes = new double[num][];
        int[] changedIndices = new int[num];
        int counter = 0;
        Random prg = new Random(seed);
        Random noiseprg = new Random(prg.nextLong());
        double[] phase = new double[baseDimension];
        double[] amp = new double[baseDimension];
        double[] slope = new double[baseDimension];
        double[] shift = new double[baseDimension];
        for (int i = 0; i < baseDimension; i++) {
            phase[i] = prg.nextInt(period);
            if (useSlope) {
                shift[i] = (4 * prg.nextDouble() - 1) * amplitude;
            }
            amp[i] = (1 + 0.2 * prg.nextDouble()) * amplitude;
            if (useSlope) {
                slope[i] = (0.25 - prg.nextDouble() * 0.5) * amplitude / period;
            }
        }
        for (int i = 0; i < num; i++) {
            data[i] = new double[baseDimension];
            boolean flag = (noiseprg.nextDouble() < 0.01);
            double[] newChange = new double[baseDimension];
            boolean used = false;
            for (int j = 0; j < baseDimension; j++) {
                data[i][j] = amp[j] * Math.cos(2 * PI * (i + phase[j]) / period) + slope[j] * i + shift[j];
                // ensures that the noise does not cancel the anomaly or change its magnitude
                if (flag && noiseprg.nextDouble() < 0.3) {
                    double factor = anomalyFactor * (1 + noiseprg.nextDouble());
                    double change = noiseprg.nextDouble() < 0.5 ? factor * noise : -factor * noise;
                    data[i][j] += newChange[j] = change;
                    used = true;
                } else {
                    data[i][j] += noise * (2 * noiseprg.nextDouble() - 1);
                }
            }
            if (used) {
                changedIndices[counter] = i;
                changes[counter++] = newChange;
            }
        }
        // trim the bookkeeping arrays to the number of anomalies actually injected
        return new MultiDimDataWithKey(data, Arrays.copyOf(changedIndices, counter), Arrays.copyOf(changes, counter));
    }
}
| 653 |
0 | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ExampleDataSets.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.testutils;
import static java.lang.Math.PI;
import static java.lang.Math.cos;
import static java.lang.Math.sin;
import java.util.Random;
/**
 * Generators for small two-dimensional example data sets used in tests and
 * demos: a "fan" of rotated blades built from a normal-mixture sample, and a
 * rejection-sampled asymmetric density inside the unit disk.
 */
public class ExampleDataSets {

    /**
     * Produces a two-dimensional "fan": points drawn from a normal mixture are
     * squashed into a thin blade and then rotated/translated into
     * {@code numberOfBlades} copies arranged around the origin.
     *
     * @param numberPerBlade points per blade; must be positive
     * @param numberOfBlades number of blades; at most 12
     * @return the generated points, or null if the arguments are out of range
     */
    public static double[][] generateFan(int numberPerBlade, int numberOfBlades) {
        if (numberOfBlades > 12 || numberPerBlade <= 0) {
            return null;
        }
        final int dims = 2;
        Random rng = new Random(0);
        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0, 1);
        double[][] raw = generator.generateTestData(numberOfBlades * numberPerBlade, dims, 100);
        double[][] result = new double[raw.length][dims];
        for (int j = 0; j < raw.length; j++) {
            // squash the blob into a thin blade
            result[j][0] = 0.05 * raw[j][0];
            result[j][1] = 0.2 * raw[j][1];
            double toss = rng.nextDouble();
            // pick a blade with probability proportional to 1/numberOfBlades,
            // then rotate the point onto it and push it away from the origin
            for (int i = 0; i <= numberOfBlades; i++) {
                if (toss < i * 1.0 / numberOfBlades) {
                    double[] rotated = rotateClockWise(result[j], 2 * PI * i / numberOfBlades);
                    result[j][0] = rotated[0] + 0.6 * sin(2 * PI * i / numberOfBlades);
                    result[j][1] = rotated[1] + 0.6 * cos(2 * PI * i / numberOfBlades);
                    break;
                }
            }
        }
        return result;
    }

    /** Rotates a 2-d point clockwise by the angle {@code theta} (radians). */
    public static double[] rotateClockWise(double[] point, double theta) {
        return new double[] { cos(theta) * point[0] + sin(theta) * point[1],
                -sin(theta) * point[0] + cos(theta) * point[1] };
    }

    /**
     * Rejection-samples {@code size} points from an asymmetric density inside
     * the unit disk (different acceptance rules per region, with a circular
     * hole around (0.5, 0)). Uses an unseeded Random, so output varies between
     * runs.
     */
    public static double[][] generate(int size) {
        Random rng = new Random();
        double[][] points = new double[size][2];
        for (int i = 0; i < size; i++) {
            boolean accepted = false;
            while (!accepted) {
                double x = 2 * rng.nextDouble() - 1;
                double y = 2 * rng.nextDouble() - 1;
                if (x * x + y * y <= 1) {
                    // squared distances to the centers of the two half-disks
                    double right = (x - 0.5) * (x - 0.5) + y * y;
                    double left = (x + 0.5) * (x + 0.5) + y * y;
                    if (y > 0) {
                        if (x > 0 && right <= 0.25) {
                            accepted = (right > 1.0 / 32) && (rng.nextDouble() < 0.6);
                        }
                    } else if (x > 0) {
                        if (right > 1.0 / 32) {
                            accepted = (right < 0.25) || (rng.nextDouble() < 0.4);
                        }
                    } else {
                        accepted = (left > 0.25) && (rng.nextDouble() < 0.2);
                    }
                }
                if (accepted) {
                    points[i][0] = x;
                    points[i][1] = y;
                }
            }
        }
        return points;
    }
}
| 654 |
0 | Create_ds/jsii/tools/jsii-build-tools | Create_ds/jsii/tools/jsii-build-tools/bin/package-java | #!/bin/bash
set -euo pipefail
mkdir -p dist/java
rsync -av maven-repo/ dist/java/
| 655 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/function_call.java | callSomeFunction(1, 2, 3);
| 656 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/shorthand_property.java | String foo = "hello";
callFunction(Map.of("foo", foo));
| 657 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/default_struct_fields.java | public class Struct {
private String x;
public String getX() {
return this.x;
}
public Struct x(String x) {
this.x = x;
return this;
}
private String y;
public String getY() {
return this.y;
}
public Struct y(String y) {
this.y = y;
return this;
}
}
public void foo(Struct s) {
System.out.println(s.getX() + s.getY());
}
| 658 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/translate_object_literals_second_level_with_newlines.java | foo(25, Map.of("foo", 3, "deeper", Map.of(
"a", 1,
"b", 2)));
| 659 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/translate_object_literals_with_multiple_newlines.java | foo(25, Map.of(
"foo", 3,
"banana", "hello"));
| 660 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/method_call.java | someObject.callSomeFunction(1, 2, 3);
| 661 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/list_of_anonymous_structs.java | foo(Map.of(
"list", List.of(Map.of(
"a", 1,
"b", 2), Map.of(
"a", 3,
"b", 4))));
| 662 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/declaring_default_arguments.java | public void foo(String x) {
foo(x, "hello");
}
public void foo(String x, String y) {
foo(x, y, null);
}
public void foo(String x, String y, String z) {
System.out.println(x + y + z);
}
| 663 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/will_type_deep_structs_directly_if_type_info_is_available.java | public class BaseDeeperStruct {
private Number a;
public Number getA() {
return this.a;
}
public BaseDeeperStruct a(Number a) {
this.a = a;
return this;
}
}
public class DeeperStruct extends BaseDeeperStruct {
private Number b;
public Number getB() {
return this.b;
}
public DeeperStruct b(Number b) {
this.b = b;
return this;
}
}
public class OuterStruct {
private Number foo;
public Number getFoo() {
return this.foo;
}
public OuterStruct foo(Number foo) {
this.foo = foo;
return this;
}
private DeeperStruct deeper;
public DeeperStruct getDeeper() {
return this.deeper;
}
public OuterStruct deeper(DeeperStruct deeper) {
this.deeper = deeper;
return this;
}
}
public void foo(Number x, OuterStruct outer) {
}
foo(25, OuterStruct.builder().foo(3).deeper(DeeperStruct.builder()
.a(1)
.b(2)
.build()).build());
| 664 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/translate_object_literals_in_function_call.java | foo(25, Map.of("foo", 3, "banana", "hello"));
| 665 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/static_function_call.java | SomeObject.callSomeFunction(1, 2, 3);
| 666 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/literal_map_argument.java | public void foo(Map<String, String> xs) {
}
foo(Map.of("foo", "bar", "schmoo", "schmar"));
| 667 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/translate_object_literals_only_one_level_deep.java | foo(25, Map.of("foo", 3, "deeper", Map.of("a", 1, "b", 2)));
| 668 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/translate_object_literals_with_newlines.java | foo(25, Map.of(
"foo", 3,
"banana", "hello"));
| 669 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/self_method_call.java | this.callSomeFunction(25);
| 670 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/calls/this_argument.java | callSomeFunction(this, 25);
| 671 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/misc/booleans_render_to_right_primitives.java | callFunction(true, false);
| 672 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/comments/no_duplication_of_comments.java | // Here's a comment
object.member.functionCall(new Class(), "argument");
| 673 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/comments/empty_lines_in_comments.java | // Here's a comment
//
// Second line
someCall();
| 674 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/comments/interleave_multiline_comments_with_function_call.java | someFunction(arg1, Map.of(
/* A comment before arg2 */
"arg2", "string",
/* A comment before arg3 */
"arg3", "boo"));
| 675 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/comments/interleave_single_line_comments_with_function_call.java | someFunction(arg1, Map.of(
// A comment before arg2
"arg2", "string",
// A comment before arg3
"arg3", "boo"));
| 676 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/invisible_interfaces_do_not_affect_whitespace.java | public class MyClass1 {
}
public class ThisWillNotBeRendered {
}
public class MyClass2 {
}
| 677 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_different_name.java | public class OtherName {
public OtherName() {
}
} | 678 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/whitespace_between_multiple_members.java | public class MyClass {
public MyClass(String y) {
this.x = y;
}
public void hello() {
System.out.println(this.x);
}
public void bye() {
System.out.println("bye");
}
}
| 679 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/whitespace_between_multiple_empty_members.java | public class MyClass {
public MyClass(String y) {
this.x = y;
}
public void hello() {
}
public void bye() {
}
}
| 680 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_declaration_with_private_fields_and_constructor.java | public class MyClass {
private final String x;
public MyClass(String y) {
this.x = y;
}
}
| 681 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/constructor_with_optional_params.java | public class A {
public A() {
this(null);
}
public A(String a) {
this(a, 3);
}
public A(String a, Number b) {
}
}
| 682 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_method.java | public class MyClass extends SomeOtherClass {
public void someMethod(String x) {
System.out.println(x);
}
}
| 683 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_extends_and_implements.java | public class MyClass extends SomeOtherClass implements SomeInterface {
}
| 684 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/empty_class.java | public class empty_class {
}
| 685 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_declaration_with_public_fields_and_constructor.java | public class MyClass {
public final String x;
public MyClass(String y) {
this.x = y;
}
}
| 686 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_inheritance.java | public class MyClass extends SomeOtherClass {
}
| 687 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_implementing_jsii_interface.java | public class MyClass implements IResolvable {
public Object resolve() {
return 42;
}
} | 688 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_inheritance_and_super_class.java | public class MyClass extends SomeOtherClass {
public MyClass(String x, String y) {
super(x);
}
}
| 689 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/classes/class_with_props_argument.java | public class MyClassProps {
private String prop1;
public String getProp1() {
return this.prop1;
}
public MyClassProps prop1(String prop1) {
this.prop1 = prop1;
return this;
}
private Number prop2;
public Number getProp2() {
return this.prop2;
}
public MyClassProps prop2(Number prop2) {
this.prop2 = prop2;
return this;
}
}
public class MyClass extends SomeOtherClass {
public MyClass(Construct scope, String id, MyClassProps props) {
super(scope, id, props);
System.out.println(props.getProp1());
}
}
| 690 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/imports/selective_import.java | import scope.some.module.one.*;
import scope.some.module.Two;
import scope.some.module.someThree.*;
import scope.some.module.four.*;
new Two();
renamed();
| 691 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/imports/multiple-imports.java | import aws.cdk.lib.*;
import constructs.Construct;
import constructs.IConstruct;
| 692 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/imports/submodule-import.java | import software.amazon.jsii.tests.calculator.submodule.*;
import software.amazon.jsii.tests.calculator.submodule.child.*;
import software.amazon.jsii.tests.calculator.homonymousForwardReferences.*;
import software.amazon.jsii.tests.calculator.homonymousForwardReferences.foo.*;
import gen.providers.aws.kms.*;
// Access without existing type information
Object awsKmsKeyExamplekms = KmsKey.Builder.create(this, "examplekms")
.deletionWindowInDays(7)
.description("KMS key 1")
.build();
// Accesses two distinct points of the submodule hierarchy
MyClass myClass = MyClass.Builder.create().prop(SomeEnum.SOME).build();
// Access via a renamed import
Consumer.consume(ConsumerProps.builder().homonymous(Homonymous.builder().stringProperty("yes").build()).build());
| 693 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/imports/import_require.java | import scope.some.module.*;
new ClassFromModule();
| 694 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/imports/import_star_as.java | import scope.some.module.*;
new ClassFromModule();
| 695 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/identifiers/keyword.java | import scope.aws.lambda.*;
ClassFromLambda.Builder.create()
.key("lambda.amazonaws.com")
.build(); | 696 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/statements/whitespace_between_statements.java | statementOne();
statementTwo();
| 697 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/statements/declare_var.java | Type variable; | 698 |
0 | Create_ds/jsii/packages/jsii-rosetta/test/translations | Create_ds/jsii/packages/jsii-rosetta/test/translations/statements/initialize_object_literal.java | Map<String, Object> expected = Map.of(
"Foo", "Bar",
"Baz", 5,
"Qux", List.of("Waldo", "Fred")); | 699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.