index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/IStateMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
public interface IStateMapper<Model, State> {
State toState(Model model);
Model toModel(State state, long seed);
default Model toModel(State state) {
return toModel(state, 0L);
}
}
| 500 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/Version.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
public class Version {
public static final String V2_0 = "2.0";
public static final String V2_1 = "2.1";
public static final String V3_0 = "3.0";
public static final String V3_5 = "3.5";
public static final String V3_7 = "3.7";
public static final String V3_8 = "3.8";
}
| 501 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/tree/RandomCutTreeMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.tree;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.state.IContextualStateMapper;
import com.amazon.randomcutforest.state.Version;
import com.amazon.randomcutforest.tree.AbstractNodeStore;
import com.amazon.randomcutforest.tree.RandomCutTree;
@Getter
@Setter
public class RandomCutTreeMapper
implements IContextualStateMapper<RandomCutTree, CompactRandomCutTreeState, CompactRandomCutTreeContext> {
@Override
public RandomCutTree toModel(CompactRandomCutTreeState state, CompactRandomCutTreeContext context, long seed) {
int dimension = (state.getDimensions() != 0) ? state.getDimensions() : context.getPointStore().getDimensions();
context.setDimension(dimension);
AbstractNodeStoreMapper nodeStoreMapper = new AbstractNodeStoreMapper();
nodeStoreMapper.setRoot(state.getRoot());
AbstractNodeStore nodeStore = nodeStoreMapper.toModel(state.getNodeStoreState(), context);
// boundingBoxcache is not set deliberately;
// it should be set after the partial tree is complete
// likewise all the leaves, including the root, should be set to
// nodeStore.getCapacity()
// such that when the partial tree is filled, the correct mass is computed
// note that this has no effect on the cuts -- since a single node tree has no
// cuts
// uncommenting and using the following line would result in such an incorrect
// computation
// in testRoundTripForSingleNodeForest() where the masses of the trees would be
// different by 1
// and thus outputAfter() would be triggered differently.
// int newRoot = state.getRoot();
int newRoot = nodeStore.isLeaf(state.getRoot()) ? nodeStore.getCapacity() : state.getRoot();
RandomCutTree tree = new RandomCutTree.Builder().dimension(dimension)
.storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).capacity(state.getMaxSize())
.setRoot(newRoot).randomSeed(state.getSeed()).pointStoreView(context.getPointStore())
.nodeStore(nodeStore).centerOfMassEnabled(state.isCenterOfMassEnabled())
.outputAfter(state.getOutputAfter()).build();
return tree;
}
@Override
public CompactRandomCutTreeState toState(RandomCutTree model) {
CompactRandomCutTreeState state = new CompactRandomCutTreeState();
state.setVersion(Version.V3_0);
int root = model.getRoot();
AbstractNodeStoreMapper nodeStoreMapper = new AbstractNodeStoreMapper();
nodeStoreMapper.setRoot(root);
state.setNodeStoreState(nodeStoreMapper.toState(model.getNodeStore()));
// the compression of nodeStore would change the root
if ((root != Null) && (root < model.getNumberOfLeaves() - 1)) {
root = 0; // reordering is forced
}
state.setRoot(root);
state.setMaxSize(model.getNumberOfLeaves());
state.setPartialTreeState(true);
state.setStoreSequenceIndexesEnabled(model.isStoreSequenceIndexesEnabled());
state.setCenterOfMassEnabled(model.isCenterOfMassEnabled());
state.setBoundingBoxCacheFraction(model.getBoundingBoxCacheFraction());
state.setOutputAfter(model.getOutputAfter());
state.setSeed(model.getRandomSeed());
state.setDimensions(model.getDimension());
return state;
}
}
| 502 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/tree/CompactRandomCutTreeState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.tree;
import static com.amazon.randomcutforest.state.Version.V2_0;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.state.store.NodeStoreState;
@Data
public class CompactRandomCutTreeState implements Serializable {
private static final long serialVersionUID = 1L;
private String version = V2_0;
private int root;
private int maxSize;
private int outputAfter;
private boolean storeSequenceIndexesEnabled;
private boolean centerOfMassEnabled;
private NodeStoreState nodeStoreState;
private double boundingBoxCacheFraction;
private boolean partialTreeState;
private long seed;
private int id;
private int dimensions;
private long staticSeed;
private float weight;
private byte[] auxiliaryData;
private boolean hasAuxiliaryData;
}
| 503 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/tree/AbstractNodeStoreMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.tree;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import java.util.concurrent.ArrayBlockingQueue;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.IContextualStateMapper;
import com.amazon.randomcutforest.state.Version;
import com.amazon.randomcutforest.state.store.NodeStoreState;
import com.amazon.randomcutforest.tree.AbstractNodeStore;
import com.amazon.randomcutforest.util.ArrayPacking;
@Getter
@Setter
public class AbstractNodeStoreMapper
implements IContextualStateMapper<AbstractNodeStore, NodeStoreState, CompactRandomCutTreeContext> {
private int root;
@Override
public AbstractNodeStore toModel(NodeStoreState state, CompactRandomCutTreeContext compactRandomCutTreeContext,
long seed) {
int capacity = state.getCapacity();
int[] cutDimension = null;
int[] leftIndex = null;
int[] rightIndex = null;
float[] cutValue = null;
if (root != Null && root < capacity) {
cutDimension = ArrayPacking.unpackInts(state.getCutDimension(), capacity, state.isCompressed());
cutValue = ArrayPacking.unpackFloats(state.getCutValueData(), capacity);
leftIndex = ArrayPacking.unpackInts(state.getLeftIndex(), capacity, state.isCompressed());
rightIndex = ArrayPacking.unpackInts(state.getRightIndex(), capacity, state.isCompressed());
reverseBits(state.getSize(), leftIndex, rightIndex, capacity);
}
// note boundingBoxCache is not set deliberately
return AbstractNodeStore.builder().capacity(capacity).useRoot(root).leftIndex(leftIndex).rightIndex(rightIndex)
.cutDimension(cutDimension).cutValues(cutValue).dimension(compactRandomCutTreeContext.getDimension())
.build();
}
@Override
public NodeStoreState toState(AbstractNodeStore model) {
NodeStoreState state = new NodeStoreState();
int capacity = model.getCapacity();
state.setVersion(Version.V3_0);
state.setCapacity(capacity);
state.setCompressed(true);
state.setPartialTreeStateEnabled(true);
state.setPrecision(Precision.FLOAT_32.name());
int[] leftIndex = model.getLeftIndex();
int[] rightIndex = model.getRightIndex();
int[] cutDimension = model.getCutDimension();
float[] cutValues = model.getCutValues();
int[] map = new int[capacity];
int size = reorderNodesInBreadthFirstOrder(map, leftIndex, rightIndex, capacity);
state.setSize(size);
boolean check = root != Null && root < capacity;
state.setCanonicalAndNotALeaf(check);
if (check) { // can have a canonical representation saving a lot of space
int[] reorderedLeftArray = new int[size];
int[] reorderedRightArray = new int[size];
int[] reorderedCutDimension = new int[size];
float[] reorderedCutValue = new float[size];
for (int i = 0; i < size; i++) {
reorderedLeftArray[i] = (leftIndex[map[i]] < capacity) ? 1 : 0;
reorderedRightArray[i] = (rightIndex[map[i]] < capacity) ? 1 : 0;
reorderedCutDimension[i] = cutDimension[map[i]];
reorderedCutValue[i] = cutValues[map[i]];
}
state.setLeftIndex(ArrayPacking.pack(reorderedLeftArray, state.isCompressed()));
state.setRightIndex(ArrayPacking.pack(reorderedRightArray, state.isCompressed()));
state.setSize(model.size());
state.setCutDimension(ArrayPacking.pack(reorderedCutDimension, state.isCompressed()));
state.setCutValueData(ArrayPacking.pack(reorderedCutValue));
}
return state;
}
/**
* The follong function takes a pair of left and right indices for a regular
* binary tree (each node has 0 or 2 children) and where internal nodes are in
* the range [0..capacity-1] the indices are represented as : 0 for internal
* node; 1 for leaf node; the root is 0 and every non-leaf node is added to a
* queue; the number assigned to that node is the number in the queue Note that
* this implies that the left/right children can be represented by bit-arrays
*
* This function reflates the bits to the queue numbers
*
* @param size the size of the two arrays, typically this is capacity; but
* can be different in RCF2.0
* @param leftIndex the left bitarray
* @param rightIndex the right bitarray
* @param capacity the number of internal nodes (one less than number of
* leaves)
*/
protected static void reverseBits(int size, int[] leftIndex, int[] rightIndex, int capacity) {
int nodeCounter = 1;
for (int i = 0; i < size; i++) {
if (leftIndex[i] != 0) {
leftIndex[i] = nodeCounter++;
} else {
leftIndex[i] = capacity;
}
if (rightIndex[i] != 0) {
rightIndex[i] = nodeCounter++;
} else {
rightIndex[i] = capacity;
}
}
for (int i = size; i < leftIndex.length; i++) {
leftIndex[i] = rightIndex[i] = capacity;
}
}
/**
* The following function reorders the nodes stored in the tree in a breadth
* first order; Note that a regular binary tree where each internal node has 2
* chidren, as is the case for AbstractRandomCutTree or any tree produced in a
* Random Forest ensemble (not restricted to Random Cut Forests), has maxsize -
* 1 internal nodes for maxSize number of leaves. The leaves are numbered 0 +
* (maxsize), 1 + (maxSize), ..., etc. in that BFS ordering. The root is node 0.
*
* Note that if the binary tree is a complete binary tree, then the numbering
* would correspond to the well known heuristic where children of node index i
* are numbered 2*i and 2*i + 1. The trees in AbstractCompactRandomCutTree will
* not be complete binary trees. But a similar numbering enables us to compress
* the entire structure of the tree into two bit arrays corresponding to
* presence of left and right children. The idea can be viewed as similar to
* Zak's numbering for regular binary trees Lexicographic generation of binary
* trees, S. Zaks, TCS volume 10, pages 63-82, 1980, that uses depth first
* numbering. However an extensive literature exists on this topic.
*
* The overall relies on the extra advantage that we can use two bit sequences;
* the left and right child pointers which appears to be simple. While it is
* feasible to always maintain this order, that would complicate the standard
* binary search tree pattern and this tranformation is used when the tree is
* serialized. Note that while there is savings in representing the tree
* structure into two bit arrays, the bulk of the serialization corresponds to
* the payload at the nodes (cuts, dimensions for internal nodes and index to
* pointstore, number of copies for the leaves). The translation to the bits is
* handled by the NodeStoreMapper. The algorithm here corresponds to just
* producing the cannoical order.
*
* The algorithm renumbers the nodes in BFS ordering.
*/
public int reorderNodesInBreadthFirstOrder(int[] map, int[] leftIndex, int[] rightIndex, int capacity) {
if ((root != Null) && (root < capacity)) {
int currentNode = 0;
ArrayBlockingQueue<Integer> nodeQueue = new ArrayBlockingQueue<>(capacity);
nodeQueue.add(root);
while (!nodeQueue.isEmpty()) {
int head = nodeQueue.poll();
int leftChild = leftIndex[head];
if (leftChild < capacity) {
nodeQueue.add(leftChild);
}
int rightChild = rightIndex[head];
if (rightChild < capacity) {
nodeQueue.add(rightChild);
}
map[currentNode] = head;
currentNode++;
}
return currentNode;
}
return 0;
}
}
| 504 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/tree/CompactRandomCutTreeContext.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.tree;
import lombok.Data;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.store.IPointStore;
@Data
public class CompactRandomCutTreeContext {
private int maxSize;
private int dimension;
private IPointStore<?, ?> pointStore;
private Precision precision;
}
| 505 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/sampler/CompactSamplerMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.sampler;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.util.ArrayPacking;
@Getter
@Setter
public class CompactSamplerMapper implements IStateMapper<CompactSampler, CompactSamplerState> {
/**
* This flag is passed to the constructor for {@code CompactSampler} when a new
* sampler is constructed in {@link #toModel}. If true, then the sampler will
* validate that the weight array in a {@code CompactSamplerState} instance
* satisfies the heap property. The heap property is not validated by default.
*/
private boolean validateHeapEnabled = false;
/**
* used to compress data, can be set to false for debug
*/
private boolean compressionEnabled = true;
@Override
public CompactSampler toModel(CompactSamplerState state, long seed) {
float[] weight = new float[state.getCapacity()];
int[] pointIndex = new int[state.getCapacity()];
long[] sequenceIndex;
int size = state.getSize();
System.arraycopy(state.getWeight(), 0, weight, 0, size);
System.arraycopy(ArrayPacking.unpackInts(state.getPointIndex(), state.isCompressed()), 0, pointIndex, 0, size);
if (state.isStoreSequenceIndicesEnabled()) {
sequenceIndex = new long[state.getCapacity()];
System.arraycopy(state.getSequenceIndex(), 0, sequenceIndex, 0, size);
} else {
sequenceIndex = null;
}
return new CompactSampler.Builder<>().capacity(state.getCapacity()).timeDecay(state.getTimeDecay())
.randomSeed(state.getRandomSeed()).storeSequenceIndexesEnabled(state.isStoreSequenceIndicesEnabled())
.weight(weight).pointIndex(pointIndex).sequenceIndex(sequenceIndex).validateHeap(validateHeapEnabled)
.initialAcceptFraction(state.getInitialAcceptFraction())
.mostRecentTimeDecayUpdate(state.getSequenceIndexOfMostRecentTimeDecayUpdate())
.maxSequenceIndex(state.getMaxSequenceIndex()).size(state.getSize()).build();
}
@Override
public CompactSamplerState toState(CompactSampler model) {
CompactSamplerState state = new CompactSamplerState();
state.setSize(model.size());
state.setCompressed(compressionEnabled);
state.setCapacity(model.getCapacity());
state.setTimeDecay(model.getTimeDecay());
state.setSequenceIndexOfMostRecentTimeDecayUpdate(model.getMostRecentTimeDecayUpdate());
state.setMaxSequenceIndex(model.getMaxSequenceIndex());
state.setInitialAcceptFraction(model.getInitialAcceptFraction());
state.setStoreSequenceIndicesEnabled(model.isStoreSequenceIndexesEnabled());
state.setRandomSeed(model.getRandomSeed());
state.setWeight(Arrays.copyOf(model.getWeightArray(), model.size()));
state.setPointIndex(ArrayPacking.pack(model.getPointIndexArray(), model.size(), state.isCompressed()));
if (model.isStoreSequenceIndexesEnabled()) {
state.setSequenceIndex(Arrays.copyOf(model.getSequenceIndexArray(), model.size()));
}
return state;
}
}
| 506 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/sampler/CompactSamplerState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.sampler;
import static com.amazon.randomcutforest.state.Version.V2_0;
import java.io.Serializable;
import lombok.Data;
/**
* A data object representing the state of a
* {@link com.amazon.randomcutforest.sampler.CompactSampler}.
*/
@Data
public class CompactSamplerState implements Serializable {
private static final long serialVersionUID = 1L;
/**
* a version string for extensibility
*/
private String version = V2_0;
/**
* An array of sampler weights.
*/
private float[] weight;
/**
* An array of index values identifying the points in the sample. These indexes
* will correspond to a {@link com.amazon.randomcutforest.store.PointStore}.
*/
private int[] pointIndex;
/**
* boolean for deciding to store sequence indices
*/
private boolean storeSequenceIndicesEnabled;
/**
* The sequence indexes of points in the sample.
*/
private long[] sequenceIndex;
/**
* The number of points in the sample.
*/
private int size;
/**
* The maximum number of points that the sampler can contain.
*/
private int capacity;
/**
* The behavior of the sampler at initial sampling
*/
private double initialAcceptFraction;
/**
* The time-decay parameter for this sampler
*/
private double timeDecay;
/**
* Last update of timeDecay
*/
private long sequenceIndexOfMostRecentTimeDecayUpdate;
/**
* maximum timestamp seen in update/computeWeight
*/
private long maxSequenceIndex;
/**
* boolean indicating if the compression is enabled
*/
private boolean compressed;
/**
* saving the random state, if desired
*/
private long randomSeed;
}
| 507 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/returntypes/DiVectorMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.returntypes;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.state.IStateMapper;
@Getter
@Setter
public class DiVectorMapper implements IStateMapper<DiVector, DiVectorState> {
@Override
public DiVector toModel(DiVectorState state, long seed) {
if (state.getHigh() == null || state.getLow() == null) {
return null;
} else {
return new DiVector(state.getHigh(), state.getLow());
}
}
@Override
public DiVectorState toState(DiVector model) {
DiVectorState state = new DiVectorState();
if (model != null) {
state.setHigh(Arrays.copyOf(model.high, model.high.length));
state.setLow(Arrays.copyOf(model.low, model.low.length));
}
return state;
}
}
| 508 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/returntypes/DiVectorState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.returntypes;
import java.io.Serializable;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;
/**
* A DiVector is used when we want to track a quantity in both the positive and
* negative directions for each dimension in a manifold. For example, when using
* a {@link AnomalyAttributionVisitor} to compute the attribution of the anomaly
* score to dimension of the input point, we want to know if the anomaly score
* attributed to the ith coordinate of the input point is due to that coordinate
* being unusually high or unusually low.
*
* The DiVectorState creates a POJO to be used in serialization.
*/
@Getter
@Setter
public class DiVectorState implements Serializable {
private static final long serialVersionUID = 1L;
double[] high;
double[] low;
} | 509 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/store/PointStoreState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.store;
import static com.amazon.randomcutforest.state.Version.V2_0;
import java.io.Serializable;
import lombok.Data;
/**
* A class for storing the state of a
* {@link com.amazon.randomcutforest.store.PointStore}. Depending on which kind
* of point store was serialized, one of the fields {@code doubleData} or
* {@code floatData} will be null.
*/
@Data
public class PointStoreState implements Serializable {
private static final long serialVersionUID = 1L;
/**
* version string for future extensibility
*/
private String version = V2_0;
/**
* size of each point saved
*/
private int dimensions;
/**
* capacity of the store
*/
private int capacity;
/**
* shingle size of the points
*/
private int shingleSize;
/**
* precision of points in the point store state
*/
private String precision;
/**
* location beyond which the store has no useful information
*/
private int startOfFreeSegment;
/**
* Point data converted to raw bytes.
*/
private byte[] pointData;
/**
* use compressed representatiomn for arrays
*/
private boolean compressed;
/**
* An array of reference counts for each stored point.
*/
private int[] refCount;
/**
* is direct mapping enabled
*/
private boolean directLocationMap;
/**
* location data for indirect maps
*/
private int[] locationList;
/**
* reverse location data to be usable in future
*/
private int[] reverseLocationList;
/**
* flag to avoid null issues in the future
*/
private boolean reverseAvailable;
/**
* boolean indicating use of overlapping shingles; need not be used in certain
* cases
*/
private boolean internalShinglingEnabled;
/**
* internal shingle
*/
private double[] internalShingle;
/**
* last timestamp
*/
private long lastTimeStamp;
/**
* rotation for internal shingles
*/
private boolean rotationEnabled;
/**
* dynamic resizing
*/
private boolean dynamicResizingEnabled;
/**
* current store capacity
*/
private int currentStoreCapacity;
/**
* current index capacity
*/
private int indexCapacity;
/**
* reduces the effect of repeated points; used in version 3.0
*/
private int[] duplicateRefs;
}
| 510 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/store/PointStoreMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.store;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.state.Version;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.PointStoreLarge;
import com.amazon.randomcutforest.util.ArrayPacking;
@Getter
@Setter
public class PointStoreMapper implements IStateMapper<PointStore, PointStoreState> {
    /**
     * If true, then the arrays are compressed via simple data dependent scheme
     */
    private boolean compressionEnabled = true;
    // Cap used when serializing reference counts: any count above this value is
    // split out into the duplicateRefs list so the stored count fits the byte
    // encoding. 255 (max unsigned byte) is the default.
    private int numberOfTrees = 255; // byte encoding as default

    /**
     * Reconstructs a {@link PointStore} from its serialized state.
     *
     * For states older than version 3.0 the serialized locations are expressed in
     * multiples of the base dimension and are rescaled here. For version 3.0
     * states the location list is sparse (only live indexes are stored) and the
     * reference counts that were capped during serialization are restored from
     * the duplicateRefs list.
     *
     * @param state the serialized point store state (must be FLOAT_32 precision)
     * @param seed  unused by this mapper (required by the interface)
     * @return the reconstructed point store
     */
    @Override
    public PointStore toModel(PointStoreState state, long seed) {
        checkNotNull(state.getRefCount(), "refCount must not be null");
        checkNotNull(state.getPointData(), "pointData must not be null");
        checkArgument(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32,
                "precision must be " + Precision.FLOAT_32);
        int indexCapacity = state.getIndexCapacity();
        int dimensions = state.getDimensions();
        float[] store = ArrayPacking.unpackFloats(state.getPointData(), state.getCurrentStoreCapacity() * dimensions);
        int startOfFreeSegment = state.getStartOfFreeSegment();
        int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());
        // Start with every slot marked infeasible; only live slots get a location.
        int[] locationList = new int[indexCapacity];
        Arrays.fill(locationList, PointStore.INFEASIBLE_LOCN);
        int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());
        if (!state.getVersion().equals(Version.V3_0)) {
            // Legacy (pre-3.0) states store locations scaled by the base dimension.
            int shingleSize = state.getShingleSize();
            int baseDimension = dimensions / shingleSize;
            for (int i = 0; i < tempList.length; i++) {
                checkArgument(tempList[i] % baseDimension == 0,
                        "the location field should be a multiple of dimension/shingle size for versions before 3.0");
                locationList[i] = tempList[i] / baseDimension;
            }
        } else {
            // V3.0: restore reference counts that were capped at serialization time.
            // duplicateRefs is a flat list of (index, excess-count) pairs.
            int[] duplicateRefs = null;
            if (state.getDuplicateRefs() != null) {
                duplicateRefs = ArrayPacking.unpackInts(state.getDuplicateRefs(), state.isCompressed());
                checkArgument(duplicateRefs.length % 2 == 0, " corrupt duplicates");
                for (int i = 0; i < duplicateRefs.length; i += 2) {
                    refCount[duplicateRefs[i]] += duplicateRefs[i + 1];
                }
            }
            // The serialized location list is dense over live slots only; expand it
            // back to the full index capacity, skipping dead slots.
            int nextLocation = 0;
            for (int i = 0; i < indexCapacity; i++) {
                if (refCount[i] > 0) {
                    locationList[i] = tempList[nextLocation];
                    ++nextLocation;
                } else {
                    locationList[i] = PointStoreLarge.INFEASIBLE_LOCN;
                }
            }
            checkArgument(nextLocation == tempList.length, "incorrect location encoding");
        }
        return PointStore.builder().internalRotationEnabled(state.isRotationEnabled())
                .internalShinglingEnabled(state.isInternalShinglingEnabled())
                .dynamicResizingEnabled(state.isDynamicResizingEnabled())
                .directLocationEnabled(state.isDirectLocationMap()).indexCapacity(indexCapacity)
                .currentStoreCapacity(state.getCurrentStoreCapacity()).capacity(state.getCapacity())
                .shingleSize(state.getShingleSize()).dimensions(state.getDimensions()).locationList(locationList)
                .nextTimeStamp(state.getLastTimeStamp()).startOfFreeSegment(startOfFreeSegment).refCount(refCount)
                .knownShingle(state.getInternalShingle()).store(store).build();
    }

    /**
     * Serializes a {@link PointStore} into a version-3.0 {@link PointStoreState}.
     *
     * The model is compacted first. Reference counts larger than
     * {@code numberOfTrees} are capped, with the excess recorded as
     * (index, excess) pairs in duplicateRefs; the location list is stored densely
     * over live slots only.
     *
     * @param model the point store to serialize
     * @return the serialized state
     */
    @Override
    public PointStoreState toState(PointStore model) {
        model.compact();
        PointStoreState state = new PointStoreState();
        state.setVersion(Version.V3_0);
        state.setCompressed(compressionEnabled);
        state.setDimensions(model.getDimensions());
        state.setCapacity(model.getCapacity());
        state.setShingleSize(model.getShingleSize());
        state.setDirectLocationMap(false);
        state.setInternalShinglingEnabled(model.isInternalShinglingEnabled());
        state.setLastTimeStamp(model.getNextSequenceIndex());
        if (model.isInternalShinglingEnabled()) {
            state.setInternalShingle(toDoubleArray(model.getInternalShingle()));
            state.setRotationEnabled(model.isInternalRotationEnabled());
        }
        state.setDynamicResizingEnabled(true);
        if (state.isDynamicResizingEnabled()) {
            state.setCurrentStoreCapacity(model.getCurrentStoreCapacity());
            state.setIndexCapacity(model.getIndexCapacity());
        }
        state.setStartOfFreeSegment(model.getStartOfFreeSegment());
        state.setPrecision(Precision.FLOAT_32.name());
        // NOTE(review): refcount[i] is overwritten below when capping; this assumes
        // getRefCount() returns a copy rather than the live array — if it returns
        // the live array, serialization would mutate the running model. Confirm.
        int[] refcount = model.getRefCount();
        int[] tempList = model.getLocationList();
        int[] locationList = new int[model.getIndexCapacity()];
        // Worst case: every slot has an excess count, hence 2 ints per slot.
        int[] duplicateRefs = new int[2 * model.getIndexCapacity()];
        int size = 0;
        int duplicateSize = 0;
        for (int i = 0; i < refcount.length; i++) {
            if (refcount[i] > 0) {
                // Dense encoding: only live slots contribute a location entry.
                locationList[size] = tempList[i];
                ++size;
                if (refcount[i] > numberOfTrees) {
                    // Record the overflow so toModel can restore the full count.
                    duplicateRefs[duplicateSize] = i;
                    duplicateRefs[duplicateSize + 1] = refcount[i] - numberOfTrees;
                    refcount[i] = numberOfTrees;
                    duplicateSize += 2;
                }
            }
        }
        state.setRefCount(ArrayPacking.pack(refcount, refcount.length, state.isCompressed()));
        state.setDuplicateRefs(ArrayPacking.pack(duplicateRefs, duplicateSize, state.isCompressed()));
        state.setLocationList(ArrayPacking.pack(locationList, size, state.isCompressed()));
        state.setPointData(ArrayPacking.pack(model.getStore(), model.getStartOfFreeSegment()));
        return state;
    }

    /**
     * Reconstructs a {@link PointStore} from a legacy FLOAT_64 state, converting
     * the point data to single precision.
     *
     * @param state the serialized state (must be FLOAT_64 precision)
     * @return the reconstructed point store backed by a float array
     */
    public PointStore convertFromDouble(PointStoreState state) {
        checkNotNull(state.getRefCount(), "refCount must not be null");
        checkNotNull(state.getPointData(), "pointData must not be null");
        checkArgument(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_64,
                "precision must be " + Precision.FLOAT_64);
        int indexCapacity = state.getIndexCapacity();
        int dimensions = state.getDimensions();
        float[] store = toFloatArray(
                ArrayPacking.unpackDoubles(state.getPointData(), state.getCurrentStoreCapacity() * dimensions));
        int startOfFreeSegment = state.getStartOfFreeSegment();
        int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());
        int[] locationList = new int[indexCapacity];
        int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());
        System.arraycopy(tempList, 0, locationList, 0, tempList.length);
        if (!state.getVersion().equals(Version.V3_0)) {
            // Legacy states store locations scaled by the base dimension.
            transformArray(locationList, dimensions / state.getShingleSize());
        }
        return PointStore.builder().internalRotationEnabled(state.isRotationEnabled())
                .internalShinglingEnabled(state.isInternalShinglingEnabled())
                .dynamicResizingEnabled(state.isDynamicResizingEnabled())
                .directLocationEnabled(state.isDirectLocationMap()).indexCapacity(indexCapacity)
                .currentStoreCapacity(state.getCurrentStoreCapacity()).capacity(state.getCapacity())
                .shingleSize(state.getShingleSize()).dimensions(state.getDimensions()).locationList(locationList)
                .nextTimeStamp(state.getLastTimeStamp()).startOfFreeSegment(startOfFreeSegment).refCount(refCount)
                .knownShingle(state.getInternalShingle()).store(store).build();
    }

    /**
     * Divides every positive entry of {@code location} by {@code baseDimension},
     * converting legacy scaled locations into slot indexes. Non-positive entries
     * (e.g. infeasible-location sentinels, and index 0 which is unchanged by the
     * division) are left untouched.
     *
     * @param location      the location array, modified in place
     * @param baseDimension the divisor; must be positive
     */
    void transformArray(int[] location, int baseDimension) {
        checkArgument(baseDimension > 0, "incorrect invocation");
        for (int i = 0; i < location.length; i++) {
            if (location[i] > 0) {
                location[i] = location[i] / baseDimension;
            }
        }
    }
}
| 511 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/store/NodeStoreState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.store;
import static com.amazon.randomcutforest.state.Version.V2_0;
import java.io.Serializable;
import lombok.Data;
@Data
public class NodeStoreState implements Serializable {
    private static final long serialVersionUID = 1L;
    // Serialization format version of this state object; defaults to 2.0.
    private String version = V2_0;
    // Maximum number of nodes the store can hold.
    private int capacity;
    // If true, the arrays below are stored in compressed form.
    private boolean compressed;
    // Per-node cut dimensions and the packed cut values, with their precision.
    private int[] cutDimension;
    private byte[] cutValueData;
    private String precision;
    // Index of the root node, and whether the root is canonical and not a leaf.
    private int root;
    private boolean canonicalAndNotALeaf;
    // Number of nodes currently in use.
    private int size;
    // Child pointers for each internal node.
    private int[] leftIndex;
    private int[] rightIndex;
    // Free-list bookkeeping for internal-node and leaf slots.
    private int[] nodeFreeIndexes;
    private int nodeFreeIndexPointer;
    private int[] leafFreeIndexes;
    private int leafFreeIndexPointer;
    // If true, only a partial tree state was serialized.
    private boolean partialTreeStateEnabled;
    // Per-leaf mass (number of copies) and point-store index.
    private int[] leafMass;
    private int[] leafPointIndex;
}
| 512 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/Weighted.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import lombok.Data;
/**
 * A container class representing a weighted value. This generic type is used in
 * sampler implementations to store points along with weights that were computed
 * as part of sampling. Instances are immutable; Lombok's {@code @Data}
 * generates the accessors, {@code equals}, {@code hashCode} and
 * {@code toString}.
 *
 * @param <P> The representation of the point value.
 */
@Data
public class Weighted<P> implements ISampled<P> {
    /**
     * The sampled value.
     */
    private final P value;
    /**
     * The weight assigned to this value, as computed by the sampler.
     */
    private final float weight;
    /**
     * The sequence index of the sampled value.
     */
    private final long sequenceIndex;
}
| 513 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/AbstractStreamSampler.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import java.util.Random;
import com.amazon.randomcutforest.config.Config;
/**
 * Base class for time-decayed stream samplers. It owns the time-decay
 * bookkeeping (so that the decay rate can be changed on the fly without
 * rescaling stored weights), a replayable random number generator, and the
 * common builder. Subclasses implement the actual accept/add mechanics.
 *
 * @param <P> the point representation handled by the sampler
 */
public abstract class AbstractStreamSampler<P> implements IStreamSampler<P> {
    /**
     * The decay factor used for generating the weight of the point. For greater
     * values of timeDecay we become more biased in favor of recent points.
     */
    protected double timeDecay;
    /**
     * The sequence index corresponding to the most recent change to
     * {@code timeDecay}.
     */
    protected long mostRecentTimeDecayUpdate = 0;
    /**
     * most recent timestamp, used to determine lastUpdateOfTimeDecay
     */
    protected long maxSequenceIndex = 0;
    /**
     * The accumulated sum of timeDecay before the last update.
     *
     * NOTE(review): the field name contains a typo ("accumuluated"); it is
     * protected and visible to subclasses, so renaming it would be an
     * incompatible API change — left as-is.
     */
    protected double accumuluatedTimeDecay = 0;
    /**
     * The random number generator used in sampling.
     */
    protected ReplayableRandom random;
    /**
     * The point evicted by the last call to {@link #update}, or null if the new
     * point was not accepted by the sampler.
     */
    protected transient ISampled<P> evictedPoint;
    /**
     * the fraction of points admitted to the sampler even when the sampler can
     * accept (not full) this helps control the initial behavior of the points and
     * ensure robustness by ensuring that the samplers do not all sample the initial
     * set of points.
     */
    protected final double initialAcceptFraction;
    /**
     * a function that computes the probability of admittance of a new value when
     * the sampler is not full Note that a value can always be admitted if it has a
     * weight smaller than some sampled value
     *
     * this function provides a mechanism for different trees to smoothly diverge --
     * most previous versions corresponded to initialFraction = 1, and the samplers
     * only diverge after all of them store all the first sampleSize points. In
     * contrast the method (which can be changed in a subclass) admits the first
     * initialFraction * sampleSize number of points and then becomes a monotonic
     * decreasing function.
     *
     * This function is supposed to be a parallel to the outputAfter() setting in
     * the forest which controls how scores are emitted
     *
     * @param currentSize the current size of the sampler
     * @return the probability of admitting the next point
     */
    protected double initialAcceptProbability(int currentSize) {
        if (currentSize < initialAcceptFraction * capacity) {
            // Always admit while below the initial-accept threshold.
            return 1.0;
        } else if (initialAcceptFraction >= 1.0) {
            return 0;
        } else {
            // Linear decay from 1 down to 0 as the sampler fills beyond the
            // initial-accept fraction.
            return 1 - (1.0 * currentSize / capacity - initialAcceptFraction) / (1 - initialAcceptFraction);
        }
    }
    /**
     * The number of points in the sample when full.
     */
    protected final int capacity;
    /**
     * This field is used to temporarily store the result from a call to
     * {@link #acceptPoint} for use in the subsequent call to {@link #addPoint}.
     *
     * Visible for testing.
     */
    protected AcceptPointState acceptPointState;

    // Convenience overload: accept with the default sampling weight of 1.0.
    public boolean acceptPoint(long sequenceIndex) {
        return acceptPoint(sequenceIndex, 1.0f);
    }

    public abstract boolean acceptPoint(long sequenceIndex, float weight);

    @Override
    public abstract void addPoint(P pointIndex);

    /**
     * Constructs the sampler from a builder. If the builder carries an explicit
     * {@code Random}, it is used directly (intended for tests); otherwise the
     * replayable seeded generator is used.
     *
     * @param builder the configured builder
     */
    public AbstractStreamSampler(Builder<?> builder) {
        this.capacity = builder.capacity;
        this.initialAcceptFraction = builder.initialAcceptFraction;
        this.timeDecay = builder.timeDecay;
        if (builder.random != null) {
            this.random = new ReplayableRandom(builder.random);
        } else {
            this.random = new ReplayableRandom(builder.randomSeed);
        }
    }
    /**
     * Weight is computed as <code>-log(w(i)) + log(-log(u(i))</code>, where
     *
     * <ul>
     * <li><code>w(i) = exp(timeDecay * sequenceIndex)</code></li>
     * <li><code>u(i)</code> is chosen uniformly from (0, 1)</li>
     * </ul>
     * <p>
     * A higher score means lower priority. So the points with the lower score have
     * higher chance of making it to the sample.
     *
     * @param sequenceIndex The sequenceIndex of the point whose score is being
     *                      computed.
     * @param sampleWeight  the positive weight (often 1.0) used in sampling; the
     *                      weight should be checked in the calling routine
     * @return the weight value used to define point priority
     */
    protected float computeWeight(long sequenceIndex, float sampleWeight) {
        double randomNumber = 0d;
        // Re-draw until strictly positive so that log(randomNumber) is finite.
        while (randomNumber == 0d) {
            randomNumber = random.nextDouble();
        }
        maxSequenceIndex = (maxSequenceIndex < sequenceIndex) ? sequenceIndex : maxSequenceIndex;
        // The decay term is expressed relative to the last timeDecay change plus
        // the accumulated decay before that change, so setTimeDecay() does not
        // require rescaling previously computed weights.
        return (float) (-(sequenceIndex - mostRecentTimeDecayUpdate) * timeDecay - accumuluatedTimeDecay
                + Math.log(-Math.log(randomNumber) / sampleWeight));
    }
    /**
     * Sets the timeDecay on the fly. Note that the assumption is that the times
     * stamps corresponding to changes to timeDecay and sequenceIndexes are
     * non-decreasing -- the sequenceIndexes can be out of order among themselves
     * within two different times when timeDecay was changed.
     *
     * @param newTimeDecay the new sampling rate
     */
    public void setTimeDecay(double newTimeDecay) {
        // accumulatedTimeDecay keeps track of adjustments and is zeroed out when the
        // arrays are exported for some reason
        accumuluatedTimeDecay += (maxSequenceIndex - mostRecentTimeDecayUpdate) * timeDecay;
        timeDecay = newTimeDecay;
        mostRecentTimeDecayUpdate = maxSequenceIndex;
    }
    /**
     * @return the time decay value that determines the rate of decay of previously
     *         seen points. Larger values of time decay indicate a greater bias
     *         toward recent points. A value of 0 corresponds to a uniform sample
     *         over the stream.
     */
    public double getTimeDecay() {
        return timeDecay;
    }

    public long getMaxSequenceIndex() {
        return maxSequenceIndex;
    }

    public void setMaxSequenceIndex(long index) {
        maxSequenceIndex = index;
    }

    public long getMostRecentTimeDecayUpdate() {
        return mostRecentTimeDecayUpdate;
    }

    public void setMostRecentTimeDecayUpdate(long index) {
        mostRecentTimeDecayUpdate = index;
    }

    /**
     * Dynamic configuration: only {@link Config#TIME_DECAY} is supported.
     *
     * @throws IllegalArgumentException for any other setting name
     */
    @Override
    public <T> void setConfig(String name, T value, Class<T> clazz) {
        if (Config.TIME_DECAY.equals(name)) {
            checkArgument(Double.class.isAssignableFrom(clazz),
                    String.format("Setting '%s' must be a double value", name));
            setTimeDecay((Double) value);
        } else {
            throw new IllegalArgumentException("Unsupported configuration setting: " + name);
        }
    }

    /**
     * Dynamic configuration lookup: only {@link Config#TIME_DECAY} is supported.
     *
     * @throws IllegalArgumentException for any other setting name
     */
    @Override
    public <T> T getConfig(String name, Class<T> clazz) {
        checkNotNull(clazz, "clazz must not be null");
        if (Config.TIME_DECAY.equals(name)) {
            checkArgument(clazz.isAssignableFrom(Double.class),
                    String.format("Setting '%s' must be a double value", name));
            return clazz.cast(getTimeDecay());
        } else {
            throw new IllegalArgumentException("Unsupported configuration setting: " + name);
        }
    }
    /**
     * @return the number of points contained by the sampler when full.
     */
    @Override
    public int getCapacity() {
        return capacity;
    }

    public double getInitialAcceptFraction() {
        return initialAcceptFraction;
    }

    public long getRandomSeed() {
        return random.randomSeed;
    }

    /**
     * A random source whose stream of doubles can be replayed from a recorded
     * seed: each call re-seeds a fresh {@link Random} from the stored seed and
     * then advances the stored seed, so serializing {@code randomSeed} captures
     * the full future stream. A directly supplied {@code Random} (tests only)
     * bypasses this mechanism.
     */
    protected class ReplayableRandom {
        long randomSeed;
        Random testRandom;

        ReplayableRandom(long randomSeed) {
            this.randomSeed = randomSeed;
        }

        ReplayableRandom(Random random) {
            this.testRandom = random;
        }

        public double nextDouble() {
            if (testRandom != null) {
                return testRandom.nextDouble();
            }
            // Chain seeds: derive the next seed before returning the draw.
            Random newRandom = new Random(randomSeed);
            randomSeed = newRandom.nextLong();
            return newRandom.nextDouble();
        }
    }

    /**
     * Common builder for stream samplers. Subclass builders extend this type and
     * inherit the fluent setters; the {@code (T) this} casts are the usual
     * self-typed-builder idiom.
     */
    public static class Builder<T extends Builder<T>> {
        // We use Optional types for optional primitive fields when it doesn't make
        // sense to use a constant default.
        protected int capacity = DEFAULT_SAMPLE_SIZE;
        protected double timeDecay = 0;
        protected Random random = null;
        protected long randomSeed = new Random().nextLong();
        protected long maxSequenceIndex = 0;
        protected long sequenceIndexOfMostRecentTimeDecayUpdate = 0;
        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;

        public T capacity(int capacity) {
            this.capacity = capacity;
            return (T) this;
        }

        public T randomSeed(long seed) {
            this.randomSeed = seed;
            return (T) this;
        }

        public T random(Random random) {
            this.random = random;
            return (T) this;
        }

        public T maxSequenceIndex(long maxSequenceIndex) {
            this.maxSequenceIndex = maxSequenceIndex;
            return (T) this;
        }

        public T mostRecentTimeDecayUpdate(long sequenceIndexOfMostRecentTimeDecayUpdate) {
            this.sequenceIndexOfMostRecentTimeDecayUpdate = sequenceIndexOfMostRecentTimeDecayUpdate;
            return (T) this;
        }

        public T initialAcceptFraction(double initialAcceptFraction) {
            this.initialAcceptFraction = initialAcceptFraction;
            return (T) this;
        }

        public T timeDecay(double timeDecay) {
            this.timeDecay = timeDecay;
            return (T) this;
        }
    }
}
| 514 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/AcceptPointState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * A container class used by {@link CompactSampler}. These sampler
 * implementations compute weights during {@link IStreamSampler#acceptPoint} to
 * determine if a new point should be added to the sample. This class retains
 * the sequence index and computed weight from that method call for use in the
 * subsequent {@link IStreamSampler#addPoint} call.
 */
@Data
@AllArgsConstructor
public class AcceptPointState {
    // Sequence index of the accepted point.
    private long sequenceIndex;
    // Priority weight computed for the accepted point.
    private float weight;
}
| 515 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/ISampled.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
/**
 * A simple wrapper class representing a point that has been sampled by a
 * sampler. A sampled point can be added to or removed from a tree. See
 * {@link Weighted} for the standard implementation.
 *
 * @param <P> The point representation used by this sampled point.
 */
public interface ISampled<P> {
    /**
     * Return the sampled value.
     *
     * @return the sampled value.
     */
    P getValue();

    /**
     * Return the sequence index of the sampled value.
     *
     * @return the sequence index of the sampled value.
     */
    long getSequenceIndex();
}
| 516 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/IStreamSampler.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import java.util.List;
import java.util.Optional;
import com.amazon.randomcutforest.config.IDynamicConfig;
/**
 * <p>
 * A sampler that can be updated iteratively from a stream of data points. The
 * update operation is broken into two steps: an "accept" step and an "add"
 * step. During the "accept" step, the sampler decides whether to accept a new
 * point into sample. The decision rule will depend on the sampler
 * implementation If the sampler is full, accepting a new point requires the
 * sampler to evict a point currently in the sample. This operation is also part
 * of the accept step.
 * </p>
 *
 * <p>
 * If the outcome of the accept step is to accept a new point, then the sampler
 * continues to the second step to add a point to the sample (if the outcome is
 * not to accept a new point, then this step is not invoked). The reason for
 * this two-step process is because sampler update steps may be interleaved with
 * model update steps in
 * {@link com.amazon.randomcutforest.executor.IUpdatable#update} (see
 * {@link com.amazon.randomcutforest.executor.SamplerPlusTree#update}, for
 * example). In particular, if a new point is accepted into the sampler whose
 * value is equal to an existing point in the sample, then the model may choose
 * to increment the count on the existing point rather than allocate new storage
 * for the duplicate point.
 * </p>
 *
 * @param <P> The point type.
 */
public interface IStreamSampler<P> extends IDynamicConfig {
    /**
     * Submit a point to the sampler and return true if the point is accepted into
     * the sample. By default this method chains together the {@link #acceptPoint}
     * and {@link #addPoint} methods. If a point was evicted from the sample as a
     * side effect, then the evicted point will be available in
     * {@link #getEvictedPoint()} until the next call to {@link #addPoint}.
     *
     * @param point         The point submitted to the sampler.
     * @param sequenceIndex the sequence number
     * @return true if the point is accepted and added to the sample, false if the
     *         point is rejected.
     */
    default boolean update(P point, long sequenceIndex) {
        if (acceptPoint(sequenceIndex)) {
            addPoint(point);
            return true;
        }
        return false;
    }

    /**
     * This is the first step in a two-step sample operation. In this step, the
     * sampler makes a decision about whether to accept a new point into the sample.
     * If it decides to accept the point, then a new point can be added by calling
     * {@link #addPoint}.
     *
     * If a point needs to be evicted before a new point is added, eviction should
     * happen in this method. If a point is evicted during a call to
     * {@code acceptSample}, it will be available by calling
     * {@link #getEvictedPoint()} until the next time {@code acceptSample} is
     * called.
     *
     * @param sequenceIndex The sequence of the the point being submitted to the
     *                      sampler.
     * @return true if the point should be added to the sample.
     */
    boolean acceptPoint(long sequenceIndex);

    /**
     * This is the second step in a two-step sample operation. If the
     * {@link #acceptPoint} method was called and returned true, then this method
     * should be called to complete the sampling operation by adding the point to
     * the sample. If a call to {@code addPoint} is not preceded by a successful
     * call to {@code acceptPoint}, then it may fail with an
     * {@code IllegalStateException}.
     *
     * @param point The point being added to the sample.
     */
    void addPoint(P point);

    /**
     * Return the list of sampled points.
     *
     * @return the list of sampled points.
     */
    List<ISampled<P>> getSample();

    /**
     * @return the point that was evicted from the sample in the most recent call to
     *         {@link #acceptPoint}, or {@code Optional.empty()} if no point was
     *         evicted.
     */
    Optional<ISampled<P>> getEvictedPoint();

    /**
     * @return true if this sampler contains enough points to support the anomaly
     *         score computation, false otherwise. By default, this is true once
     *         the sampler holds at least one quarter of its capacity.
     */
    default boolean isReady() {
        return size() >= getCapacity() / 4;
    }

    /**
     * @return true if the sampler has reached it's full capacity, false otherwise.
     */
    default boolean isFull() {
        return size() >= getCapacity();
    }

    /**
     * @return the number of points contained by the sampler when full.
     */
    int getCapacity();

    /**
     * @return the number of points currently contained by the sampler.
     */
    int size();

    // Restores the high-water sequence index, e.g. when rebuilding from state.
    void setMaxSequenceIndex(long maxSequenceIndex);
}
| 517 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/sampler/CompactSampler.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
/**
* <p>
* CompactSampler is an implementation of time-based reservoir sampling. When a
* point is submitted to the sampler, the decision to accept the point gives
* more weight to newer points compared to older points. The newness of a point
* is determined by its sequence index, and larger sequence indexes are
* considered newer.
* </p>
* <p>
* The sampler algorithm is an example of the general weighted reservoir
* sampling algorithm, which works like this:
* </p>
* <ol>
* <li>For each item i choose a random number u(i) uniformly from the interval
* (0, 1) and compute the weight function <code>-(1 / c(i)) * log u(i)</code>,
* for a given coefficient function c(i).</li>
* <li>For a sample size of N, maintain a list of the N items with the smallest
* weights.</li>
* <li>When a new item is submitted to sampler, compute its weight. If it is
* smaller than the largest weight currently contained in the sampler, then the
* item with the largest weight is evicted from the sample and replaced by the
* new item.</li>
* </ol>
* <p>
* The coefficient function used by CompactSampler is:
* <code>c(i) = exp(timeDecay * sequenceIndex(i))</code>.
* </p>
*/
public class CompactSampler extends AbstractStreamSampler<Integer> {
    /**
     * When creating a {@code CompactSampler}, the user has the option to disable
     * storing sequence indexes. If storing sequence indexes is disabled, then this
     * value is used for the sequence index in {@link ISampled} instances returned
     * by {@link #getSample()}, {@link #getWeightedSample()}, and
     * {@link #getEvictedPoint()}.
     */
    public static final long SEQUENCE_INDEX_NA = -1L;
    /**
     * A max-heap containing the weighted points currently in sample. The head
     * element is the lowest priority point in the sample (or, equivalently, is the
     * point with the greatest weight). The heap is stored implicitly: the children
     * of index i are at 2i + 1 and 2i + 2.
     */
    protected final float[] weight;
    /**
     * Index values identifying the points in the sample. See
     * {@link com.amazon.randomcutforest.store.IPointStore}. Kept parallel to
     * {@code weight}: entry i describes the same sample element.
     */
    protected final int[] pointIndex;
    /**
     * Sequence indexes of the points in the sample; parallel to {@code weight},
     * and null when sequence-index storage is disabled.
     */
    protected final long[] sequenceIndex;
    /**
     * The number of points currently in the sample.
     */
    protected int size;
    /**
     * If true, then the sampler will store sequence indexes along with the sampled
     * points.
     */
    private final boolean storeSequenceIndexesEnabled;
    /**
     * @return a new builder for configuring a {@code CompactSampler}.
     */
    public static Builder<?> builder() {
        return new Builder<>();
    }
    /**
     * Convenience factory for a uniform (non-time-decayed) sampler: a timeDecay of
     * 0 gives every point in the stream an equal chance of being retained.
     *
     * @param sampleSize     the number of points in the sample when full
     * @param randomSeed     seed for the sampler's random source
     * @param storeSequences if true, sequence indexes are stored with the points
     * @return a new uniform sampler
     */
    public static CompactSampler uniformSampler(int sampleSize, long randomSeed, boolean storeSequences) {
        return new Builder<>().capacity(sampleSize).timeDecay(0).randomSeed(randomSeed)
                .storeSequenceIndexesEnabled(storeSequences).build();
    }
    /**
     * Constructs a sampler from a builder. There are two paths: if the builder
     * carries pre-existing arrays (restoring from serialized state), they are
     * validated against the capacity, adopted directly (not copied), and the heap
     * property is repaired or verified via {@link #reheap}. Otherwise a fresh,
     * empty sampler is allocated.
     *
     * @param builder the configured builder
     * @throws IllegalArgumentException if the builder state is inconsistent
     */
    protected CompactSampler(Builder<?> builder) {
        super(builder);
        checkArgument(builder.initialAcceptFraction > 0, " the admittance fraction cannot be <= 0");
        checkArgument(builder.capacity > 0, " sampler capacity cannot be <=0 ");
        this.storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;
        this.timeDecay = builder.timeDecay;
        this.maxSequenceIndex = builder.maxSequenceIndex;
        this.mostRecentTimeDecayUpdate = builder.sequenceIndexOfMostRecentTimeDecayUpdate;
        if (builder.weight != null || builder.pointIndex != null || builder.sequenceIndex != null
                || builder.validateHeap) {
            // Restore path: all arrays must be present (sequenceIndex only when
            // sequence storage is enabled) and sized exactly to capacity.
            checkArgument(builder.weight != null && builder.weight.length == builder.capacity, " incorrect state");
            checkArgument(builder.pointIndex != null && builder.pointIndex.length == builder.capacity,
                    " incorrect state");
            checkArgument(
                    !builder.storeSequenceIndexesEnabled
                            || builder.sequenceIndex != null && builder.sequenceIndex.length == builder.capacity,
                    " incorrect state");
            this.weight = builder.weight;
            this.pointIndex = builder.pointIndex;
            this.sequenceIndex = builder.sequenceIndex;
            size = builder.size;
            // Repair the heap, or throw if validateHeap is set and it is corrupt.
            reheap(builder.validateHeap);
        } else {
            // Fresh path: start empty.
            checkArgument(builder.size == 0, "incorrect state");
            size = 0;
            weight = new float[builder.capacity];
            pointIndex = new int[builder.capacity];
            if (storeSequenceIndexesEnabled) {
                this.sequenceIndex = new long[builder.capacity];
            } else {
                this.sequenceIndex = null;
            }
        }
    }
@Override
public boolean acceptPoint(long sequenceIndex, float samplingWeight) {
checkArgument(samplingWeight >= 0, " weight has to be non-negative");
checkState(sequenceIndex >= mostRecentTimeDecayUpdate, "incorrect sequences submitted to sampler");
evictedPoint = null;
if (samplingWeight > 0) {
float weight = computeWeight(sequenceIndex, samplingWeight);
boolean initial = (size < capacity && random.nextDouble() < initialAcceptProbability(size));
if (initial || (weight < this.weight[0])) {
acceptPointState = new AcceptPointState(sequenceIndex, weight);
if (!initial) {
evictMax();
}
return true;
}
} // 0 weight implies ignore sample
return false;
}
/**
* evicts the maximum weight point from the sampler. can be used repeatedly to
* change the size of the sampler and associated tree
*/
public void evictMax() {
long evictedIndex = storeSequenceIndexesEnabled ? this.sequenceIndex[0] : 0L;
evictedPoint = new Weighted<>(this.pointIndex[0], this.weight[0], evictedIndex);
--size;
this.weight[0] = this.weight[size];
this.pointIndex[0] = this.pointIndex[size];
if (storeSequenceIndexesEnabled) {
this.sequenceIndex[0] = this.sequenceIndex[size];
}
swapDown(0);
}
/**
* Check to see if the weight at current index is greater than or equal to the
* weight at each corresponding child index. If validate is true then throw an
* IllegalStateException, otherwise swap the nodes and perform the same check at
* the next level. Continue until you reach a level where the parent node's
* weight is greater than or equal to both children's weights, or until there
* are no more levels to descend.
*
* @param startIndex The index of node to start with.
* @param validate If true, a violation of the heap property will throw an
* IllegalStateException. If false, then swap nodes that
* violate the heap property.
*/
private void swapDown(int startIndex, boolean validate) {
int current = startIndex;
while (2 * current + 1 < size) {
int maxIndex = 2 * current + 1;
if (2 * current + 2 < size && weight[2 * current + 2] > weight[maxIndex]) {
maxIndex = 2 * current + 2;
}
if (weight[maxIndex] > weight[current]) {
if (validate) {
throw new IllegalStateException("the heap property is not satisfied at index " + current);
}
swapWeights(current, maxIndex);
current = maxIndex;
} else {
break;
}
}
}
    // convenience overload: repair (rather than validate) the heap from startIndex
    private void swapDown(int startIndex) {
        swapDown(startIndex, false);
    }
public void reheap(boolean validate) {
for (int i = (size + 1) / 2; i >= 0; i--) {
swapDown(i, validate);
}
}
    /**
     * Insert a point directly with a pre-computed weight and sequence index,
     * bypassing the acceptPoint/computeWeight path (presumably used when
     * restoring sampler state -- TODO confirm against callers). Requires that no
     * acceptance is pending and the sampler has room.
     *
     * @param pointIndex    reference of the point in the point store; must not be null
     * @param weight        the (already computed) heap weight of the point
     * @param sequenceIndex the sequence number associated with the point
     */
    public void addPoint(Integer pointIndex, float weight, long sequenceIndex) {
        checkArgument(acceptPointState == null && size < capacity && pointIndex != null, " operation not permitted");
        acceptPointState = new AcceptPointState(sequenceIndex, weight);
        addPoint(pointIndex);
    }
@Override
public void addPoint(Integer pointIndex) {
if (pointIndex != null) {
checkState(size < capacity, "sampler full");
checkState(acceptPointState != null,
"this method should only be called after a successful call to acceptSample(long)");
this.weight[size] = acceptPointState.getWeight();
this.pointIndex[size] = pointIndex;
if (storeSequenceIndexesEnabled) {
this.sequenceIndex[size] = acceptPointState.getSequenceIndex();
}
int current = size++;
while (current > 0) {
int tmp = (current - 1) / 2;
if (this.weight[tmp] < this.weight[current]) {
swapWeights(current, tmp);
current = tmp;
} else
break;
}
acceptPointState = null;
}
}
    /**
     * Return the list of sampled points. If this sampler was created with the
     * {@code storeSequenceIndexesEnabled} flag set to false, then all sequence
     * indexes in the list will be set to {@link #SEQUENCE_INDEX_NA}.
     * Points are returned in heap (array) order, not sorted by weight.
     *
     * @return the list of sampled points.
     */
    @Override
    public List<ISampled<Integer>> getSample() {
        return streamSample().collect(Collectors.toList());
    }
    /**
     * Return the list of sampled points with weights. Any accumulated time-decay
     * offset is folded back into the weights by {@code streamSample()} first.
     *
     * @return the list of sampled points with weights.
     */
    public List<Weighted<Integer>> getWeightedSample() {
        return streamSample().collect(Collectors.toList());
    }
    // Streams the current sample in heap (array) order. reset_weights() is called
    // first so any accumulated time-decay offset is folded back into the stored
    // weights before they are exposed -- note this mutates sampler state.
    private Stream<Weighted<Integer>> streamSample() {
        reset_weights();
        return IntStream.range(0, size).mapToObj(i -> {
            // SEQUENCE_INDEX_NA marks samplers that do not track sequence indexes
            long index = sequenceIndex != null ? sequenceIndex[i] : SEQUENCE_INDEX_NA;
            return new Weighted<>(pointIndex[i], weight[i], index);
        });
    }
/**
* removes the adjustments to weight in accumulated timeDecay and resets the
* updates to timeDecay
*/
private void reset_weights() {
if (accumuluatedTimeDecay == 0)
return;
// now the weight computation of every element would not see this subtraction
// which implies that every existing element should see the offset as addition
for (int i = 0; i < size; i++) {
weight[i] += accumuluatedTimeDecay;
}
accumuluatedTimeDecay = 0;
}
    /**
     * @return the point evicted by the most recent call to {@link #update}, or null
     *         if no point was evicted.
     */
    public Optional<ISampled<Integer>> getEvictedPoint() {
        return Optional.ofNullable(evictedPoint);
    }
    /**
     * @return the number of points currently contained by the sampler.
     */
    @Override
    public int size() {
        return size;
    }
    // NOTE(review): the three accessors below expose the internal arrays directly,
    // so callers can mutate sampler state through them. Presumably intentional for
    // state serialization -- confirm before adding defensive copies.
    public float[] getWeightArray() {
        return weight;
    }
    public int[] getPointIndexArray() {
        return pointIndex;
    }
    // may be null when sequence indexes are not tracked
    public long[] getSequenceIndexArray() {
        return sequenceIndex;
    }
    // true if per-point sequence indexes are tracked alongside weights
    public boolean isStoreSequenceIndexesEnabled() {
        return storeSequenceIndexesEnabled;
    }
private void swapWeights(int a, int b) {
int tmp = pointIndex[a];
pointIndex[a] = pointIndex[b];
pointIndex[b] = tmp;
float tmpDouble = weight[a];
weight[a] = weight[b];
weight[b] = tmpDouble;
if (storeSequenceIndexesEnabled) {
long tmpLong = sequenceIndex[a];
sequenceIndex[a] = sequenceIndex[b];
sequenceIndex[b] = tmpLong;
}
}
    /**
     * Fluent builder for {@code CompactSampler}; extends the stream-sampler
     * builder with the fields needed to reconstruct a sampler from saved state.
     */
    public static class Builder<T extends Builder<T>> extends AbstractStreamSampler.Builder<T> {
        // The array fields default to null / size 0 so a fresh sampler can be
        // built; they are populated when restoring a sampler from serialized state.
        private int size = 0;
        private float[] weight = null;
        private int[] pointIndex = null;
        private long[] sequenceIndex = null;
        // when true, the constructor verifies the heap property instead of assuming it
        private boolean validateHeap = false;
        private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
        public T size(int size) {
            this.size = size;
            return (T) this;
        }
        public T weight(float[] weight) {
            this.weight = weight;
            return (T) this;
        }
        public T pointIndex(int[] pointIndex) {
            this.pointIndex = pointIndex;
            return (T) this;
        }
        public T sequenceIndex(long[] sequenceIndex) {
            this.sequenceIndex = sequenceIndex;
            return (T) this;
        }
        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
            return (T) this;
        }
        public T validateHeap(boolean validateHeap) {
            this.validateHeap = validateHeap;
            return (T) this;
        }
        public CompactSampler build() {
            return new CompactSampler(this);
        }
    }
}
| 518 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/imputation/ImputeVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.imputation;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import java.util.Random;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.MultiVisitor;
import com.amazon.randomcutforest.anomalydetection.AnomalyScoreVisitor;
import com.amazon.randomcutforest.returntypes.ConditionalTreeSample;
import com.amazon.randomcutforest.tree.BoundingBox;
import com.amazon.randomcutforest.tree.INodeView;
/**
 * A MultiVisitor which imputes missing values in a point. The missing values
 * are first imputed with the corresponding values in the leaf node in the
 * traversal path. Then, when this MultiVisitor is merged with another
 * MultiVisitor, we keep the imputed values with a lower rank, where the rank
 * value is the anomaly score for the imputed point.
 */
public class ImputeVisitor implements MultiVisitor<ConditionalTreeSample> {

    // default large value for initialization; consider -ve log( 0 )
    public static double DEFAULT_INIT_VALUE = Double.MAX_VALUE;

    /**
     * an array that indicates the missing entries in the tree space
     */
    protected final boolean[] missing;

    /**
     * the query point in the tree space, where the missing entries (in tree space)
     * would be overwritten
     */
    protected float[] queryPoint;

    /**
     * the unnormalized anomaly score of a point, should be interpreted as -ve
     * log(likelihood)
     */
    protected double anomalyRank;

    /**
     * distance of the point in the forest space, this is not tree specific
     */
    protected double distance;

    /**
     * a parameter that controls central estimation ( = 1.0) and fully random sample
     * over entire range ( = 0.0 )
     */
    protected double centrality;

    // seed used to derive the uniform draw in randomRank
    protected long randomSeed;

    // uniform draw in [0, 1) mixed into adjustedRank() when centrality < 1
    protected double randomRank;

    // true once further traversal can no longer change the result
    protected boolean converged;

    // index (in the point store) of the leaf point used for imputation
    protected int pointIndex;

    // counts how often each dimension was used as a cut during traversal
    protected int[] dimensionsUsed;

    // bounding box of the first node visited; reused for separation probabilities
    protected BoundingBox box;

    /**
     * Create a new ImputeVisitor.
     *
     * @param liftedPoint          The point with missing values we want to impute.
     *                             NOTE(review): not read in this class; presumably
     *                             retained for subclasses -- confirm.
     * @param queryPoint           The projected point in the tree space
     * @param liftedMissingIndexes the original missing indices. NOTE(review): not
     *                             read in this class; presumably retained for
     *                             subclasses -- confirm.
     * @param missingIndexes       The indexes of the missing values in the tree
     *                             space
     * @param centrality           controls central estimation; must lie in [0, 1]
     * @param randomSeed           seed for the random draws used when centrality
     *                             is less than 1
     */
    public ImputeVisitor(float[] liftedPoint, float[] queryPoint, int[] liftedMissingIndexes, int[] missingIndexes,
            double centrality, long randomSeed) {
        checkArgument(centrality >= 0, "centrality cannot be negative");
        checkArgument(centrality <= 1.0, "centrality cannot be more than 1.0");
        this.queryPoint = Arrays.copyOf(queryPoint, queryPoint.length);
        this.missing = new boolean[queryPoint.length];
        this.centrality = centrality;
        this.randomSeed = randomSeed;
        this.dimensionsUsed = new int[queryPoint.length];
        if (missingIndexes == null) {
            missingIndexes = new int[0];
        }
        for (int i = 0; i < missingIndexes.length; i++) {
            checkArgument(0 <= missingIndexes[i], "Missing value indexes cannot be negative");
            checkArgument(missingIndexes[i] < queryPoint.length,
                    "Missing value indexes must be less than query length");
            missing[missingIndexes[i]] = true;
        }
        anomalyRank = DEFAULT_INIT_VALUE;
        distance = DEFAULT_INIT_VALUE;
    }

    /**
     * Convenience constructor: uses the query point itself as the lifted point,
     * full centrality (1.0), and a fixed seed.
     *
     * @param queryPoint             the point with missing values
     * @param numberOfMissingIndices how many leading entries of missingIndexes are
     *                               valid
     * @param missingIndexes         indexes of the missing values
     */
    public ImputeVisitor(float[] queryPoint, int numberOfMissingIndices, int[] missingIndexes) {
        this(queryPoint, Arrays.copyOf(queryPoint, queryPoint.length),
                Arrays.copyOf(missingIndexes, Math.min(numberOfMissingIndices, missingIndexes.length)),
                Arrays.copyOf(missingIndexes, Math.min(numberOfMissingIndices, missingIndexes.length)), 1.0, 0L);
    }

    /**
     * A copy constructor which creates a deep copy of the original ImputeVisitor.
     * Ranks and distance are re-initialized, and the random seed is re-derived so
     * the copy draws independent random values.
     *
     * @param original the visitor being copied
     */
    ImputeVisitor(ImputeVisitor original) {
        int length = original.queryPoint.length;
        this.queryPoint = Arrays.copyOf(original.queryPoint, length);
        this.missing = Arrays.copyOf(original.missing, length);
        this.dimensionsUsed = new int[original.dimensionsUsed.length];
        this.randomSeed = new Random(original.randomSeed).nextLong();
        this.centrality = original.centrality;
        anomalyRank = DEFAULT_INIT_VALUE;
        distance = DEFAULT_INIT_VALUE;
    }

    /**
     * Update the rank value using the probability that the imputed query point is
     * separated from this bounding box in a random cut. This step is conceptually
     * the same as {@link AnomalyScoreVisitor#accept}.
     *
     * @param node        the node being visited
     * @param depthOfNode the depth of the node being visited
     */
    public void accept(final INodeView node, final int depthOfNode) {
        double probabilityOfSeparation;
        if (box == null) {
            // first interior node on the way up: cache its bounding box
            box = (BoundingBox) node.getBoundingBox();
            probabilityOfSeparation = CommonUtils.getProbabilityOfSeparation(box, queryPoint);
        } else {
            probabilityOfSeparation = node.probailityOfSeparation(queryPoint);
        }
        converged = (probabilityOfSeparation == 0);
        if (probabilityOfSeparation <= 0) {
            // the point cannot be separated here; the rank is unchanged
            return;
        }
        anomalyRank = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass())
                + (1 - probabilityOfSeparation) * anomalyRank;
    }

    /**
     * Impute the missing values in the query point with the corresponding values in
     * the leaf point. Set the rank to the score function evaluated at the leaf
     * node.
     *
     * @param leafNode    the leaf node being visited
     * @param depthOfNode the depth of the leaf node
     */
    @Override
    public void acceptLeaf(final INodeView leafNode, final int depthOfNode) {
        float[] leafPoint = leafNode.getLeafPoint();
        pointIndex = leafNode.getLeafPointIndex();
        // local shadows the field intentionally; the field is set once the L1
        // distance over the non-missing coordinates is known
        double distance = 0;
        for (int i = 0; i < queryPoint.length; i++) {
            if (missing[i]) {
                queryPoint[i] = leafPoint[i];
            } else {
                double t = (queryPoint[i] - leafPoint[i]);
                distance += Math.abs(t);
            }
        }
        if (centrality < 1.0) {
            // draw the random component used by adjustedRank(), advancing the seed
            Random rng = new Random(randomSeed);
            randomSeed = rng.nextLong();
            randomRank = rng.nextDouble();
        }
        this.distance = distance;
        if (distance <= 0) {
            // exact match on the observed coordinates
            converged = true;
            if (depthOfNode == 0) {
                anomalyRank = 0;
            } else {
                anomalyRank = scoreSeen(depthOfNode, leafNode.getMass());
            }
        } else {
            anomalyRank = scoreUnseen(depthOfNode, leafNode.getMass());
        }
    }

    /**
     * @return the imputed point together with the leaf index, bounding box, and
     *         distance that produced it.
     */
    @Override
    public ConditionalTreeSample getResult() {
        return new ConditionalTreeSample(pointIndex, box, distance, queryPoint);
    }

    /**
     * An ImputeVisitor should split whenever the cut dimension in a node
     * corresponds to a missing value in the query point. As a side effect the
     * visitor also tallies the cut dimension in {@code dimensionsUsed}.
     *
     * @param node A node in the tree traversal
     * @return true if the cut dimension in the node corresponds to a missing value
     *         in the query point, false otherwise.
     */
    @Override
    public boolean trigger(final INodeView node) {
        int index = node.getCutDimension();
        ++dimensionsUsed[index];
        return missing[index];
    }

    protected double getAnomalyRank() {
        return anomalyRank;
    }

    protected double getDistance() {
        return distance;
    }

    /**
     * @return a copy of this visitor.
     */
    @Override
    public MultiVisitor<ConditionalTreeSample> newCopy() {
        return new ImputeVisitor(this);
    }

    // convex combination of the random draw and the anomaly rank; centrality = 1
    // is purely score-driven, centrality = 0 is purely random
    double adjustedRank() {
        return (1 - centrality) * randomRank + centrality * anomalyRank;
    }

    // true when the other visitor's imputation should replace this one's
    protected boolean updateCombine(ImputeVisitor other) {
        return other.adjustedRank() < adjustedRank();
    }

    /**
     * If this visitor has a lower rank than the second visitor, do nothing.
     * Otherwise, overwrite this visitor's imputed values with the values from the
     * second visitor.
     *
     * @param other A second visitor
     */
    @Override
    public void combine(MultiVisitor<ConditionalTreeSample> other) {
        ImputeVisitor visitor = (ImputeVisitor) other;
        if (updateCombine(visitor)) {
            updateFrom(visitor);
        }
    }

    // copy the winning visitor's imputation and bookkeeping into this one
    protected void updateFrom(ImputeVisitor visitor) {
        System.arraycopy(visitor.queryPoint, 0, queryPoint, 0, queryPoint.length);
        pointIndex = visitor.pointIndex;
        anomalyRank = visitor.anomalyRank;
        box = visitor.box;
        converged = visitor.converged;
        distance = visitor.distance;
    }

    protected double scoreSeen(int depth, int mass) {
        return CommonUtils.defaultScoreSeenFunction(depth, mass);
    }

    protected double scoreUnseen(int depth, int mass) {
        return CommonUtils.defaultScoreUnseenFunction(depth, mass);
    }

    @Override
    public boolean isConverged() {
        return converged;
    }
}
| 519 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/imputation/ConditionalSampleSummarizer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.imputation;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.min;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.amazon.randomcutforest.returntypes.ConditionalTreeSample;
import com.amazon.randomcutforest.returntypes.SampleSummary;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.util.Weighted;
/**
 * Summarizes the per-tree conditional samples produced by impute queries into a
 * {@link SampleSummary}: the raw weighted points plus, optionally, a small set
 * of "typical" imputed points with relative likelihoods.
 */
public class ConditionalSampleSummarizer {
    /**
     * this limits the number of values we would see per dimension; note that it may
     * be hard to interpret a larger list
     */
    public static int MAX_NUMBER_OF_TYPICAL_PER_DIMENSION = 2;
    /**
     * the maximum size of the typical points array, irrespective of the number of
     * missing dimensions
     */
    public static int MAX_NUMBER_OF_TYPICAL_ELEMENTS = 5;
    /**
     * the array of missing dimension indices
     */
    protected int[] missingDimensions;
    /**
     * the query point, where we are inferring the missing values indicated by
     * missingDimensions[0], missingDimensions[1], ... etc.
     */
    protected float[] queryPoint;
    /**
     * a control parameter; =0 corresponds to (near) random samples and =1
     * corresponds to more central (low anomaly score) samples
     */
    protected double centrality;
    /**
     * a boolean that determines if the summarization should use the missing
     * dimensions or the full dimensions.
     */
    protected boolean project = false;
    public ConditionalSampleSummarizer(int[] missingDimensions, float[] queryPoint, double centrality) {
        this.missingDimensions = Arrays.copyOf(missingDimensions, missingDimensions.length);
        this.queryPoint = Arrays.copyOf(queryPoint, queryPoint.length);
        this.centrality = centrality;
    }
    public ConditionalSampleSummarizer(int[] missingDimensions, float[] queryPoint, double centrality,
            boolean project) {
        this.missingDimensions = Arrays.copyOf(missingDimensions, missingDimensions.length);
        this.queryPoint = Arrays.copyOf(queryPoint, queryPoint.length);
        this.centrality = centrality;
        this.project = project;
    }
    // convenience overload that always computes typical points
    public SampleSummary summarize(List<ConditionalTreeSample> alist) {
        checkArgument(alist.size() > 0, "incorrect call to summarize");
        return summarize(alist, true);
    }
    /**
     * Summarize the per-tree samples.
     *
     * @param alist      non-empty list of per-tree conditional samples
     * @param addTypical if true, also compute the typical points and likelihoods
     * @return a SampleSummary over the (possibly projected) weighted points
     */
    public SampleSummary summarize(List<ConditionalTreeSample> alist, boolean addTypical) {
        /**
         * first we dedup over the points in the pointStore -- it is likely, and
         * beneficial that different trees acting as different predictors in an ensemble
         * predict the same point that has been seen before. This would be specially
         * true if the time decay is large -- then the whole ensemble starts to behave
         * as a sliding window.
         *
         * note that it is possible that two different *points* predict the same missing
         * value especially when values are repeated in time. however that check of
         * equality of points would be expensive -- and one mechanism is to use a tree
         * (much like an RCT) to test for equality. We will try to not perform such a
         * test.
         */
        double totalWeight = alist.size();
        List<ConditionalTreeSample> newList = ConditionalTreeSample.dedup(alist);
        // ascending order of distance; closest (most typical) samples first
        newList.sort((o1, o2) -> Double.compare(o1.distance, o2.distance));
        ArrayList<Weighted<float[]>> points = new ArrayList<>();
        newList.stream().forEach(e -> {
            if (!project) {
                points.add(new Weighted<>(e.leafPoint, (float) e.weight));
            } else {
                // keep only the coordinates of the missing dimensions
                float[] values = new float[missingDimensions.length];
                for (int i = 0; i < missingDimensions.length; i++) {
                    values[i] = e.leafPoint[missingDimensions[i]];
                }
                points.add(new Weighted<>(values, (float) e.weight));
            }
        });
        if (!addTypical) {
            return new SampleSummary(points);
        }
        /**
         * for centrality = 0; there will be no filtration for centrality = 1; at least
         * half the values will be present -- the sum of distance(P33) + distance(P50)
         * appears to be slightly more reasonable than 2 * distance(P50) the distance 0
         * elements correspond to exact matches (on the available fields)
         *
         * it is an open question if the weight of such points should be higher. But if
         * one wants true dynamic adaptability then such a choice to increase weights of
         * exact matches would go against the dynamic sampling based use of RCF.
         **/
        int dimensions = queryPoint.length;
        double threshold = centrality * newList.get(0).distance;
        double currentWeight = 0;
        int alwaysInclude = 0;
        double remainderWeight = totalWeight;
        // exact matches (distance 0) are always included and excluded from the
        // percentile computation below
        while (newList.get(alwaysInclude).distance == 0) {
            remainderWeight -= newList.get(alwaysInclude).weight;
            ++alwaysInclude;
            if (alwaysInclude == newList.size()) {
                break;
            }
        }
        // NOTE(review): currentWeight never includes newList.get(0).weight because j
        // starts at 1 -- presumably intentional since index 0 set the initial
        // threshold; confirm.
        for (int j = 1; j < newList.size(); j++) {
            if ((currentWeight < remainderWeight / 3 && currentWeight + newList.get(j).weight >= remainderWeight / 3)
                    || (currentWeight < remainderWeight / 2
                            && currentWeight + newList.get(j).weight >= remainderWeight / 2)) {
                threshold = centrality * newList.get(j).distance;
            }
            currentWeight += newList.get(j).weight;
        }
        // note that the threshold is currently centrality * (some distance in the list)
        // thus the sequel uses a convex combination; and setting centrality = 0 removes
        // the entire filtering based on distances
        threshold += (1 - centrality) * newList.get(newList.size() - 1).distance;
        // num = number of (sorted) samples within the distance threshold
        int num = 0;
        while (num < newList.size() && newList.get(num).distance <= threshold) {
            ++num;
        }
        ArrayList<Weighted<float[]>> typicalPoints = new ArrayList<>();
        for (int j = 0; j < num; j++) {
            ConditionalTreeSample e = newList.get(j);
            float[] values;
            if (project) {
                values = new float[missingDimensions.length];
                for (int i = 0; i < missingDimensions.length; i++) {
                    values[i] = e.leafPoint[missingDimensions[i]];
                }
            } else {
                values = Arrays.copyOf(e.leafPoint, dimensions);
            }
            typicalPoints.add(new Weighted<>(values, (float) e.weight));
        }
        // cap the number of typical points; see the two MAX_* constants above
        int maxAllowed = min(queryPoint.length * MAX_NUMBER_OF_TYPICAL_PER_DIMENSION, MAX_NUMBER_OF_TYPICAL_ELEMENTS);
        maxAllowed = min(maxAllowed, num);
        SampleSummary projectedSummary = Summarizer.l2summarize(typicalPoints, maxAllowed, num, false, 72);
        // lift the summarized (possibly projected) centers back into full query space
        float[][] pointList = new float[projectedSummary.summaryPoints.length][];
        float[] likelihood = new float[projectedSummary.summaryPoints.length];
        for (int i = 0; i < projectedSummary.summaryPoints.length; i++) {
            pointList[i] = Arrays.copyOf(queryPoint, dimensions);
            for (int j = 0; j < missingDimensions.length; j++) {
                pointList[i][missingDimensions[j]] = projectedSummary.summaryPoints[i][j];
            }
            likelihood[i] = projectedSummary.relativeWeight[i];
        }
        return new SampleSummary(points, pointList, likelihood);
    }
}
| 520 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/SequentialForestTraversalExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Collector;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IComponentModel;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.returntypes.ConvergingAccumulator;
/**
 * Traverse the trees in a forest sequentially.
 */
public class SequentialForestTraversalExecutor extends AbstractForestTraversalExecutor {

    public SequentialForestTraversalExecutor(ComponentList<?, ?> components) {
        super(components);
    }

    /**
     * Visit every component, left-fold the per-tree results with the accumulator,
     * and normalize the folded value with the finisher.
     */
    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,
            Function<R, S> finisher) {
        R combined = null;
        boolean anyResult = false;
        for (IComponentModel<?, ?> model : components) {
            R treeResult = model.traverse(point, visitorFactory);
            combined = anyResult ? accumulator.apply(combined, treeResult) : treeResult;
            anyResult = true;
        }
        if (!anyResult) {
            throw new IllegalStateException("accumulator returned an empty result");
        }
        return finisher.apply(combined);
    }

    /**
     * Visit every component and gather the per-tree results with the collector.
     */
    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {
        return components.stream().map(model -> model.traverse(point, visitorFactory)).collect(collector);
    }

    /**
     * Visit components one at a time, stopping early once the accumulator reports
     * convergence; the finisher normalizes the accumulated value.
     */
    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
            ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {
        for (IComponentModel<?, ?> model : components) {
            accumulator.accept(model.traverse(point, visitorFactory));
            if (accumulator.isConverged()) {
                break;
            }
        }
        return finisher.apply(accumulator.getAccumulatedValue());
    }

    /**
     * Multi-visitor analogue of the fold-based traversal above.
     */
    @Override
    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            BinaryOperator<R> accumulator, Function<R, S> finisher) {
        R combined = null;
        boolean anyResult = false;
        for (IComponentModel<?, ?> model : components) {
            R treeResult = model.traverseMulti(point, visitorFactory);
            combined = anyResult ? accumulator.apply(combined, treeResult) : treeResult;
            anyResult = true;
        }
        if (!anyResult) {
            throw new IllegalStateException("accumulator returned an empty result");
        }
        return finisher.apply(combined);
    }

    /**
     * Multi-visitor analogue of the collector-based traversal above.
     */
    @Override
    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            Collector<R, ?, S> collector) {
        return components.stream().map(model -> model.traverseMulti(point, visitorFactory)).collect(collector);
    }
}
| 521 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/ParallelForestUpdateExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;
import com.amazon.randomcutforest.ComponentList;
/**
 * An implementation of forest update methods that uses a private thread pool
 * to update trees in parallel.
 *
 * @param <PointReference> references to a point
 * @param <Point>          explicit data type of a point
 */
public class ParallelForestUpdateExecutor<PointReference, Point>
        extends AbstractForestUpdateExecutor<PointReference, Point> {

    // not final: submitAndJoin re-creates the pool when it is null -- presumably
    // to cope with deserialized instances whose pool was not restored; confirm
    private ForkJoinPool forkJoinPool;
    private final int threadPoolSize;

    public ParallelForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,
            ComponentList<PointReference, Point> components, int threadPoolSize) {
        super(updateCoordinator, components);
        this.threadPoolSize = threadPoolSize;
        forkJoinPool = new ForkJoinPool(threadPoolSize);
    }

    // updates every component in parallel inside the private pool and returns
    // only the results that changed model state
    @Override
    protected List<UpdateResult<PointReference>> updateInternal(PointReference point, long seqNum) {
        return submitAndJoin(() -> components.parallelStream().map(t -> t.update(point, seqNum))
                .filter(UpdateResult::isStateChange).collect(Collectors.toList()));
    }

    // runs the callable in this executor's pool (so parallelStream does not use
    // the common pool) and blocks for the result
    private <T> T submitAndJoin(Callable<T> callable) {
        if (forkJoinPool == null) {
            forkJoinPool = new ForkJoinPool(threadPoolSize);
        }
        return forkJoinPool.submit(callable).join();
    }
}
| 522 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/ITraversable.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.MultiVisitor;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.tree.ITree;
/**
 * This interface defines a model that can be traversed by a {@link Visitor} or
 * a {@link MultiVisitor}.
 */
public interface ITraversable {
    /**
     * Traverse the path defined by {@code point} and invoke the visitor. The path
     * defined by {@code point} is the path from the root node to the leaf node
     * where {@code point} would be inserted. The visitor is invoked for each node
     * in the path in reverse order (starting from the leaf node and ending at the
     * root node). The return value is obtained by calling
     * {@link Visitor#getResult()} on the visitor after it has visited each node in
     * the path.
     *
     * @param point          A point that determines the traversal path.
     * @param visitorFactory A factory function that can be applied to an
     *                       {@link ITree} instance to obtain a {@link Visitor}
     *                       instance.
     * @param <R>            The return value type of the visitor.
     * @return the value of {@link Visitor#getResult()} after visiting each node in
     *         the path.
     */
    <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory);
    /**
     * Traverse the paths defined by {@code point} and the multi-visitor, and invoke
     * the multi-visitor on each node. The path defined by {@code point} is the path
     * from the root node to the leaf node where {@code point} would be inserted.
     * However, at each node along the path we invoke {@link MultiVisitor#trigger},
     * and if it returns true we create a copy of the visitor and send it down both
     * branches of the tree. The multi-visitor is invoked for each node in the path
     * in reverse order (starting from the leaf node and ending at the root node).
     * When two multi-visitors meet at a node, they are combined by calling
     * {@link MultiVisitor#combine}. The return value is obtained by calling
     * {@link MultiVisitor#getResult()} on the single remaining visitor after it has
     * visited each node in each branch of the path.
     *
     * @param point          A point that determines the traversal path.
     * @param visitorFactory A factory function that can be applied to an
     *                       {@link ITree} instance to obtain a {@link MultiVisitor}
     *                       instance.
     * @param <R>            The return value type of the multi-visitor.
     * @return the value of {@link MultiVisitor#getResult()} after traversing all
     *         paths.
     */
    <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory);
    /**
     * After a new traversable model is initialized, it will not be able to return
     * meaningful results to queries until it has been updated with (i.e., learned
     * from) some number of points. The exact number of points may vary for
     * different models. After this method returns true for the first time, it
     * should continue to return true unless the user takes an explicit action to
     * reset the model state.
     *
     * @return true if this model is ready to provide a meaningful response to a
     *         traversal query, otherwise false.
     */
    boolean isOutputReady();
}
| 523 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/IUpdatable.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
public interface IUpdatable<PointReference> {
    /**
     * Result of an update on a sampler plus tree.
     *
     * @param point  to be considered for updating the sampler plus tree
     * @param seqNum timestamp (sequence number) associated with the point
     * @return the (inserted, deleted) pair of handles in the tree for eventual
     *         bookkeeping
     */
    UpdateResult<PointReference> update(PointReference point, long seqNum);
}
| 524 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/IStateCoordinator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.List;
import com.amazon.randomcutforest.store.IPointStore;
/**
 * An IStateCoordinator is used in conjunction with a family of IUpdatable
 * instances. The coordinator transforms the input point into the form expected
 * by the updatable models, and processes the list of deleted points if needed.
 * An IStateCoordinator can be used to manage shared state.
 *
 * @param <PointReference> An internal point representation.
 * @param <Point> Explicit point type
 */
public interface IStateCoordinator<PointReference, Point> {

    /**
     * Transform the input point into a value that can be submitted to IUpdatable
     * instances.
     *
     * @param point          The input point.
     * @param sequenceNumber the sequence number associated with the point
     * @return The point transformed into the representation expected by an
     *         IUpdatable instance, or null if no update should be performed.
     */
    PointReference initUpdate(Point point, long sequenceNumber);

    /**
     * Complete the update. This method is called after all IUpdatable instances
     * have completed their individual updates. It receives the results produced
     * by those instances (including any points they deleted) for further
     * processing if needed.
     *
     * @param updateResults A list of the update results (added/deleted point
     *                      pairs) returned by the IUpdatable instances.
     * @param updateInput   The corresponding output from {@link #initUpdate},
     *                      which was passed into the update method for each
     *                      component.
     */
    void completeUpdate(List<UpdateResult<PointReference>> updateResults, PointReference updateInput);

    /**
     * @return the total number of updates processed by this coordinator.
     */
    long getTotalUpdates();

    /**
     * @param totalUpdates the new value of the total update counter (used when
     *                     restoring state).
     */
    void setTotalUpdates(long totalUpdates);

    /**
     * @return the shared point store backing the components, or null if this
     *         coordinator does not manage one (the default).
     */
    default IPointStore<PointReference, Point> getStore() {
        return null;
    }
}
| 525 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/SequentialForestUpdateExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.List;
import java.util.stream.Collectors;
import com.amazon.randomcutforest.ComponentList;
/**
 * An update executor that visits every component model of the forest one after
 * another on the calling thread.
 *
 * @param <PointReference> references to a point
 * @param <Point>          explicit data type of a point
 */
public class SequentialForestUpdateExecutor<PointReference, Point>
        extends AbstractForestUpdateExecutor<PointReference, Point> {

    /**
     * Create a sequential update executor.
     *
     * @param updateCoordinator coordinator that transforms points and processes
     *                          the results of component updates
     * @param components        the component models (sampler plus tree pairs) to
     *                          update
     */
    public SequentialForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,
            ComponentList<PointReference, Point> components) {
        super(updateCoordinator, components);
    }

    /**
     * Submit the point to each component in order and keep only the results that
     * represent an actual model state change.
     *
     * @param point  reference to the point being added
     * @param seqNum sequence number (timestamp) of the point
     * @return the list of update results where a state change occurred
     */
    @Override
    protected List<UpdateResult<PointReference>> updateInternal(PointReference point, long seqNum) {
        return components.stream().map(component -> component.update(point, seqNum))
                .filter(result -> result.isStateChange()).collect(Collectors.toList());
    }
}
| 526 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/PointStoreCoordinator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.List;
import com.amazon.randomcutforest.store.IPointStore;
import com.amazon.randomcutforest.store.PointStore;
/**
 * A point store coordinator for compact RCF. Points are interned in a shared
 * {@link IPointStore} and referenced by integer index; this coordinator manages
 * the reference counts as components accept and evict points.
 *
 * @param <Point> the datatype of the actual point
 */
public class PointStoreCoordinator<Point> extends AbstractUpdateCoordinator<Integer, Point> {

    // shared store holding the actual point data; components only see indices
    private final IPointStore<Integer, Point> store;

    public PointStoreCoordinator(IPointStore<Integer, Point> store) {
        checkNotNull(store, "store must not be null");
        this.store = store;
    }

    /**
     * Add the point to the shared store.
     *
     * @param point          The input point.
     * @param sequenceNumber the sequence number associated with the point
     * @return the index of the stored point, or null when the store reports the
     *         insertion as infeasible (e.g. during initial shingling).
     */
    @Override
    public Integer initUpdate(Point point, long sequenceNumber) {
        int location = store.add(point, sequenceNumber);
        if (location == PointStore.INFEASIBLE_POINTSTORE_INDEX) {
            return null;
        }
        return location;
    }

    /**
     * Adjust reference counts for the points that components added or deleted,
     * then release the transient reference created in {@link #initUpdate}.
     *
     * @param updateResults the per-component update results
     * @param updateInput   the index returned by {@link #initUpdate}; may be null
     *                      for initial shingling, in which case no counts change
     */
    @Override
    public void completeUpdate(List<UpdateResult<Integer>> updateResults, Integer updateInput) {
        if (updateInput != null) { // can be null for initial shingling
            for (UpdateResult<Integer> result : updateResults) {
                result.getAddedPoint().ifPresent(store::incrementRefCount);
                result.getDeletedPoint().ifPresent(store::decrementRefCount);
            }
            store.decrementRefCount(updateInput);
        }
        totalUpdates++;
    }

    public IPointStore<Integer, Point> getStore() {
        return store;
    }
}
| 527 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/ParallelForestTraversalExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.returntypes.ConvergingAccumulator;
/**
 * An implementation of forest traversal methods that visits trees in parallel
 * using a private {@link ForkJoinPool}.
 */
public class ParallelForestTraversalExecutor extends AbstractForestTraversalExecutor {

    // recreated lazily in submitAndJoin if ever observed to be null
    // NOTE(review): the lazy re-creation is not synchronized — confirm callers
    // do not invoke traversals concurrently while the pool is null
    private ForkJoinPool pool;
    private final int threadPoolSize;

    public ParallelForestTraversalExecutor(ComponentList<?, ?> treeExecutors, int threadPoolSize) {
        super(treeExecutors);
        this.threadPoolSize = threadPoolSize;
        pool = new ForkJoinPool(threadPoolSize);
    }

    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,
            Function<R, S> finisher) {
        return submitAndJoin(
                () -> components.parallelStream().map(component -> component.traverse(point, visitorFactory))
                        .reduce(accumulator).map(finisher))
                                .orElseThrow(() -> new IllegalStateException("accumulator returned an empty result"));
    }

    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {
        return submitAndJoin(() -> components.parallelStream()
                .map(component -> component.traverse(point, visitorFactory)).collect(collector));
    }

    @Override
    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
            ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {
        // visit trees in batches of threadPoolSize, stopping early once the
        // accumulator reports convergence
        int size = components.size();
        for (int begin = 0; begin < size; begin += threadPoolSize) {
            final int from = begin;
            final int to = Math.min(begin + threadPoolSize, size);
            List<R> batch = submitAndJoin(() -> components.subList(from, to).parallelStream()
                    .map(component -> component.traverse(point, visitorFactory)).collect(Collectors.toList()));
            batch.forEach(accumulator::accept);
            if (accumulator.isConverged()) {
                break;
            }
        }
        return finisher.apply(accumulator.getAccumulatedValue());
    }

    @Override
    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            BinaryOperator<R> accumulator, Function<R, S> finisher) {
        return submitAndJoin(
                () -> components.parallelStream().map(component -> component.traverseMulti(point, visitorFactory))
                        .reduce(accumulator).map(finisher))
                                .orElseThrow(() -> new IllegalStateException("accumulator returned an empty result"));
    }

    @Override
    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            Collector<R, ?, S> collector) {
        return submitAndJoin(() -> components.parallelStream()
                .map(component -> component.traverseMulti(point, visitorFactory)).collect(collector));
    }

    // run the callable on the private pool and wait for its result
    private <T> T submitAndJoin(Callable<T> callable) {
        if (pool == null) {
            pool = new ForkJoinPool(threadPoolSize);
        }
        return pool.submit(callable).join();
    }
}
| 528 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/SamplerPlusTree.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.Optional;
import lombok.Getter;
import com.amazon.randomcutforest.IComponentModel;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.config.Config;
import com.amazon.randomcutforest.sampler.ISampled;
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.tree.ITree;
/**
 * A SamplerPlusTree corresponds to a combination of sampler and tree where the
 * information is passed via P and the tree can seek explicit point information
 * of type Q.
 *
 * @param <P> The internal point representation expected by the component models
 *            in this list.
 * @param <Q> The explicit data type of points being passed
 */
@Getter
public class SamplerPlusTree<P, Q> implements IComponentModel<P, Q> {

    private ITree<P, Q> tree;
    private IStreamSampler<P> sampler;

    /**
     * Constructor of a pair of sampler + tree. The sampler is in the driver's seat
     * because it accepts/rejects independently of the tree and the tree has to
     * remain consistent.
     *
     * @param sampler the sampler
     * @param tree    the corresponding tree
     */
    public SamplerPlusTree(IStreamSampler<P> sampler, ITree<P, Q> tree) {
        checkNotNull(sampler, "sampler must not be null");
        checkNotNull(tree, "tree must not be null");
        this.sampler = sampler;
        this.tree = tree;
    }

    /**
     * This is the main function that maintains the coordination between the
     * sampler and the tree. The sampler proposes acceptance (by setting the weight
     * in queueEntry) and in that case the evictedPoint is set. That evictedPoint
     * is removed from the tree and in that case its reference deleteRef of type T
     * is noted. The point is then added to the tree where the tree may propose a
     * new reference newRef because the point is already present in the tree. The
     * sampler entry is modified and added to the sampler. The pair of the newRef
     * and deleteRef are returned for plausible bookkeeping in update executors.
     *
     * @param point         point in consideration for updating the sampler plus
     *                      tree
     * @param sequenceIndex a time stamp that is used to generate weight in the
     *                      timed sampling
     * @return the pair of (newRef,deleteRef) with potential Optional.empty()
     */
    @Override
    public UpdateResult<P> update(P point, long sequenceIndex) {
        P deleteRef = null;
        if (sampler.acceptPoint(sequenceIndex)) {
            Optional<ISampled<P>> deletedPoint = sampler.getEvictedPoint();
            if (deletedPoint.isPresent()) {
                // the sampler is at capacity; remove the evicted point from the
                // tree before inserting the new one
                ISampled<P> p = deletedPoint.get();
                deleteRef = p.getValue();
                tree.deletePoint(deleteRef, p.getSequenceIndex());
            }
            // the tree may choose to return a reference to an existing point
            // whose value is equal to `point`
            P addedPoint = tree.addPoint(point, sequenceIndex);
            sampler.addPoint(addedPoint);
            return UpdateResult.<P>builder().addedPoint(addedPoint).deletedPoint(deleteRef).build();
        }
        return UpdateResult.noop();
    }

    @Override
    public <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory) {
        return tree.traverse(point, visitorFactory);
    }

    @Override
    public <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory) {
        return tree.traverseMulti(point, visitorFactory);
    }

    /**
     * Route a configuration change to the tree (bounding box cache fraction) or
     * the sampler (time decay).
     *
     * @param name  the configuration key
     * @param value the new value
     * @param clazz the declared type of the value
     * @throws IllegalArgumentException if the configuration key is not supported
     */
    @Override
    public <T> void setConfig(String name, T value, Class<T> clazz) {
        // consistency fix: getConfig validates clazz, so setConfig should too
        checkNotNull(clazz, "clazz must not be null");
        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {
            tree.setConfig(name, value, clazz);
        } else if (Config.TIME_DECAY.equals(name)) {
            sampler.setConfig(name, value, clazz);
        } else {
            throw new IllegalArgumentException("Unsupported configuration setting: " + name);
        }
    }

    /**
     * Read a configuration value from the tree (bounding box cache fraction) or
     * the sampler (time decay).
     *
     * @param name  the configuration key
     * @param clazz the expected type of the value
     * @return the current configuration value
     * @throws IllegalArgumentException if the configuration key is not supported
     */
    @Override
    public <T> T getConfig(String name, Class<T> clazz) {
        checkNotNull(clazz, "clazz must not be null");
        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {
            return tree.getConfig(name, clazz);
        } else if (Config.TIME_DECAY.equals(name)) {
            return sampler.getConfig(name, clazz);
        } else {
            throw new IllegalArgumentException("Unsupported configuration setting: " + name);
        }
    }

    @Override
    public boolean isOutputReady() {
        return tree.isOutputReady();
    }
}
| 529 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractForestUpdateExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.Collections;
import java.util.List;
import lombok.Getter;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.store.IPointStore;
/**
 * The class transforms input points into the form expected by internal models,
 * and submits transformed points to individual models for updating.
 *
 * @param <PointReference> The point representation used by model data
 *                         structures.
 * @param <Point>          The explicit data type of exchanging points
 */
@Getter
public abstract class AbstractForestUpdateExecutor<PointReference, Point> {

    protected final IStateCoordinator<PointReference, Point> updateCoordinator;
    protected final ComponentList<PointReference, Point> components;

    /**
     * Create a new AbstractForestUpdateExecutor.
     *
     * @param updateCoordinator The update coordinator that will be used to
     *                          transform points and process deleted points if
     *                          needed.
     * @param components        A list of models to update.
     */
    protected AbstractForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,
            ComponentList<PointReference, Point> components) {
        this.updateCoordinator = updateCoordinator;
        this.components = components;
    }

    /**
     * Update the forest with the given point, deriving the sequence number from
     * the coordinator's update count. When the backing store performs internal
     * shingling, the sequence number is shifted back by (shingleSize - 1) so that
     * it reflects the position of the oldest entry in the shingle.
     *
     * @param point The point used to update the forest.
     */
    public void update(Point point) {
        long sequenceNumber = updateCoordinator.getTotalUpdates();
        IPointStore<?, ?> pointStore = updateCoordinator.getStore();
        boolean internalShingling = pointStore != null && pointStore.isInternalShinglingEnabled();
        if (internalShingling) {
            sequenceNumber -= pointStore.getShingleSize() - 1;
        }
        update(point, sequenceNumber);
    }

    /**
     * Update the forest with the given point and an explicit sequence number. The
     * point is transformed by the coordinator, submitted to each component, and
     * the results are handed back to the coordinator for bookkeeping.
     *
     * @param point          The point used to update the forest.
     * @param sequenceNumber the sequence number (timestamp) of the point.
     */
    public void update(Point point, long sequenceNumber) {
        PointReference reference = updateCoordinator.initUpdate(point, sequenceNumber);
        List<UpdateResult<PointReference>> results;
        if (reference == null) {
            // the coordinator declined the update (e.g. during initial shingling)
            results = Collections.emptyList();
        } else {
            results = updateInternal(reference, sequenceNumber);
        }
        updateCoordinator.completeUpdate(results, reference);
    }

    /**
     * Internal update method which submits the given input value to
     * {@link IUpdatable#update} for each model managed by this executor.
     *
     * @param updateInput  Input value that will be submitted to the update method
     *                     for each tree.
     * @param currentIndex the timestamp
     * @return a list of points that were deleted from the model as part of the
     *         update.
     */
    protected abstract List<UpdateResult<PointReference>> updateInternal(PointReference updateInput, long currentIndex);
}
| 530 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractUpdateCoordinator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import lombok.Getter;
import lombok.Setter;
/**
 * A partial implementation of the {@link IStateCoordinator} interface that
 * defines a protected instance variable to track total updates and implements
 * the {@link IStateCoordinator#getTotalUpdates()} method. Classes that extend
 * AbstractUpdateCoordinator are responsible for incrementing the totalUpdates
 * counter after completing an update successfully.
 *
 * @param <PointReference> An internal point representation.
 * @param <Point>          Data type of potential exchanges of data
 */
public abstract class AbstractUpdateCoordinator<PointReference, Point>
        implements IStateCoordinator<PointReference, Point> {

    // counter of completed updates; accessor pair generated by lombok
    @Getter
    @Setter
    protected long totalUpdates = 0L;
}
| 531 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/UpdateResult.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.Optional;
import lombok.Builder;
/**
 * When {@link IUpdatable#update} is called, an updatable model may choose to
 * update its state with the submitted point. This class contains the result of
 * such an operation. A list of {@code UpdateResult}s is consumed by
 * {@link IStateCoordinator#completeUpdate} to update global state as needed to
 * reflect the updates to individual component models.
 *
 * @param <PointReference> The point reference type.
 */
@Builder
public class UpdateResult<PointReference> {

    // shared immutable sentinel for "no state change"; both fields are null
    private static final UpdateResult<Object> NOOP = builder().build();

    private final PointReference addedPoint;
    private final PointReference deletedPoint;

    /**
     * Return an {@code UpdateResult} value representing a no-op (an operation that
     * did not change the state of the model). For the returned value,
     * {@code isStateChange()} will be false.
     *
     * @param <Q> The point reference type.
     * @return an {@code UpdateResult} value representing a no-op.
     */
    @SuppressWarnings("unchecked") // safe: NOOP holds no point references, so the cast cannot fail at runtime
    public static <Q> UpdateResult<Q> noop() {
        return (UpdateResult<Q>) NOOP;
    }

    /**
     * An optional containing a reference to the point that was added to the model
     * as part of the update call, or {@code Optional.empty()} if no point was
     * added.
     *
     * @return an optional containing a reference to the point that was added to
     *         the model as part of the update call, or {@code Optional.empty()}
     *         if no point was added.
     */
    public Optional<PointReference> getAddedPoint() {
        return Optional.ofNullable(addedPoint);
    }

    /**
     * Once a model is at capacity, a point may be deleted from the model as part
     * of an update. If a point is deleted during the update operation, then the
     * deleted point reference will be present in the result of this method.
     *
     * @return a reference to the deleted point reference or
     *         {@code Optional.empty()} if no point was deleted.
     */
    public Optional<PointReference> getDeletedPoint() {
        return Optional.ofNullable(deletedPoint);
    }

    /**
     * Return true if this update result represents a change to the updatable
     * model. A change means that a point was added to the model, and possibly a
     * point was deleted from the model.
     *
     * @return true if this update result represents a change to the updatable
     *         model.
     */
    public boolean isStateChange() {
        return addedPoint != null || deletedPoint != null;
    }
}
| 532 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractForestTraversalExecutor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Collector;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.returntypes.ConvergingAccumulator;
/**
 * Base class for forest traversal executors. Subclasses decide how the
 * component models are visited (e.g. sequentially or in parallel) while this
 * class fixes the traversal contract.
 */
public abstract class AbstractForestTraversalExecutor {

    // the component models (trees) that traversals are dispatched to
    protected final ComponentList<?, ?> components;

    /**
     * @param components the component models to traverse.
     */
    protected AbstractForestTraversalExecutor(ComponentList<?, ?> components) {
        this.components = components;
    }

    /**
     * Visit each of the trees in the forest and combine the individual results into
     * an aggregate result. A visitor is constructed for each tree using the visitor
     * factory, and then submitted to a tree. The results from all the trees are
     * combined using the accumulator and then transformed using the finisher before
     * being returned.
     *
     * @param point          The point that defines the traversal path.
     * @param visitorFactory A factory method which is invoked for each tree to
     *                       construct a visitor.
     * @param accumulator    A function that combines the results from individual
     *                       trees into an aggregate result.
     * @param finisher       A function called on the aggregate result in order to
     *                       produce the final result.
     * @param <R>            The visitor result type. This is the type that will be
     *                       returned after traversing each individual tree.
     * @param <S>            The final type, after any final normalization at the
     *                       forest level.
     * @return The aggregated and finalized result after sending a visitor through
     *         each tree in the forest.
     */
    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
            BinaryOperator<R> accumulator, Function<R, S> finisher);

    /**
     * Visit each of the trees in the forest and combine the individual results into
     * an aggregate result. A visitor is constructed for each tree using the visitor
     * factory, and then submitted to each tree. The results from individual trees
     * are collected using the {@link java.util.stream.Collector} and returned.
     * Trees are visited in parallel using
     * {@link java.util.Collection#parallelStream()}.
     *
     * @param point          The point that defines the traversal path.
     * @param visitorFactory A factory method which is invoked for each tree to
     *                       construct a visitor.
     * @param collector      A collector used to aggregate individual tree results
     *                       into a final result.
     * @param <R>            The visitor result type. This is the type that will be
     *                       returned after traversing each individual tree.
     * @param <S>            The final type, after any final normalization at the
     *                       forest level.
     * @return The aggregated and finalized result after sending a visitor through
     *         each tree in the forest.
     */
    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
            Collector<R, ?, S> collector);

    /**
     * Visit each of the trees in the forest sequentially and combine the individual
     * results into an aggregate result. A visitor is constructed for each tree
     * using the visitor factory, and then submitted to each tree. The results from
     * all the trees are combined using the {@link ConvergingAccumulator}, and the
     * method stops visiting trees after convergence is reached. The result is
     * transformed using the finisher before being returned.
     *
     * @param point          The point that defines the traversal path.
     * @param visitorFactory A factory method which is invoked for each tree to
     *                       construct a visitor.
     * @param accumulator    An accumulator that combines the results from
     *                       individual trees into an aggregate result and checks to
     *                       see if the result can be returned without further
     *                       processing.
     * @param finisher       A function called on the aggregate result in order to
     *                       produce the final result.
     * @param <R>            The visitor result type. This is the type that will be
     *                       returned after traversing each individual tree.
     * @param <S>            The final type, after any final normalization at the
     *                       forest level.
     * @return The aggregated and finalized result after sending a visitor through
     *         each tree in the forest.
     */
    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
            ConvergingAccumulator<R> accumulator, Function<R, S> finisher);

    /**
     * Visit each of the trees in the forest and combine the individual results into
     * an aggregate result. A multi-visitor is constructed for each tree using the
     * visitor factory, and then submitted to a tree. The results from all the trees
     * are combined using the accumulator and then transformed using the finisher
     * before being returned.
     *
     * @param point          The point that defines the traversal path.
     * @param visitorFactory A factory method which is invoked for each tree to
     *                       construct a multi-visitor.
     * @param accumulator    A function that combines the results from individual
     *                       trees into an aggregate result.
     * @param finisher       A function called on the aggregate result in order to
     *                       produce the final result.
     * @param <R>            The visitor result type. This is the type that will be
     *                       returned after traversing each individual tree.
     * @param <S>            The final type, after any final normalization at the
     *                       forest level.
     * @return The aggregated and finalized result after sending a visitor through
     *         each tree in the forest.
     */
    public abstract <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            BinaryOperator<R> accumulator, Function<R, S> finisher);

    /**
     * Visit each of the trees in the forest and combine the individual results into
     * an aggregate result. A multi-visitor is constructed for each tree using the
     * visitor factory, and then submitted to a tree. The results from individual
     * trees are collected using the {@link java.util.stream.Collector} and
     * returned. Trees are visited in parallel using
     * {@link java.util.Collection#parallelStream()}.
     *
     * @param point          The point that defines the traversal path.
     * @param visitorFactory A factory method which is invoked for each tree to
     *                       construct a visitor.
     * @param collector      A collector used to aggregate individual tree results
     *                       into a final result.
     * @param <R>            The visitor result type. This is the type that will be
     *                       returned after traversing each individual tree.
     * @param <S>            The final type, after any final normalization at the
     *                       forest level.
     * @return The aggregated and finalized result after sending a visitor through
     *         each tree in the forest.
     */
    public abstract <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
            Collector<R, ?, S> collector);
}
| 533 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/DensityOutput.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
/**
 * DensityOutput extends InterpolationMeasure with methods for computing density
 * estimates.
 */
public class DensityOutput extends InterpolationMeasure {

    /**
     * Default scaling factor (q) to use in the getDensity method.
     */
    public static final double DEFAULT_SUM_OF_POINTS_SCALING_FACTOR = 0.001;

    /**
     * Create a new DensityOutput object with the given number of spatial
     * dimensions. Note that the number of half-dimensions will be 2 * dimensions.
     *
     * @param dimensions The number of spatial dimensions.
     * @param sampleSize The sample size of each tree in the forest, which may be
     *                   used for normalization.
     */
    public DensityOutput(int dimensions, int sampleSize) {
        super(dimensions, sampleSize);
    }

    /**
     * A copy constructor that creates a deep copy of the base DensityOutput.
     *
     * @param base An InterpolationMeasure instance that we want to copy.
     */
    public DensityOutput(InterpolationMeasure base) {
        super(base);
    }

    /**
     * Compute a scalar density estimate. The scaling factor q is multiplied by the
     * sum of points measure and added to the denominator in the density expression
     * to prevent divide-by-0 errors.
     *
     * @param q                 A scaling factor applied to the sum of points in
     *                          the measure.
     * @param manifoldDimension The number of dimensions of the submanifold on
     *                          which we are estimating a density.
     * @return a scalar density estimate.
     */
    public double getDensity(double q, int manifoldDimension) {
        double normalizedPointSum = measure.getHighLowSum() / sampleSize;
        if (normalizedPointSum <= 0.0) {
            return 0.0;
        }
        double factorTotal = 0;
        for (int dim = 0; dim < dimensions; dim++) {
            double mass = probMass.getHighLowSum(dim);
            // average distance per unit of probability mass along this dimension
            double term = (mass > 0) ? distances.getHighLowSum(dim) / mass : 0;
            if (term > 0) {
                // raise to the manifold dimension (via exp/log) and re-weight by mass
                term = Math.exp(Math.log(term) * manifoldDimension) * mass;
            }
            factorTotal += term;
        }
        return normalizedPointSum / (q * normalizedPointSum + factorTotal);
    }

    /**
     * Compute a scalar density estimate. This method uses the default scaling
     * factor and the full number of dimensions.
     *
     * @return a scalar density estimate.
     */
    public double getDensity() {
        return getDensity(DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);
    }

    /**
     * Compute a directional density estimate. The scaling factor q is multiplied
     * by the sum of points measure and added to the denominator in the density
     * expression to prevent divide-by-0 errors.
     *
     * @param q                 A scaling factor applied to the sum of points in
     *                          the measure.
     * @param manifoldDimension The number of dimensions of the submanifold on
     *                          which we are estimating a density.
     * @return a directional density estimate.
     */
    public DiVector getDirectionalDensity(double q, int manifoldDimension) {
        double density = getDensity(q, manifoldDimension);
        // normalization not performed since this would be used in a ratio
        double totalMeasure = measure.getHighLowSum();
        if (totalMeasure > 0) {
            return measure.scale(density / totalMeasure);
        }
        return new DiVector(super.getDimensions());
    }

    /**
     * Compute a directional density estimate. This method uses the default scaling
     * factor and the full number of dimensions.
     *
     * @return a directional density estimate.
     */
    public DiVector getDirectionalDensity() {
        return getDirectionalDensity(DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);
    }
}
| 534 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedStDevAccumulator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static java.lang.Math.max;
/**
 * This accumulator checks to see if a result is converging by testing the
 * sample mean and standard deviation of a scalar value computed from the
 * result. As the name implies, the accumulator performs a one-sided check,
 * comparing each new value to the current sample mean and counting it toward
 * convergence only if the difference is positive (or negative, if
 * highIsCritical is set to false). This accumulator is intended to be used with
 * values where we care more about outliers in one direction. For example, if
 * our statistic is anomaly score, we are normally more concerned with high
 * anomaly scores than low ones.
 *
 * @param <R> The type of the value being accumulated.
 */
public abstract class OneSidedStDevAccumulator<R> implements ConvergingAccumulator<R> {

    /**
     * When testing for convergence, we use ALPHA times the sample standard
     * deviation to define our interval.
     */
    private static final double ALPHA = 0.5;

    /**
     * The minimum number of values that have to be accepted by this accumulator
     * before we start testing for convergence.
     */
    private final int minValuesAccepted;

    /**
     * The number of witnesses needed to declare convergence.
     */
    private final int convergenceThreshold;

    /**
     * Set to 'true' if we care more about high values of the converging scalar than
     * low values. Set to 'false' if the opposite is true.
     */
    private final boolean highIsCritical;

    /**
     * This value is +1 if highIsCritical is 'true', and -1 if highIsCritical is
     * 'false'. It is used in the convergence test.
     */
    private final int sign;

    /**
     * The value accumulated until now.
     */
    protected R accumulatedValue;

    /**
     * The number of values accepted by this accumulator until now.
     */
    private int valuesAccepted;

    /**
     * The number of values that are 'witnesses' to convergence until now. See
     * {@link #accept}.
     */
    private int witnesses;

    /**
     * The current sum of the converging scalar value. Used to compute the sample
     * mean.
     */
    private double sumConvergeVal;

    /**
     * The current sum of squares of the converging scalar value. Used to compute
     * the sample standard deviation.
     */
    private double sumSqConvergeVal;

    /**
     * Create a new converging accumulator that uses a one-sided standard deviation
     * test.
     *
     * @param highIsCritical    Set to 'true' if we care more about high values of
     *                          the converging scalar than low values. Set to
     *                          'false' if the opposite is true.
     * @param precision         The number of witnesses required before declaring
     *                          convergence will be at least 1.0 / precision.
     * @param minValuesAccepted The user-specified minimum number of values visited
     *                          before convergence testing begins; the effective
     *                          minimum is capped at maxValuesAccepted.
     * @param maxValuesAccepted The maximum number of values that will be accepted
     *                          by this accumulator.
     */
    public OneSidedStDevAccumulator(boolean highIsCritical, double precision, int minValuesAccepted,
            int maxValuesAccepted) {
        this.highIsCritical = highIsCritical;
        // if the requested precision is finer than 1/maxValuesAccepted, require
        // maxValuesAccepted witnesses instead of 1/precision
        this.convergenceThreshold = precision < 1.0 / maxValuesAccepted ? maxValuesAccepted : (int) (1.0 / precision);
        this.minValuesAccepted = Math.min(minValuesAccepted, maxValuesAccepted);
        valuesAccepted = 0;
        witnesses = 0;
        sumConvergeVal = 0.0;
        sumSqConvergeVal = 0.0;
        sign = highIsCritical ? 1 : -1;
        accumulatedValue = null;
    }

    /**
     * Given a new result value, add it to the accumulated value and update
     * convergence statistics.
     *
     * @param result The new value being accumulated.
     */
    @Override
    public void accept(R result) {
        accumulateValue(result);
        double value = getConvergingValue(result);
        sumConvergeVal += value;
        sumSqConvergeVal += value * value;
        valuesAccepted++;
        if (valuesAccepted >= minValuesAccepted) {
            // note that the new value is already folded into the mean and
            // deviation above, which dampens its own effect; the 1e-6 epsilon
            // guards the floating point comparison
            if (sign * (value - getMean()) + 1e-6 > ALPHA * getDeviation()) {
                witnesses++;
            }
        }
    }

    /**
     * @return the number of values accepted until now.
     */
    @Override
    public int getValuesAccepted() {
        return valuesAccepted;
    }

    /**
     * @return 'true' if the accumulated value has converged, 'false' otherwise.
     */
    @Override
    public boolean isConverged() {
        return witnesses >= convergenceThreshold;
    }

    /**
     * @return the accumulated value.
     */
    @Override
    public R getAccumulatedValue() {
        return accumulatedValue;
    }

    /**
     * Given a new result value, compute its converging scalar value.
     *
     * @param result A new result value computed by a Random Cut Tree.
     * @return the scalar value used to measure convergence for this result type.
     */
    protected abstract double getConvergingValue(R result);

    /**
     * Add the new result to the accumulated value.
     *
     * @param result The new result to add to the accumulated value.
     */
    protected abstract void accumulateValue(R result);

    /**
     * @return the number of witnesses to convergence observed so far.
     */
    public int getWitnesses() {
        return witnesses;
    }

    /**
     * @return the sample mean of the accepted values, or 0 if none were accepted.
     */
    public double getMean() {
        return (valuesAccepted == 0) ? 0 : sumConvergeVal / valuesAccepted;
    }

    /**
     * It is possible that valuesAccepted is not large, hence Bessel's correction
     * is applied when computing the standard deviation.
     *
     * @return the standard deviation of the accepted values
     */
    public double getDeviation() {
        if (valuesAccepted <= 1) {
            return 0;
        }
        double mean = sumConvergeVal / valuesAccepted;
        // max(0, ...) protects against tiny negative values from rounding
        double stdev = max(0, sumSqConvergeVal / valuesAccepted - mean * mean);
        stdev = Math.sqrt(valuesAccepted * stdev / (valuesAccepted - 1));
        return stdev;
    }
}
| 535 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/ConditionalTreeSample.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collector;
import com.amazon.randomcutforest.tree.BoundingBox;
/**
 * A sample returned from a tree during a conditional (partial-match) query: a
 * leaf point together with the bounding box of its parent node and its L1
 * distance from the query, plus a weight used for deduplication.
 */
public class ConditionalTreeSample {

    /**
     * the index of the point in the PointStore which is used to construct the
     * sample for a query
     */
    public int pointStoreIndex;

    /**
     * the bounding box in the tree of the node which is the parent of the point
     * used to construct the sample. Note that the bounding box is in the
     * projective space defined by the tree.
     */
    protected BoundingBox parentOfLeafBox;

    /**
     * L1 distance of the sampled point (in the projective space of the tree). L1
     * distance is chosen since the entire notion of RCF is oriented towards L1
     * sampling.
     */
    public double distance;

    /**
     * the point in the tree corresponding to the sample
     */
    public float[] leafPoint;

    /**
     * weight of the point; useful for deduplication -- this can also be reused if
     * trees are assigned weights
     */
    public double weight;

    /**
     * Create a new sample with unit weight.
     *
     * @param pointStoreIndex index of the sampled point in the point store
     * @param box             bounding box of the leaf's parent node
     * @param distance        L1 distance of the sampled point from the query
     * @param leafPoint       the sampled point itself
     */
    public ConditionalTreeSample(int pointStoreIndex, BoundingBox box, double distance, float[] leafPoint) {
        this.pointStoreIndex = pointStoreIndex;
        this.parentOfLeafBox = box;
        this.distance = distance;
        this.leafPoint = leafPoint;
        this.weight = 1.0;
    }

    /**
     * A collector that simply concatenates per-tree sample lists. It deliberately
     * does not sort or deduplicate, since it may run in parallel; use
     * {@link #dedup(List)} on the combined result instead.
     */
    public static Collector<ConditionalTreeSample, ArrayList<ConditionalTreeSample>, ArrayList<ConditionalTreeSample>> collector = Collector
            .of(ArrayList::new, ArrayList::add, (left, right) -> {
                left.addAll(right);
                return left;
            }, list -> list);

    /**
     * Merge samples that refer to the same point-store index by accumulating
     * their weights.
     *
     * Note: the input list is sorted in place (by point-store index) as a side
     * effect.
     *
     * @param list the samples to deduplicate; may be empty
     * @return a new list with one entry per distinct point-store index, with
     *         weights summed over duplicates
     */
    public static List<ConditionalTreeSample> dedup(List<ConditionalTreeSample> list) {
        List<ConditionalTreeSample> newList = new ArrayList<>();
        // guard: the previous implementation threw IndexOutOfBoundsException on
        // an empty input
        if (list.isEmpty()) {
            return newList;
        }
        list.sort(Comparator.comparingInt(o -> o.pointStoreIndex));
        newList.add(list.get(0));
        for (int j = 1; j < list.size(); j++) {
            ConditionalTreeSample current = list.get(j);
            ConditionalTreeSample last = newList.get(newList.size() - 1);
            if (current.pointStoreIndex == last.pointStoreIndex) {
                last.weight += current.weight;
            } else {
                newList.add(current);
            }
        }
        return newList;
    }
}
| 536 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/DiVector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.Arrays;
import java.util.function.Function;
import com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;
/**
 * A DiVector tracks a quantity separately in the positive and the negative
 * direction of every dimension of a manifold. For instance, when an
 * {@link AnomalyAttributionVisitor} attributes an anomaly score to the
 * coordinates of an input point, a DiVector records whether the contribution of
 * the ith coordinate stems from that coordinate being unusually high or
 * unusually low.
 */
public class DiVector {

    /**
     * Per-dimension values for the positive direction.
     */
    public final double[] high;

    /**
     * Per-dimension values for the negative direction.
     */
    public final double[] low;

    // number of spatial dimensions; high and low each have this length
    private final int dimensions;

    /**
     * Construct a zero-valued DiVector with the given number of spatial
     * dimensions; {@link #high} and {@link #low} each receive this many entries.
     *
     * @param dimensions The number of dimensions of data to store.
     */
    public DiVector(int dimensions) {
        checkArgument(dimensions > 0, "dimensions must be greater than 0");
        this.dimensions = dimensions;
        this.high = new double[dimensions];
        this.low = new double[dimensions];
    }

    /**
     * Construct a DiVector from copies of the supplied arrays.
     *
     * @param high the high vector
     * @param low  the low vector.
     */
    public DiVector(double[] high, double[] low) {
        checkArgument(high.length == low.length, "dimensions must be equal");
        this.dimensions = high.length;
        this.high = high.clone();
        this.low = low.clone();
    }

    /**
     * Create a deep copy of the base DiVector.
     *
     * @param base The DiVector to copy.
     */
    public DiVector(DiVector base) {
        this.dimensions = base.dimensions;
        this.high = Arrays.copyOf(base.high, dimensions);
        this.low = Arrays.copyOf(base.low, dimensions);
    }

    /**
     * Accumulate the {@link #high} and {@link #low} entries of the right vector
     * into the left vector and return the mutated left vector. This method is
     * used to accumulate DiVector results.
     *
     * @param left  The DiVector being updated in place.
     * @param right The DiVector whose values are added; it is not modified.
     * @return the modified left vector.
     */
    public static DiVector addToLeft(DiVector left, DiVector right) {
        checkNotNull(left, "left must not be null");
        checkNotNull(right, "right must not be null");
        checkArgument(left.dimensions == right.dimensions, "dimensions must be the same");
        for (int d = 0; d < left.dimensions; d++) {
            left.high[d] += right.high[d];
            left.low[d] += right.low[d];
        }
        return left;
    }

    /**
     * @return the number of spatial dimensions of this DiVector.
     */
    public int getDimensions() {
        return dimensions;
    }

    /**
     * Produce a new DiVector whose entries are those of this one multiplied by z.
     *
     * @param z The scaling factor.
     * @return a freshly allocated, scaled DiVector; this vector is unchanged.
     */
    public DiVector scale(double z) {
        double[] scaledHigh = new double[dimensions];
        double[] scaledLow = new double[dimensions];
        for (int d = 0; d < dimensions; d++) {
            scaledHigh[d] = high[d] * z;
            scaledLow[d] = low[d] * z;
        }
        return new DiVector(scaledHigh, scaledLow);
    }

    /**
     * Rescale high and low in place so that the new L1 norm equals targetNorm,
     * provided the current L1 norm is positive; otherwise leave this vector
     * untouched.
     *
     * @param targetNorm The target L1 norm value.
     */
    public void renormalize(double targetNorm) {
        double currentNorm = getHighLowSum();
        if (currentNorm > 0) {
            double ratio = targetNorm / currentNorm;
            for (int d = 0; d < dimensions; d++) {
                high[d] = high[d] * ratio;
                low[d] = low[d] * ratio;
            }
        }
    }

    /**
     * Transform every entry of both the high and low arrays in place using the
     * supplied function.
     *
     * @param function A function applied to each entry of the high and low arrays
     *                 in this DiVector.
     */
    public void componentwiseTransform(Function<Double, Double> function) {
        for (int d = 0; d < dimensions; d++) {
            high[d] = function.apply(high[d]);
            low[d] = function.apply(low[d]);
        }
    }

    /**
     * Return the sum of high and low in the ith coordinate.
     *
     * @param i A coordinate index
     * @return the sum of high and low in the ith coordinate.
     */
    public double getHighLowSum(int i) {
        return high[i] + low[i];
    }

    /**
     * @return the L1 norm, i.e. the sum of all entries of high and low.
     */
    public double getHighLowSum() {
        double total = 0.0;
        for (int d = 0; d < dimensions; d++) {
            total += high[d] + low[d];
        }
        return total;
    }

    /**
     * Map this DiVector through a projection applied independently to the high
     * and low arrays.
     *
     * @param projection the transformation applied to each array
     * @return a new DiVector built from the projected arrays
     */
    public DiVector lift(Function<double[], double[]> projection) {
        return new DiVector(projection.apply(high), projection.apply(low));
    }
}
| 537 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/InterpolationMeasure.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.function.Function;
import java.util.stream.Collector;
/**
 * An InterpolationMeasure stores the geometric quantities gathered by
 * {@link com.amazon.randomcutforest.interpolation.SimpleInterpolationVisitor}
 * during a tree traversal: an interpolation measure, distances, and probability
 * masses, each tracked directionally via a {@link DiVector}.
 */
public class InterpolationMeasure {

    // directional interpolation measure
    public final DiVector measure;
    // directional distances
    public final DiVector distances;
    // directional probability mass
    public final DiVector probMass;
    // number of spatial dimensions of each component DiVector
    protected final int dimensions;
    // sample size of each tree, available for normalization
    protected final int sampleSize;

    /**
     * Create a new, zero-valued InterpolationMeasure with the given number of
     * spatial dimensions. Note that the number of half-dimensions will be 2 *
     * dimensions.
     *
     * @param dimensions The number of spatial dimensions.
     * @param sampleSize The sample size of each tree in the forest, which may be
     *                   used for normalization.
     */
    public InterpolationMeasure(int dimensions, int sampleSize) {
        checkArgument(dimensions > 0, "dimensions must be greater than 0");
        this.dimensions = dimensions;
        this.sampleSize = sampleSize;
        this.measure = new DiVector(dimensions);
        this.distances = new DiVector(dimensions);
        this.probMass = new DiVector(dimensions);
    }

    /**
     * A copy constructor that creates a deep copy of the base InterpolationMeasure.
     *
     * @param base An InterpolationMeasure instance that we want to copy.
     */
    public InterpolationMeasure(InterpolationMeasure base) {
        this.dimensions = base.dimensions;
        this.sampleSize = base.sampleSize;
        this.measure = new DiVector(base.measure);
        this.distances = new DiVector(base.distances);
        this.probMass = new DiVector(base.probMass);
    }

    /**
     * Internal constructor that adopts (does not copy) the supplied DiVectors.
     *
     * @param sampleSize sample size of each tree
     * @param measure    directional measure
     * @param distances  directional distances
     * @param probMass   directional probability mass
     */
    protected InterpolationMeasure(int sampleSize, DiVector measure, DiVector distances, DiVector probMass) {
        checkArgument(measure.getDimensions() == distances.getDimensions(),
                "measure.getDimensions() should be equal to distances.getDimensions()");
        checkArgument(measure.getDimensions() == probMass.getDimensions(),
                "measure.getDimensions() should be equal to probMass.getDimensions()");
        this.dimensions = measure.getDimensions();
        this.sampleSize = sampleSize;
        this.measure = measure;
        this.distances = distances;
        this.probMass = probMass;
    }

    /**
     * Accumulate {@link #measure}, {@link #distances}, and {@link #probMass} of
     * the right InterpolationMeasure into the left one and return the mutated
     * left instance. This method is used to accumulate InterpolationMeasure
     * results.
     *
     * @param left  The InterpolationMeasure updated in place.
     * @param right The InterpolationMeasure whose values are added; it is not
     *              modified by the method.
     * @return the modified left instance.
     */
    public static InterpolationMeasure addToLeft(InterpolationMeasure left, InterpolationMeasure right) {
        checkNotNull(left, "left must not be null");
        checkNotNull(right, "right must not be null");
        checkArgument(left.dimensions == right.dimensions, "dimensions must be the same");
        DiVector.addToLeft(left.distances, right.distances);
        DiVector.addToLeft(left.measure, right.measure);
        DiVector.addToLeft(left.probMass, right.probMass);
        return left;
    }

    /**
     * Return a {@link Collector} that aggregates many per-tree
     * InterpolationMeasure results into a single, final result scaled by the
     * number of trees.
     *
     * @param dimensions    The number of spatial dimensions in the
     *                      InterpolationMeasures being collected.
     * @param sampleSize    The sample size of the Random Cut Trees that were
     *                      measured.
     * @param numberOfTrees The number of trees whose measures we are collecting
     *                      into a final result. This value is used for scaling.
     * @return a collector producing the aggregated, scaled result.
     */
    public static Collector<InterpolationMeasure, InterpolationMeasure, InterpolationMeasure> collector(int dimensions,
            int sampleSize, int numberOfTrees) {
        return Collector.of(() -> new InterpolationMeasure(dimensions, sampleSize),
                (left, right) -> addToLeft(left, right), (left, right) -> addToLeft(left, right),
                partial -> partial.scale(1.0 / numberOfTrees));
    }

    /**
     * @return the number of spatial dimensions in this InterpolationMeasure.
     */
    public int getDimensions() {
        return dimensions;
    }

    /**
     * @return the sample size of the Random Cut Tree that we are measuring.
     */
    public int getSampleSize() {
        return sampleSize;
    }

    /**
     * Produce a new InterpolationMeasure with every component scaled by the given
     * factor.
     *
     * @param z The scale factor.
     * @return a new, scaled InterpolationMeasure; this instance is unchanged.
     */
    public InterpolationMeasure scale(double z) {
        DiVector scaledMeasure = measure.scale(z);
        DiVector scaledDistances = distances.scale(z);
        DiVector scaledProbMass = probMass.scale(z);
        return new InterpolationMeasure(sampleSize, scaledMeasure, scaledDistances, scaledProbMass);
    }

    /**
     * Map each component DiVector through a projection.
     *
     * @param projection the transformation applied to the underlying arrays
     * @return a new InterpolationMeasure built from the projected components
     */
    public InterpolationMeasure lift(Function<double[], double[]> projection) {
        DiVector liftedMeasure = measure.lift(projection);
        DiVector liftedDistances = distances.lift(projection);
        DiVector liftedProbMass = probMass.lift(projection);
        return new InterpolationMeasure(sampleSize, liftedMeasure, liftedDistances, liftedProbMass);
    }
}
| 538 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/Neighbor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
/**
* A Neighbor represents a point together with a distance, where the distance is
* with respect to some query point. That is, we think of this point as being a
* neighbor of the query point. If the feature is enabled in the forest, a
* Neighbor will also contain a set of sequence indexes containing the times
* this point was added to the forest.
*/
public class Neighbor {
/**
* The neighbor point.
*/
public final float[] point;
/**
* The distance between the neighbor point and the query point it was created
* from.
*/
public final double distance;
/**
* A list of sequence indexes corresponding to the times when this neighbor
* point was added to the forest. If sequence indexes are not enabled for the
* forest, then this list will be empty.
*/
public final List<Long> sequenceIndexes;
public int count;
/**
* Create a new Neighbor.
*
* @param point The neighbor point.
* @param distance The distance between the neighbor point and the query
* point is was created from.
* @param sequenceIndexes A list of sequence indexes corresponding to the times
* when this neighbor point was added to the forest.
*/
public Neighbor(float[] point, double distance, List<Long> sequenceIndexes) {
this.point = point;
this.distance = distance;
this.sequenceIndexes = sequenceIndexes;
this.count = 1;
}
/**
* Get Neighbor collector which merges duplicate Neighbors and sorts them in
* ascending order of distance
*
* @return Neighbor collector
*/
public static Collector<Optional<Neighbor>, Map<Integer, Neighbor>, List<Neighbor>> collector() {
return new CollectorImpl();
}
/**
* Merge sequence indexes of other Neighbor to itself
*
* @param other other Neighbor whose sequenceIndexes need to be merged
*/
private void mergeSequenceIndexes(Neighbor other) {
this.sequenceIndexes.addAll(other.sequenceIndexes);
this.count += other.count;
}
/**
* Get hash code for the Point associated with object
*
* @return hash code for the Point
*/
private int getHashCodeForPoint() {
return Arrays.hashCode(point);
}
private static class CollectorImpl
implements Collector<Optional<Neighbor>, Map<Integer, Neighbor>, List<Neighbor>> {
@Override
public Supplier<Map<Integer, Neighbor>> supplier() {
return HashMap::new;
}
@Override
public BiConsumer<Map<Integer, Neighbor>, Optional<Neighbor>> accumulator() {
return (neighborsMap, neighborOptional) -> {
if (neighborOptional.isPresent()) {
mergeNeighborIfNeededAndPut(neighborsMap, neighborOptional.get());
}
};
}
@Override
public BinaryOperator<Map<Integer, Neighbor>> combiner() {
return (left, right) -> {
right.forEach((k, v) -> mergeNeighborIfNeededAndPut(left, v));
return left;
};
}
@Override
public Function<Map<Integer, Neighbor>, List<Neighbor>> finisher() {
return map -> {
List<Neighbor> combinedResult = new ArrayList<>();
map.forEach((k, v) -> {
v.sequenceIndexes.sort(Long::compareTo);
combinedResult.add(v);
});
Comparator<Neighbor> comparator = Comparator.comparingDouble(n -> n.distance);
combinedResult.sort(comparator);
return combinedResult;
};
}
@Override
public Set<Characteristics> characteristics() {
return Collections.emptySet();
}
private void mergeNeighborIfNeededAndPut(Map<Integer, Neighbor> neighborsMap, Neighbor currentNeighbor) {
Neighbor existingNeighbor = neighborsMap.get(currentNeighbor.getHashCodeForPoint());
if (existingNeighbor != null) {
existingNeighbor.mergeSequenceIndexes(currentNeighbor);
} else {
neighborsMap.put(currentNeighbor.getHashCodeForPoint(), currentNeighbor);
}
}
}
}
| 539 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/ConvergingAccumulator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
/**
 * An accumulator which can be used to short-circuit the number of trees visited
 * if the responses from the trees seen so far appear to be converging to a
 * value. See {@link com.amazon.randomcutforest.RandomCutForest} for an example
 * of usage.
 *
 * @param <R> The result type being accumulated.
 * @see com.amazon.randomcutforest.RandomCutForest
 */
public interface ConvergingAccumulator<R> {

    /**
     * Add a new result value to this accumulator.
     *
     * @param value A single result value which should be accumulated together with
     *              other results.
     */
    void accept(R value);

    /**
     * @return 'true' if the accumulator has converged and we can stop accepting new
     *         values, 'false' otherwise.
     */
    boolean isConverged();

    /**
     * @return the number of values that have been accepted by this accumulator.
     */
    int getValuesAccepted();

    /**
     * @return the accumulated value.
     */
    R getAccumulatedValue();
}
| 540 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDoubleAccumulator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
/**
 * A {@link OneSidedStDevAccumulator} specialized to double values: convergence
 * is tested directly on the submitted doubles, and the accumulated result is
 * their running sum.
 */
public class OneSidedConvergingDoubleAccumulator extends OneSidedStDevAccumulator<Double> {

    /**
     * Create a converging accumulator for doubles backed by a one-sided standard
     * deviation test.
     *
     * @param highIsCritical    Set to 'true' if we care more about high values of
     *                          the converging scalar than low values. Set to
     *                          'false' if the opposite is true.
     * @param precision         The number of witnesses required before declaring
     *                          convergence will be at least 1.0 / precision.
     * @param minValuesAccepted The user-specified minimum number of values visited
     *                          before convergence testing begins.
     * @param maxValuesAccepted The maximum number of values that will be accepted
     *                          by this accumulator.
     */
    public OneSidedConvergingDoubleAccumulator(boolean highIsCritical, double precision, int minValuesAccepted,
            int maxValuesAccepted) {
        super(highIsCritical, precision, minValuesAccepted, maxValuesAccepted);
        accumulatedValue = 0.0;
    }

    /**
     * The submitted value is itself the converging scalar, so it is returned
     * unchanged.
     *
     * @param result A new result value computed by a Random Cut Tree.
     * @return the unboxed result value.
     */
    @Override
    protected double getConvergingValue(Double result) {
        return result.doubleValue();
    }

    /**
     * Fold the new result into the running sum.
     *
     * @param result The new result to add to the accumulated value.
     */
    @Override
    protected void accumulateValue(Double result) {
        accumulatedValue = accumulatedValue + result;
    }
}
| 541 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/RangeVector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.Arrays;
/**
* A RangeVector is used when we want to track a quantity and its upper and
* lower bounds
*/
public class RangeVector {

    /**
     * The central estimates, one per dimension.
     */
    public final float[] values;

    /**
     * An array of values corresponding to the upper ranges in each dimension.
     */
    public final float[] upper;

    /**
     * An array of values corresponding to the lower ranges in each dimension.
     */
    public final float[] lower;

    /**
     * Construct a RangeVector of the given dimensionality with every value and
     * bound initialized to zero.
     *
     * @param dimensions the number of spatial dimensions; must be positive
     */
    public RangeVector(int dimensions) {
        checkArgument(dimensions > 0, "dimensions must be greater than 0");
        values = new float[dimensions];
        upper = new float[dimensions];
        lower = new float[dimensions];
    }

    /**
     * Construct a RangeVector from explicit estimates and bounds. Inputs are
     * validated (lower[i] <= values[i] <= upper[i]) and deep copied.
     *
     * @param values the values being estimated in a range
     * @param upper  the higher values of the ranges
     * @param lower  the lower values in the ranges
     */
    public RangeVector(float[] values, float[] upper, float[] lower) {
        checkArgument(values.length > 0, " dimensions must be > 0");
        checkArgument(values.length == upper.length && upper.length == lower.length, "dimensions must be equal");
        int dimensions = values.length;
        for (int j = 0; j < dimensions; j++) {
            checkArgument(upper[j] >= values[j] && values[j] >= lower[j], "incorrect semantics");
        }
        this.values = values.clone();
        this.upper = upper.clone();
        this.lower = lower.clone();
    }

    /**
     * Construct a degenerate RangeVector in which the upper and lower bounds
     * coincide with the values themselves.
     *
     * @param values the values being estimated
     */
    public RangeVector(float[] values) {
        checkArgument(values.length > 0, "dimensions must be > 0 ");
        this.values = values.clone();
        this.upper = values.clone();
        this.lower = values.clone();
    }

    /**
     * Create a deep copy of the base RangeVector.
     *
     * @param base The RangeVector to copy.
     */
    public RangeVector(RangeVector base) {
        this.values = base.values.clone();
        this.upper = base.upper.clone();
        this.lower = base.lower.clone();
    }

    /**
     * Translate coordinate i by the given amount, keeping the bounds on the
     * correct side of the value in the presence of floating point rounding.
     *
     * @param i     the coordinate to shift
     * @param shift the amount added to value and both bounds
     */
    public void shift(int i, float shift) {
        checkArgument(i >= 0 && i < values.length, "incorrect index");
        values[i] += shift;
        // managing precision: bounds may never cross the shifted value
        upper[i] = max(values[i], upper[i] + shift);
        lower[i] = min(values[i], lower[i] + shift);
    }

    /**
     * Scale coordinate i by a non-negative weight, keeping the bounds on the
     * correct side of the value in the presence of floating point rounding.
     *
     * @param i      the coordinate to scale
     * @param weight the non-negative multiplier
     */
    public void scale(int i, float weight) {
        checkArgument(i >= 0 && i < values.length, "incorrect index");
        checkArgument(weight >= 0, " negative weight not permitted");
        values[i] *= weight;
        // managing precision: bounds may never cross the scaled value
        upper[i] = max(upper[i] * weight, values[i]);
        lower[i] = min(lower[i] * weight, values[i]);
    }
}
| 542 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDiVectorAccumulator.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
/**
* A converging accumulator using a one-sided standard deviation tests. The
* accumulator tests the sum of entries (i.e., the "high-low sum") in the
* submitted DiVectors for convergence and returns the sum of all submitted
* DiVectors.
*/
public class OneSidedConvergingDiVectorAccumulator extends OneSidedStDevAccumulator<DiVector> {

    /**
     * Create a new converging accumulator that uses a one-sided standard deviation
     * test.
     *
     * @param dimensions        The number of dimensions in the DiVectors being
     *                          accumulated.
     * @param highIsCritical    Set to 'true' if we care more about high values of
     *                          the converging scalar than low values, and 'false'
     *                          if the opposite is true.
     * @param precision         The number of witnesses required before declaring
     *                          convergence will be at least 1.0 / precision.
     * @param minValuesAccepted The user-specified minimum number of values visited
     *                          before returning a result. Note that
     *                          {@link #isConverged()} may return true before
     *                          accepting this number of results if the convergence
     *                          test passes first.
     * @param maxValuesAccepted The maximum number of values that will be accepted
     *                          by this accumulator.
     */
    public OneSidedConvergingDiVectorAccumulator(int dimensions, boolean highIsCritical, double precision,
            int minValuesAccepted, int maxValuesAccepted) {
        super(highIsCritical, precision, minValuesAccepted, maxValuesAccepted);
        // the running aggregate starts as the zero vector of the given dimension
        accumulatedValue = new DiVector(dimensions);
    }

    /**
     * The scalar tested for convergence: the "high-low sum" of the DiVector.
     *
     * @param result A new result DiVector computed by a Random Cut Tree.
     * @return the "high-low sum" for the given DiVector.
     */
    @Override
    protected double getConvergingValue(DiVector result) {
        return result.getHighLowSum();
    }

    /**
     * Fold the new DiVector into the running aggregate held by this accumulator.
     *
     * @param result The new result to add to the accumulated value.
     */
    @Override
    protected void accumulateValue(DiVector result) {
        // addToLeft accumulates result into accumulatedValue (the left argument);
        // the return value is intentionally ignored
        DiVector.addToLeft(accumulatedValue, result);
    }
}
| 543 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/returntypes/SampleSummary.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.util.Weighted.prefixPick;
import static java.lang.Math.max;
import static java.util.stream.Collectors.toCollection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.amazon.randomcutforest.util.Weighted;
public class SampleSummary {

    // default upper percentile for the range estimates; the lower bound uses 1 - this value
    public static double DEFAULT_PERCENTILE = 0.9;

    /**
     * a collection of summarized points (reminiscent of typical sets from the
     * perspective of information theory, Cover and Thomas, Chapter 3) which should
     * be the mean/median of a spatially continuous distribution with central
     * tendency. If the input is a collection of samples that correspond to an union
     * of two such well separated distributions, for example as in the example data
     * of RCF paper then the output should be the two corresponding central points.
     */
    public float[][] summaryPoints;

    /**
     * a measure of comparison among the typical points;
     */
    public float[] relativeWeight;

    /**
     * number of samples, often the number of summary
     */
    public double weightOfSamples;

    /**
     * the global mean
     */
    public float[] mean;

    // the per-coordinate weighted median of the samples
    public float[] median;

    /**
     * This is the global deviation, without any filtering on the TreeSamples
     */
    public float[] deviation;

    /**
     * an upper percentile corresponding to the points, computed dimension
     * agnostically
     */
    public float[] upper;

    /**
     * a lower percentile corresponding to the points
     */
    public float[] lower;

    /**
     * Construct an empty summary of the given dimension; every statistic is zero
     * and there is a single all-zero summary point with zero relative weight.
     *
     * @param dimensions the number of coordinates per point
     */
    public SampleSummary(int dimensions) {
        this.weightOfSamples = 0;
        this.summaryPoints = new float[1][];
        this.summaryPoints[0] = new float[dimensions];
        this.relativeWeight = new float[] { 0.0f };
        this.median = new float[dimensions];
        this.mean = new float[dimensions];
        this.deviation = new float[dimensions];
        this.upper = new float[dimensions];
        this.lower = new float[dimensions];
    }

    // for older tests
    public SampleSummary(float[] point) {
        this(toDoubleArray(point), 1.0f);
    }

    /**
     * Summary of a single weighted point: mean, median, upper and lower all equal
     * the point itself, and the deviation remains zero.
     *
     * @param point  the point being summarized
     * @param weight the weight of that point
     */
    public SampleSummary(double[] point, float weight) {
        this(point.length);
        this.weightOfSamples = weight;
        this.summaryPoints[0] = toFloatArray(point);
        this.relativeWeight[0] = weight;
        System.arraycopy(this.summaryPoints[0], 0, this.median, 0, point.length);
        System.arraycopy(this.summaryPoints[0], 0, this.mean, 0, point.length);
        System.arraycopy(this.summaryPoints[0], 0, this.upper, 0, point.length);
        System.arraycopy(this.summaryPoints[0], 0, this.lower, 0, point.length);
    }

    /**
     * Install the given typical points and their relative weights (deep copied).
     * A zero-length input is a no-op and leaves the current summary unchanged.
     */
    void addTypical(float[][] summaryPoints, float[] relativeWeight) {
        checkArgument(summaryPoints.length == relativeWeight.length, "incorrect lengths of fields");
        if (summaryPoints.length > 0) {
            int dimension = summaryPoints[0].length;
            this.summaryPoints = new float[summaryPoints.length][];
            for (int i = 0; i < summaryPoints.length; i++) {
                checkArgument(dimension == summaryPoints[i].length, " incorrect length points");
                this.summaryPoints[i] = Arrays.copyOf(summaryPoints[i], dimension);
            }
            this.relativeWeight = Arrays.copyOf(relativeWeight, relativeWeight.length);
        }
    }

    /**
     * Summarize the samples and install precomputed typical points.
     *
     * @param points         weighted samples
     * @param summaryPoints  precomputed typical points
     * @param relativeWeight relative weights of the typical points
     * @param percentile     upper percentile for the range estimates
     */
    public SampleSummary(List<Weighted<float[]>> points, float[][] summaryPoints, float[] relativeWeight,
            double percentile) {
        this(points, percentile);
        this.addTypical(summaryPoints, relativeWeight);
    }

    public SampleSummary(List<Weighted<float[]>> points, float[][] summaryPoints, float[] relativeWeight) {
        this(points, summaryPoints, relativeWeight, DEFAULT_PERCENTILE);
    }

    public SampleSummary(List<Weighted<float[]>> points) {
        this(points, DEFAULT_PERCENTILE);
    }

    /**
     * constructs a summary of the weighted points based on the percentile envelopes
     * by picking 1-percentile and percentile fractional rank of the items useful in
     * surfacing a robust range of values
     *
     * @param points     weighted points
     * @param percentile value corresponding to bounds; must lie in (0.5, 1.0)
     */
    public SampleSummary(List<Weighted<float[]>> points, double percentile) {
        checkArgument(points.size() > 0, "point list cannot be empty");
        checkArgument(percentile > 0.5, " has to be more than 0.5");
        checkArgument(percentile < 1.0, "has to be less than 1");
        int dimension = points.get(0).index.length;
        // weighted first and second moments, accumulated per coordinate
        double[] coordinateSum = new double[dimension];
        double[] coordinateSumSquare = new double[dimension];
        double totalWeight = 0;
        for (Weighted<float[]> e : points) {
            checkArgument(e.index.length == dimension, "points have to be of same length");
            float weight = e.weight;
            checkArgument(!Float.isNaN(weight), " weights must be non-NaN values ");
            checkArgument(Float.isFinite(weight), " weights must be finite ");
            checkArgument(weight >= 0, "weights have to be non-negative");
            totalWeight += weight;
            for (int i = 0; i < dimension; i++) {
                // effectively-final copy of the loop index for the lambda messages below
                int index = i;
                checkArgument(!Float.isNaN(e.index[i]),
                        () -> " improper input, in coordinate " + index + ", must be non-NaN values");
                checkArgument(Float.isFinite(e.index[i]),
                        () -> " improper input, in coordinate " + index + ", must be finite values");
                coordinateSum[i] += e.index[i] * weight;
                coordinateSumSquare[i] += e.index[i] * e.index[i] * weight;
            }
        }
        checkArgument(totalWeight > 0, " weights cannot all be 0");
        this.weightOfSamples = totalWeight;
        this.mean = new float[dimension];
        this.deviation = new float[dimension];
        this.median = new float[dimension];
        this.upper = new float[dimension];
        this.lower = new float[dimension];
        for (int i = 0; i < dimension; i++) {
            this.mean[i] = (float) (coordinateSum[i] / totalWeight);
            // max(0, .) guards against tiny negative variance from floating point error
            this.deviation[i] = (float) Math.sqrt(max(0.0, coordinateSumSquare[i] / totalWeight - mean[i] * mean[i]));
        }
        // per coordinate: sort the weighted values and pick the weighted ranks
        // corresponding to (1 - percentile), 0.5 and percentile
        for (int i = 0; i < dimension; i++) {
            int index = i;
            ArrayList<Weighted<Float>> list = points.stream().map(e -> new Weighted<>(e.index[index], e.weight))
                    .collect(toCollection(ArrayList::new));
            list.sort((o1, o2) -> Float.compare(o1.index, o2.index));
            this.lower[i] = prefixPick(list, totalWeight * (1.0 - percentile)).index;
            this.median[i] = prefixPick(list, totalWeight / 2.0).index;
            this.upper[i] = prefixPick(list, totalWeight * percentile).index;
        }
    }
}
| 544 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/interpolation/SimpleInterpolationVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.interpolation;
import java.util.Arrays;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.returntypes.DensityOutput;
import com.amazon.randomcutforest.returntypes.InterpolationMeasure;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
/**
* A Visitor which computes several geometric measures that related a given
* query point to the points stored in a RandomCutTree.
**/
public class SimpleInterpolationVisitor implements Visitor<InterpolationMeasure> {

    // defensive copy of the query point
    private final float[] pointToScore;
    private final long sampleSize;
    // true if the trees maintain center-of-mass information (see fieldExt/influenceExt)
    private final boolean centerOfMass;
    // the interpolation measure accumulated during the leaf-to-root traversal
    public InterpolationMeasure stored;
    double sumOfNewRange = 0d;
    double sumOfDifferenceInRange = 0d;
    // layout of the two vectors below: entry 2*i is the high side of dimension i,
    // entry 2*i+1 is the low side
    double[] directionalDistanceVector;
    double[] differenceInRangeVector;

    /**
     * A flag that states whether the point to score is known to be contained inside
     * the bounding box of Nodes being accepted. Assumes nodes are accepted in
     * leaf-to-root order.
     */
    boolean pointInsideBox;

    /**
     * An array that keeps track of whether each margin of the point being scored is
     * outside inside the box considered during the recursive call to compute the
     * score. Assumes nodes are accepted in leaf-to-root order.
     */
    boolean[] coordInsideBox;

    // set by acceptLeaf when the leaf visited equals the query point
    private boolean pointEqualsLeaf;
    // when the leaf equals the query: the union of sibling bounding boxes seen so far
    private IBoundingBoxView theShadowBox;
    // mass used by the field/influence computations (leaf mass + query mass when equal)
    private double savedMass;
    private double pointMass;

    /**
     * Construct a new Visitor
     *
     * @param pointToScore The point whose anomaly score we are computing
     * @param sampleSize   The sub-sample size used by the RandomCutTree that is
     *                     scoring the point
     * @param pointMass    indicates the mass/duplicity of the current point
     * @param centerOfMass indicates if the tree has centerOfMass
     */
    public SimpleInterpolationVisitor(float[] pointToScore, int sampleSize, double pointMass, boolean centerOfMass) {
        this.pointToScore = Arrays.copyOf(pointToScore, pointToScore.length);
        this.sampleSize = sampleSize;
        // the samplesize may be useful to scale
        pointInsideBox = false;
        this.pointMass = pointMass; // this corresponds to the mass/duplicity of the query
        stored = new DensityOutput(pointToScore.length, sampleSize);
        directionalDistanceVector = new double[2 * pointToScore.length];
        differenceInRangeVector = new double[2 * pointToScore.length];
        pointEqualsLeaf = false;
        this.centerOfMass = centerOfMass;
        // will be initialized to an array of false values
        coordInsideBox = new boolean[pointToScore.length];
    }

    /**
     * @return The score computed up until this point.
     */
    @Override
    public InterpolationMeasure getResult() {
        return stored;
    }

    /**
     * Fold the contribution of an internal node into the stored measure, visiting
     * nodes in leaf-to-root order. Once the query point is inside the bounding box
     * (probability of a separating cut is zero) nothing further can change and the
     * remaining calls return immediately.
     */
    @Override
    public void accept(INodeView node, int depthOfNode) {
        if (pointInsideBox) {
            return;
        }
        IBoundingBoxView largeBox;
        IBoundingBoxView smallBox;
        if (pointEqualsLeaf) {
            // the query coincides with the leaf: compare against the union of the
            // sibling boxes seen so far (the "shadow" of the tree without the query)
            largeBox = node.getBoundingBox();
            theShadowBox = theShadowBox == null ? node.getSiblingBoundingBox(pointToScore)
                    : theShadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));
            smallBox = theShadowBox;
        } else {
            smallBox = node.getBoundingBox();
            largeBox = smallBox.getMergedBox(pointToScore);
        }
        updateForCompute(smallBox, largeBox);
        // probability that a random cut of the large box separates the query point
        double probOfCut = sumOfDifferenceInRange / sumOfNewRange;
        if (probOfCut <= 0) {
            pointInsideBox = true;
        } else {
            double fieldVal = fieldExt(node, centerOfMass, savedMass, pointToScore);
            double influenceVal = influenceExt(node, centerOfMass, savedMass, pointToScore);
            // if center of mass has been enabled, then those can be used in a similar
            // situation
            // otherwise the center of mass is the 0 vector
            for (int i = 0; i < pointToScore.length; i++) {
                double prob = differenceInRangeVector[2 * i] / sumOfNewRange;
                stored.probMass.high[i] = prob * influenceVal + (1 - probOfCut) * stored.probMass.high[i];
                stored.measure.high[i] = prob * fieldVal + (1 - probOfCut) * stored.measure.high[i];
                stored.distances.high[i] = prob * directionalDistanceVector[2 * i] * influenceVal
                        + (1 - probOfCut) * stored.distances.high[i];
            }
            for (int i = 0; i < pointToScore.length; i++) {
                double prob = differenceInRangeVector[2 * i + 1] / sumOfNewRange;
                stored.probMass.low[i] = prob * influenceVal + (1 - probOfCut) * stored.probMass.low[i];
                stored.measure.low[i] = prob * fieldVal + (1 - probOfCut) * stored.measure.low[i];
                stored.distances.low[i] = prob * directionalDistanceVector[2 * i + 1] * influenceVal
                        + (1 - probOfCut) * stored.distances.low[i];
            }
        }
    }

    /**
     * Initialize the stored measure at the leaf. Two cases: the leaf equals the
     * query point (no range growth), or the leaf is a distinct point.
     */
    @Override
    public void acceptLeaf(INodeView leafNode, int depthOfNode) {
        updateForCompute(leafNode.getBoundingBox(), leafNode.getBoundingBox().getMergedBox(pointToScore));
        if (sumOfDifferenceInRange <= 0) { // values must be equal
            savedMass = pointMass + leafNode.getMass();
            pointEqualsLeaf = true;
            // split the self contribution evenly over the 2 * dimension directions
            for (int i = 0; i < pointToScore.length; i++) {
                stored.measure.high[i] = stored.measure.low[i] = 0.5 * selfField(leafNode, savedMass)
                        / pointToScore.length;
                stored.probMass.high[i] = stored.probMass.low[i] = 0.5 * selfInfluence(leafNode, savedMass)
                        / pointToScore.length;
            }
            // restart the inside-box tracking for the shadow-box comparisons above
            Arrays.fill(coordInsideBox, false);
        } else {
            savedMass = pointMass;
            double fieldVal = fieldPoint(leafNode, savedMass, pointToScore);
            double influenceVal = influencePoint(leafNode, savedMass, pointToScore);
            for (int i = 0; i < pointToScore.length; i++) {
                double prob = differenceInRangeVector[2 * i] / sumOfNewRange;
                stored.probMass.high[i] = prob * influenceVal;
                stored.measure.high[i] = prob * fieldVal;
                stored.distances.high[i] = prob * directionalDistanceVector[2 * i] * influenceVal;
            }
            for (int i = 0; i < pointToScore.length; i++) {
                double prob = differenceInRangeVector[2 * i + 1] / sumOfNewRange;
                stored.probMass.low[i] = prob * influenceVal;
                stored.measure.low[i] = prob * fieldVal;
                stored.distances.low[i] = prob * directionalDistanceVector[2 * i + 1] * influenceVal;
            }
        }
    }

    /**
     * Update instance variables based on the difference between the large box and
     * small box. The values set by this method are used in {@link #accept} and
     * {@link #acceptLeaf} to update the stored density.
     *
     * @param smallBox the box not containing the query point
     * @param largeBox the box grown to contain the query point
     */
    void updateForCompute(IBoundingBoxView smallBox, IBoundingBoxView largeBox) {
        sumOfNewRange = 0d;
        sumOfDifferenceInRange = 0d;
        Arrays.fill(directionalDistanceVector, 0);
        Arrays.fill(differenceInRangeVector, 0);
        for (int i = 0; i < pointToScore.length; ++i) {
            sumOfNewRange += largeBox.getRange(i);
            if (coordInsideBox[i]) {
                // once a coordinate is inside the (growing) box it stays inside
                continue;
            }
            // growth of the box on the high and low sides of dimension i
            double maxGap = Math.max(largeBox.getMaxValue(i) - smallBox.getMaxValue(i), 0.0);
            double minGap = Math.max(smallBox.getMinValue(i) - largeBox.getMinValue(i), 0.0);
            if (maxGap + minGap > 0.0) {
                sumOfDifferenceInRange += (minGap + maxGap);
                differenceInRangeVector[2 * i] = maxGap;
                differenceInRangeVector[2 * i + 1] = minGap;
                if (maxGap > 0) {
                    directionalDistanceVector[2 * i] = (maxGap + smallBox.getRange(i));
                } else {
                    directionalDistanceVector[2 * i + 1] = (minGap + smallBox.getRange(i));
                }
            } else {
                coordInsideBox[i] = true;
            }
        }
    }

    /**
     * The functions below can be changed for arbitrary interpolations.
     *
     * @param node/leafNode corresponds to the node in the tree influencing the
     *                      current point
     * @param centerOfMass  feature flag describing if the center of mass is enabled
     *                      in tree in general this can be used for arbitrary
     *                      extensions of the node class with additional
     *                      information.
     * @param thisMass      duplicity of query
     * @param thislocation  location of query
     * @return is the value or a 0/1 function -- the functions can be thresholded
     *         based of geometric coordinates of the query and the node. Many
     *         different Kernels can be expressed in this decomposed manner.
     */
    double fieldExt(INodeView node, boolean centerOfMass, double thisMass, float[] thislocation) {
        return (node.getMass() + thisMass);
    }

    double influenceExt(INodeView node, boolean centerOfMass, double thisMass, float[] thislocation) {
        return 1.0;
    }

    double fieldPoint(INodeView node, double thisMass, float[] thislocation) {
        return (node.getMass() + thisMass);
    }

    double influencePoint(INodeView node, double thisMass, float[] thislocation) {
        return 1.0;
    }

    double selfField(INodeView leafNode, double mass) {
        return mass;
    }

    double selfInfluence(INodeView leafnode, double mass) {
        return 1.0;
    }

    /**
     * @return true once the query point is known to lie inside the current
     *         bounding box; no further node can change the result.
     */
    @Override
    public boolean isConverged() {
        return pointInsideBox;
    }
}
| 545 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/PointStoreSmall.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
public class PointStoreSmall extends PointStore {

    // sentinel marking an unused/infeasible slot in the location list
    public static char INFEASIBLE_SMALL_POINTSTORE_LOCN = (char) PointStore.INFEASIBLE_LOCN;

    /**
     * locations stored compactly as 16-bit chars; each entry is the point's start
     * offset in the store divided by baseDimension
     */
    protected char[] locationList;

    void setInfeasiblePointstoreLocationIndex(int index) {
        locationList[index] = INFEASIBLE_SMALL_POINTSTORE_LOCN;
    };

    /** grow the location list, marking all new slots infeasible */
    void extendLocationList(int newCapacity) {
        int oldCapacity = locationList.length;
        assert (oldCapacity < newCapacity);
        locationList = Arrays.copyOf(locationList, newCapacity);
        for (int i = oldCapacity; i < newCapacity; i++) {
            locationList[i] = INFEASIBLE_SMALL_POINTSTORE_LOCN;
        }
    };

    void setLocation(int index, int location) {
        locationList[index] = (char) (location / baseDimension);
        // verify the division was exact and the encoded value round-trips
        assert (baseDimension * (int) locationList[index] == location);
    }

    int getLocation(int index) {
        return baseDimension * (int) locationList[index];
    }

    int locationListLength() {
        return locationList.length;
    }

    /**
     * Construct a point store from a builder; locations are encoded as chars, so
     * the encoded values must fit in 16 bits.
     *
     * @param builder the configured builder
     */
    public PointStoreSmall(PointStore.Builder builder) {
        super(builder);
        // widen to long before multiplying: shingleSize * capacity evaluated in int
        // arithmetic could overflow, wrap negative and incorrectly pass this check
        checkArgument((long) shingleSize * capacity < Character.MAX_VALUE, " incorrect parameters");
        if (builder.locationList != null) {
            // narrow the provided int locations into the compact char encoding
            locationList = new char[builder.locationList.length];
            for (int i = 0; i < locationList.length; i++) {
                locationList[i] = (char) builder.locationList[i];
            }
        } else {
            locationList = new char[currentStoreCapacity];
            Arrays.fill(locationList, INFEASIBLE_SMALL_POINTSTORE_LOCN);
        }
    }

    /** convenience constructor for unshingled stores */
    public PointStoreSmall(int dimensions, int capacity) {
        this(PointStore.builder().capacity(capacity).dimensions(dimensions).shingleSize(1).initialSize(capacity));
    }

    @Override
    protected void checkFeasible(int index) {
        checkArgument(locationList[index] != INFEASIBLE_SMALL_POINTSTORE_LOCN, " invalid point");
    }

    /** @return the number of feasible (occupied) slots */
    @Override
    public int size() {
        int count = 0;
        for (int i = 0; i < locationList.length; i++) {
            if (locationList[i] != INFEASIBLE_SMALL_POINTSTORE_LOCN) {
                ++count;
            }
        }
        return count;
    }

    /** @return the raw (char-encoded) locations widened to ints */
    @Override
    public int[] getLocationList() {
        int[] answer = new int[locationList.length];
        for (int i = 0; i < locationList.length; i++) {
            answer[i] = locationList[i];
        }
        return answer;
    }
}
| 546 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/PointStoreLarge.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
public class PointStoreLarge extends PointStore {

    /**
     * per-index locations; each entry is the point's start offset in the store
     * divided by baseDimension, or INFEASIBLE_LOCN for an unused slot
     */
    protected int[] locationList;

    void setInfeasiblePointstoreLocationIndex(int index) {
        locationList[index] = INFEASIBLE_LOCN;
    };

    /** grow the location list, marking all new slots infeasible */
    void extendLocationList(int newCapacity) {
        int oldCapacity = locationList.length;
        locationList = Arrays.copyOf(locationList, newCapacity);
        for (int i = oldCapacity; i < newCapacity; i++) {
            locationList[i] = INFEASIBLE_LOCN;
        }
    };

    void setLocation(int index, int location) {
        locationList[index] = location / baseDimension;
    }

    int getLocation(int index) {
        return baseDimension * locationList[index];
    }

    int locationListLength() {
        return locationList.length;
    }

    /**
     * Construct a point store from a builder.
     *
     * @param builder the configured builder
     */
    public PointStoreLarge(PointStore.Builder builder) {
        super(builder);
        // widen to long before multiplying: dimensions * capacity evaluated in int
        // arithmetic could overflow, wrap negative and incorrectly pass this check
        checkArgument((long) dimensions * capacity < Integer.MAX_VALUE, " incorrect parameters");
        if (builder.locationList != null) {
            locationList = Arrays.copyOf(builder.locationList, builder.locationList.length);
        } else {
            locationList = new int[currentStoreCapacity];
            Arrays.fill(locationList, INFEASIBLE_LOCN);
        }
    }

    /** @return the number of feasible (occupied) slots */
    @Override
    public int size() {
        int count = 0;
        for (int i = 0; i < locationList.length; i++) {
            if (locationList[i] != INFEASIBLE_LOCN) {
                ++count;
            }
        }
        return count;
    }

    @Override
    protected void checkFeasible(int index) {
        checkArgument(locationList[index] != INFEASIBLE_LOCN, " invalid point");
    }

    @Override
    public int[] getLocationList() {
        return Arrays.copyOf(locationList, locationList.length);
    }
}
| 547 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/IndexIntervalManager.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Deque;
import java.util.Stack;
/**
* This class defines common functionality for Store classes, including
* maintaining the stack of free pointers.
*/
public class IndexIntervalManager {

    // total number of indices managed; valid indices are in [0, capacity)
    protected int capacity;
    // parallel arrays of free-interval endpoints; entries [0, lastInUse) are valid
    protected int[] freeIndexesStart;
    protected int[] freeIndexesEnd;
    // number of free intervals currently stored in the arrays above
    protected int lastInUse;

    /**
     * Create a manager over the indices [0, capacity), all initially free.
     *
     * @param capacity the number of indices; must be positive
     */
    public IndexIntervalManager(int capacity) {
        checkArgument(capacity > 0, "incorrect parameters");
        freeIndexesEnd = new int[1];
        freeIndexesStart = new int[1];
        lastInUse = 1;
        this.capacity = capacity;
        freeIndexesStart[0] = 0;
        freeIndexesEnd[0] = capacity - 1;
    }

    /**
     * Convert a reference-count array into a bitset with a set bit for every index
     * whose count is positive (i.e., every index in use).
     *
     * @param refCount per-index reference counts
     * @return a BitSet marking the occupied indices
     */
    static BitSet toBits(int[] refCount) {
        checkArgument(refCount != null, "not a meaningful array input");
        BitSet bits = new BitSet(refCount.length);
        for (int i = 0; i < refCount.length; i++) {
            if (refCount[i] > 0) {
                bits.set(i);
            }
        }
        return bits;
    }

    public IndexIntervalManager(int[] refCount, int capacity) {
        this(capacity, refCount.length, toBits(refCount));
    }

    /**
     * Reconstruct the free intervals from a bitset of occupied indices.
     *
     * @param capacity the total number of indices managed
     * @param length   the number of bits to scan
     * @param bits     set bits mark indices in use; clear bits are free
     */
    public IndexIntervalManager(int capacity, int length, BitSet bits) {
        checkArgument(bits != null, " null bitset not allowed");
        this.capacity = capacity;
        int first = bits.nextClearBit(0);
        // ArrayDeque is the recommended LIFO structure (legacy Stack is synchronized);
        // push/pop semantics are identical
        Deque<int[]> stack = new ArrayDeque<>();
        while (first < length) {
            // nextSetBit returns -1 when no set bit remains, making last < first
            int last = bits.nextSetBit(first) - 1;
            if (last >= first) {
                // a free run [first, last] followed by an occupied index
                stack.push(new int[] { first, last });
                // nextClearBit never returns a negative value, so no extra guard needed
                first = bits.nextClearBit(last + 1);
            } else { // we do not distinguish between all full and all empty
                if (first < length - 1) {
                    if (bits.nextClearBit(first + 1) == first + 1) {
                        stack.push(new int[] { first, length - 1 });
                    } else {
                        stack.push(new int[] { first, first });
                    }
                } else {
                    stack.push(new int[] { length - 1, length - 1 });
                }
                break;
            }
        }
        lastInUse = stack.size();
        freeIndexesEnd = new int[lastInUse + 1];
        freeIndexesStart = new int[lastInUse + 1];
        int count = 0;
        while (!stack.isEmpty()) {
            int[] interval = stack.pop();
            freeIndexesStart[count] = interval[0];
            freeIndexesEnd[count] = interval[1];
            ++count;
        }
    }

    /**
     * Add the indices [capacity, newCapacity) as a new free interval.
     *
     * @param newCapacity the new (strictly larger) capacity
     */
    public void extendCapacity(int newCapacity) {
        checkArgument(newCapacity > capacity, " incorrect call, we can only increase capacity");
        // the current capacity need not be the final capacity, for example in case of
        // point store
        if (freeIndexesStart.length == lastInUse) {
            freeIndexesStart = Arrays.copyOf(freeIndexesStart, lastInUse + 1);
            freeIndexesEnd = Arrays.copyOf(freeIndexesEnd, lastInUse + 1);
        }
        freeIndexesStart[lastInUse] = capacity;
        freeIndexesEnd[lastInUse] = (newCapacity - 1);
        lastInUse += 1;
        capacity = newCapacity;
    }

    /** @return true when no free index remains */
    public boolean isEmpty() {
        return (lastInUse == 0);
    }

    /**
     * @return the maximum number of nodes whose data can be stored.
     */
    public int getCapacity() {
        return capacity;
    }

    /**
     * @return the number of indices which are being maintained
     */
    public int size() {
        int sum = 0;
        for (int i = 0; i < lastInUse; i++) {
            sum += freeIndexesEnd[i] - freeIndexesStart[i] + 1;
        }
        return sum;
    }

    /**
     * Take an index from the free index stack.
     *
     * @return a free index that can be used to store a value.
     */
    public int takeIndex() {
        checkState(lastInUse > 0, "store is full");
        int answer = freeIndexesStart[lastInUse - 1];
        if (answer == freeIndexesEnd[lastInUse - 1]) {
            // the interval is exhausted; drop it
            lastInUse -= 1;
        } else {
            freeIndexesStart[lastInUse - 1] = answer + 1;
        }
        return answer;
    }

    /**
     * Release an index. After the release, the index value may be returned in a
     * future call to {@link #takeIndex()}.
     *
     * @param index The index value to release.
     */
    public void releaseIndex(int index) {
        if (lastInUse > 0) {
            // try to coalesce with the topmost interval before creating a new one
            int start = freeIndexesStart[lastInUse - 1];
            int end = freeIndexesEnd[lastInUse - 1];
            if (start == index + 1) {
                freeIndexesStart[lastInUse - 1] = index;
                return;
            } else if (end + 1 == index) {
                freeIndexesEnd[lastInUse - 1] = index;
                return;
            }
        }
        if (freeIndexesStart.length == lastInUse) {
            freeIndexesStart = Arrays.copyOf(freeIndexesStart, lastInUse + 1);
            freeIndexesEnd = Arrays.copyOf(freeIndexesEnd, lastInUse + 1);
        }
        freeIndexesStart[lastInUse] = index;
        freeIndexesEnd[lastInUse] = index;
        lastInUse += 1;
    }
}
| 548 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/IPointStoreView.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import java.util.List;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.summarization.ICluster;
/**
* A view of the PointStore that forces a read only access to the store.
*/
public interface IPointStoreView<Point> {
    // overall dimension of each stored (post-shingling) point
    int getDimensions();
    // maximum number of points the store can hold
    int getCapacity();
    // a copy of the point stored at the given index
    float[] getNumericVector(int index);
    // the most recent shingle, when internal shingling is enabled
    float[] getInternalShingle();
    // the next sequence number the store expects
    long getNextSequenceIndex();
    // transforms an input point into a full shingled point (copy; input unchanged)
    float[] transformToShingledPoint(Point input);
    // true if shingles are stored as a cyclic buffer rather than a sliding window
    boolean isInternalRotationEnabled();
    // true if the store performs shingling internally
    boolean isInternalShinglingEnabled();
    // the shingle size (1 if shingling is not used)
    int getShingleSize();
    // maps indices in input-dimension space to indices in shingled space
    int[] transformIndices(int[] indexList);
    /**
     * Prints the point given the index, irrespective of the encoding of the point.
     * Used in exceptions and error messages
     *
     * @param index index of the point in the store
     * @return a string that can be printed
     */
    String toString(int index);
    /**
     * a function that exposes an L1 clustering of the points stored in pointstore
     *
     * @param maxAllowed the maximum number of clusters one is
     *                                 interested in
     * @param shrinkage a parameter used in CURE algorithm that can
     *                                 produce a combination of behaviors (=1
     *                                 corresponds to centroid clustering, =0
     *                                 resembles robust Minimum Spanning Tree)
     * @param numberOfRepresentatives another parameter used to control the
     *                                 plausible (potentially non-spherical) shapes
     *                                 of the clusters
     * @param separationRatio a parameter that controls how aggressively we
     *                                 go below maxAllowed -- this is often set to a
     *                                 DEFAULT_SEPARATION_RATIO_FOR_MERGE
     * @param distance a distance function
     * @param previous a (possibly null) list of previous clusters
     *                                 which can be used to seed the current clusters
     *                                 to ensure some smoothness
     * @return a list of clusters
     */
    List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,
            double separationRatio, BiFunction<float[], float[], Double> distance, List<ICluster<float[]>> previous);
}
| 549 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/IPointStore.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
/**
* A store for points of precision type P, which can be double[] or float[]
* which can be added to a store by the update coordinator and made accessible
* to the trees in a read only manner.
*
* @param <Point> type of input point
*/
public interface IPointStore<PointReference, Point> extends IPointStoreView<Point> {
    /**
     * Adds to the store; there may be a loss of precision if enableFloat is on in
     * the Forest level. But external interface of the forest is double[]
     *
     * Note that delete is automatic, that is when no trees are accessing the point
     *
     * @param point point to be added
     * @param sequenceNum sequence number of the point
     * @return reference of the stored point
     */
    PointReference add(Point point, long sequenceNum);
    /**
     * Increments the reference count of the point at the given index.
     *
     * @param index index of a stored point
     * @return the incremented value
     */
    int incrementRefCount(int index);
    /**
     * Decrements the reference count of the point at the given index; the point
     * may be reclaimed when the count reaches zero.
     *
     * @param index index of a stored point
     * @return the decremented value
     */
    int decrementRefCount(int index);
}
| 550 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/store/PointStore.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.summarization.Summarizer.iterativeClustering;
import static java.lang.Math.max;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.Vector;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.MultiCenter;
import com.amazon.randomcutforest.util.ArrayUtils;
import com.amazon.randomcutforest.util.Weighted;
public abstract class PointStore implements IPointStore<Integer, float[]> {
public static int INFEASIBLE_POINTSTORE_INDEX = -1;
public static int INFEASIBLE_LOCN = (int) -1;
/**
* an index manager to manage free locations
*/
protected IndexIntervalManager indexManager;
/**
* generic store class
*/
protected float[] store;
/**
* generic internal shingle, note that input is doubles
*/
protected float[] internalShingle;
/**
* enable rotation of shingles; use a cyclic buffer instead of sliding window
*/
boolean rotationEnabled;
/**
* last seen timestamp for internal shingling
*/
protected long nextSequenceIndex;
/**
* refCount[i] counts of the number of trees that are currently using the point
* determined by locationList[i] or (for directLocationMapping) the point at
* store[i * dimensions]
*/
protected byte[] refCount;
protected HashMap<Integer, Integer> refCountMap;
/**
* first location where new data can be safely copied;
*/
int startOfFreeSegment;
/**
* overall dimension of the point (after shingling)
*/
int dimensions;
/**
* shingle size, if known. Setting shingle size = 1 rules out overlapping
*/
int shingleSize;
/**
* number of original dimensions which are shingled to produce and overall point
* dimensions = shingleSize * baseDimensions. However there is a possibility
* that even though the data is shingled, we may not choose to use the
* overlapping (say for out of order updates).
*/
int baseDimension;
/**
* maximum capacity
*/
int capacity;
/**
* current capacity of store (number of shingled points)
*/
int currentStoreCapacity;
/**
* enabling internal shingling
*/
boolean internalShinglingEnabled;
abstract void setInfeasiblePointstoreLocationIndex(int index);
abstract void extendLocationList(int newCapacity);
abstract void setLocation(int index, int location);
abstract int getLocation(int index);
    /**
     * Decrement the reference count for the given index.
     *
     * Counts are kept in a byte array (values 0..255); any excess over 255 is
     * spilled into refCountMap. A map entry therefore implies the byte counter is
     * saturated at 255.
     *
     * @param index The index value.
     * @throws IllegalArgumentException if the index value is not valid.
     * @throws IllegalArgumentException if the current reference count for this
     *                                  index is non positive.
     */
    @Override
    public int decrementRefCount(int index) {
        checkArgument(index >= 0 && index < locationListLength(), " index not supported by store");
        checkArgument((refCount[index] & 0xff) > 0, " cannot decrement index");
        Integer value = refCountMap.remove(index);
        if (value == null) {
            // no overflow entry: the byte counter is the whole count
            if ((refCount[index] & 0xff) == 1) {
                // last reference gone: recycle the index and invalidate its location
                indexManager.releaseIndex(index);
                refCount[index] = (byte) 0;
                setInfeasiblePointstoreLocationIndex(index);
                return 0;
            } else {
                int newVal = (byte) ((refCount[index] & 0xff) - 1);
                refCount[index] = (byte) newVal;
                return newVal;
            }
        } else {
            // overflow entry present: decrement the spilled part, keep byte at 255
            if (value > 1) {
                refCountMap.put(index, value - 1);
            }
            // total count after the decrement (spilled part + byte counter)
            return value - 1 + (refCount[index] & 0xff);
        }
    }
/**
* takes an index from the index manager and rezises if necessary also adjusts
* refCount size to have increment/decrement be seamless
*
* @return an index from the index manager
*/
int takeIndex() {
if (indexManager.isEmpty()) {
if (indexManager.getCapacity() < capacity) {
int oldCapacity = indexManager.getCapacity();
int newCapacity = Math.min(capacity, 1 + (int) Math.floor(1.1 * oldCapacity));
indexManager.extendCapacity(newCapacity);
refCount = Arrays.copyOf(refCount, newCapacity);
extendLocationList(newCapacity);
} else {
throw new IllegalStateException(" index manager in point store is full ");
}
}
return indexManager.takeIndex();
}
    /**
     * Computes how many floats must be appended at startOfFreeSegment to store
     * {@code tempPoint}. When the new point's prefix overlaps the most recently
     * written shingle (and, for rotation, the write position is aligned), only the
     * new {@code baseDimension} values need to be written; otherwise a full point
     * plus any rotation-alignment padding is required.
     *
     * @param tempPoint the (shingled) point about to be written
     * @return number of floats to append to the store
     */
    protected int getAmountToWrite(float[] tempPoint) {
        if (checkShingleAlignment(startOfFreeSegment, tempPoint)) {
            if (!rotationEnabled
                    || startOfFreeSegment % dimensions == (nextSequenceIndex - 1) * baseDimension % dimensions) {
                return baseDimension;
            }
        } else if (!rotationEnabled) {
            return dimensions;
        }
        // the following adds the padding for what exists;
        // then the padding for the new part; all mod (dimensions)
        // note that the expression is baseDimension when the condition
        // startOfFreeSegment % dimensions == (nextSequenceIndex-1)*baseDimension %
        // dimension
        // is met
        return dimensions + (dimensions - startOfFreeSegment % dimensions
                + (int) ((nextSequenceIndex) * baseDimension) % dimensions) % dimensions;
    }
    /**
     * Add a point to the point store and return the index of the stored point.
     * Convenience overload that converts the input to floats (with possible loss
     * of precision) and delegates to {@link #add(float[], long)}.
     *
     * @param point The point being added to the store.
     * @param sequenceNum sequence number of the point
     * @return the index value of the stored point.
     * @throws IllegalArgumentException if the length of the point does not match
     *             the point store's dimensions.
     * @throws IllegalStateException if the point store is full.
     */
    public int add(double[] point, long sequenceNum) {
        return add(toFloatArray(point), sequenceNum);
    }
public Integer add(float[] point, long sequenceNum) {
checkArgument(internalShinglingEnabled || point.length == dimensions,
"point.length must be equal to dimensions");
checkArgument(!internalShinglingEnabled || point.length == baseDimension,
"point.length must be equal to dimensions");
float[] tempPoint = point;
nextSequenceIndex++;
if (internalShinglingEnabled) {
// rotation is supported via the output and input is unchanged
tempPoint = constructShingleInPlace(internalShingle, point, false);
if (nextSequenceIndex < shingleSize) {
return INFEASIBLE_POINTSTORE_INDEX;
}
}
int nextIndex;
int amountToWrite = getAmountToWrite(tempPoint);
if (startOfFreeSegment > currentStoreCapacity * dimensions - amountToWrite) {
// try compaction and then resizing
compact();
// the compaction can change the array contents
amountToWrite = getAmountToWrite(tempPoint);
if (startOfFreeSegment > currentStoreCapacity * dimensions - amountToWrite) {
resizeStore();
checkState(startOfFreeSegment + amountToWrite <= currentStoreCapacity * dimensions, "out of space");
}
}
nextIndex = takeIndex();
setLocation(nextIndex, startOfFreeSegment - dimensions + amountToWrite);
if (amountToWrite <= dimensions) {
copyPoint(tempPoint, dimensions - amountToWrite, startOfFreeSegment, amountToWrite);
} else {
copyPoint(tempPoint, 0, startOfFreeSegment + amountToWrite - dimensions, dimensions);
}
startOfFreeSegment += amountToWrite;
refCount[nextIndex] = 1;
return nextIndex;
}
/**
* Increment the reference count for the given index. This operation assumes
* that there is currently a point stored at the given index and will throw an
* exception if that's not the case.
*
* @param index The index value.
* @throws IllegalArgumentException if the index value is not valid.
* @throws IllegalArgumentException if the current reference count for this
* index is non positive.
*/
public int incrementRefCount(int index) {
checkArgument(index >= 0 && index < locationListLength(), " index not supported by store");
checkArgument((refCount[index] & 0xff) > 0, " not in use");
Integer value = refCountMap.remove(index);
if (value == null) {
if ((refCount[index] & 0xff) == 255) {
refCountMap.put(index, 1);
return 256;
} else {
int newVal = (byte) ((refCount[index] & 0xff) + 1);
refCount[index] = (byte) newVal;
return newVal;
}
} else {
refCountMap.put(index, value + 1);
return value + 1;
}
}
    @Override
    public int getDimensions() {
        return dimensions;
    }
    /**
     * maximum capacity, in number of points of size dimensions
     */
    public int getCapacity() {
        return capacity;
    }
    /**
     * capacity of the indices
     */
    public int getIndexCapacity() {
        return indexManager.getCapacity();
    }
    /**
     * used in mapper
     *
     * @return gets the shingle size (if known, otherwise is 1)
     */
    public int getShingleSize() {
        return shingleSize;
    }
    /**
     * gets the current store capacity in the number of points with dimension many
     * values
     *
     * @return capacity in number of points
     */
    public int getCurrentStoreCapacity() {
        return currentStoreCapacity;
    }
    /**
     * used for mappers
     *
     * @return the store that stores the values
     */
    public float[] getStore() {
        return store;
    }
    /**
     * used for mapper
     *
     * @return the array of counts referring to different points; each entry is the
     *         byte counter plus any overflow recorded in refCountMap
     */
    public int[] getRefCount() {
        int[] newarray = new int[refCount.length];
        for (int i = 0; i < refCount.length; i++) {
            newarray[i] = refCount[i] & 0xff;
            Integer value = refCountMap.get(i);
            if (value != null) {
                newarray[i] += value;
            }
        }
        return newarray;
    }
    /**
     * useful in mapper to not copy
     *
     * @return the length of the prefix (first location where new data can be
     *         safely written)
     */
    public int getStartOfFreeSegment() {
        return startOfFreeSegment;
    }
    /**
     * used in mapper
     *
     * @return if shingling is performed internally
     */
    public boolean isInternalShinglingEnabled() {
        return internalShinglingEnabled;
    }
    /**
     * used in mapper and in extrapolation
     *
     * @return the last timestamp seen
     */
    public long getNextSequenceIndex() {
        return nextSequenceIndex;
    }
    /**
     * used to obtain the most recent shingle seen so far in case of internal
     * shingling
     *
     * @return for internal shingling, returns the last seen shingle
     */
    public float[] getInternalShingle() {
        checkState(internalShinglingEnabled, "internal shingling is not enabled");
        return copyShingle();
    }
/**
* The following function eliminates redundant information that builds up in the
* point store and shrinks the point store
*/
abstract int locationListLength();
void alignBoundaries(int initial, int freshStart) {
int locn = freshStart;
for (int i = 0; i < initial; i++) {
store[locn] = 0;
++locn;
}
}
public void compact() {
Vector<Integer[]> reverseReference = new Vector<>();
for (int i = 0; i < locationListLength(); i++) {
int locn = getLocation(i);
if (locn < currentStoreCapacity * dimensions && locn >= 0) {
reverseReference.add(new Integer[] { locn, i });
}
}
reverseReference.sort((o1, o2) -> o1[0].compareTo(o2[0]));
int freshStart = 0;
int jStatic = 0;
int jDynamic = 0;
int jEnd = reverseReference.size();
while (jStatic < jEnd) {
int blockStart = reverseReference.get(jStatic)[0];
int blockEnd = blockStart + dimensions;
int initial = 0;
if (rotationEnabled) {
initial = (dimensions - freshStart + blockStart) % dimensions;
}
int k = jStatic + 1;
jDynamic = jStatic + 1;
while (k < jEnd) {
int newElem = reverseReference.get(k)[0];
if (blockEnd >= newElem) {
k += 1;
jDynamic += 1;
blockEnd = max(blockEnd, newElem + dimensions);
} else {
k = jEnd;
}
}
alignBoundaries(initial, freshStart);
freshStart += initial;
int start = freshStart;
for (int i = blockStart; i < blockEnd; i++) {
assert (!rotationEnabled || freshStart % dimensions == i % dimensions);
if (jStatic < jEnd) {
int locn = reverseReference.get(jStatic)[0];
if (i == locn) {
int newIdx = reverseReference.get(jStatic)[1];
setLocation(newIdx, freshStart);
jStatic += 1;
}
}
freshStart += 1;
}
copyTo(start, blockStart, blockEnd - blockStart);
if (jStatic != jDynamic) {
throw new IllegalStateException("There is discepancy in indices");
}
}
startOfFreeSegment = freshStart;
}
    /**
     * returns the number of copies of a point
     *
     * @param i index of a point
     * @return number of copies of the point managed by the store; the byte counter
     *         plus any overflow recorded in refCountMap
     */
    public int getRefCount(int i) {
        int val = refCount[i] & 0xff;
        Integer value = refCountMap.get(i);
        if (value != null) {
            val += value;
        }
        return val;
    }
    @Override
    public boolean isInternalRotationEnabled() {
        return rotationEnabled;
    }
    /**
     *
     * @return the number of indices stored
     */
    public abstract int size();
    // locations of the stored points, primarily for serialization
    public abstract int[] getLocationList();
/**
* transforms a point to a shingled point if internal shingling is turned on
*
* @param point new input values
* @return shingled point
*/
@Override
public float[] transformToShingledPoint(float[] point) {
checkNotNull(point, "point must not be null");
if (internalShinglingEnabled && point.length == baseDimension) {
return constructShingleInPlace(copyShingle(), point, rotationEnabled);
}
return ArrayUtils.cleanCopy(point);
}
    /**
     * Returns a copy of the internal shingle. Without rotation this is a plain
     * copy; with rotation the entries are re-indexed by the current sequence
     * offset so that the copy matches the rotated storage layout.
     */
    private float[] copyShingle() {
        if (!rotationEnabled) {
            return Arrays.copyOf(internalShingle, dimensions);
        } else {
            float[] answer = new float[dimensions];
            // rotate by the current sequence offset
            int offset = (int) (nextSequenceIndex * baseDimension);
            for (int i = 0; i < dimensions; i++) {
                answer[(offset + i) % dimensions] = internalShingle[i];
            }
            return answer;
        }
    }
    /**
     * the following function is used to update the shingle in place; it can be used
     * to produce new copies as well
     *
     * @param target the array containing the shingled point
     * @param point the new values
     * @param rotationEnabled whether to write the new values at the rotated offset
     *            (note: shadows the field of the same name deliberately, so callers
     *            can force non-rotated updates of the internal shingle)
     * @return the array which now contains the updated shingle
     */
    protected float[] constructShingleInPlace(float[] target, float[] point, boolean rotationEnabled) {
        if (!rotationEnabled) {
            // slide the window left by baseDimension and append the new values
            for (int i = 0; i < dimensions - baseDimension; i++) {
                target[i] = target[i + baseDimension];
            }
            for (int i = 0; i < baseDimension; i++) {
                // the ternary normalizes -0.0f to +0.0f (-0.0f == 0.0 is true)
                target[dimensions - baseDimension + i] = (point[i] == 0.0) ? 0.0f : point[i];
            }
        } else {
            // overwrite the slot for the current sequence position in the cyclic buffer
            int offset = ((int) (nextSequenceIndex * baseDimension) % dimensions);
            for (int i = 0; i < baseDimension; i++) {
                target[offset + i] = (point[i] == 0.0) ? 0.0f : point[i];
            }
        }
        return target;
    }
    /**
     * for extrapolation and imputation, in presence of internal shingling we need
     * to update the list of missing values from the space of the input dimensions
     * to the shingled dimensions
     *
     * @param indexList list of missing values in the input point
     * @return list of missing values in the shingled point
     * @throws IllegalArgumentException if internal shingling is off, the list is
     *             longer than baseDimension, or any index is out of range
     */
    @Override
    public int[] transformIndices(int[] indexList) {
        checkArgument(internalShinglingEnabled, " only allowed for internal shingling");
        checkArgument(indexList.length <= baseDimension, " incorrect length");
        int[] results = Arrays.copyOf(indexList, indexList.length);
        if (!rotationEnabled) {
            // the newest values occupy the last baseDimension slots of the shingle
            for (int i = 0; i < indexList.length; i++) {
                checkArgument(results[i] < baseDimension, "incorrect index");
                results[i] += dimensions - baseDimension;
            }
        } else {
            // the newest values occupy the rotated slot for the current sequence
            int offset = ((int) (nextSequenceIndex * baseDimension) % dimensions);
            for (int i = 0; i < indexList.length; i++) {
                checkArgument(results[i] < baseDimension, "incorrect index");
                results[i] = (results[i] + offset) % dimensions;
            }
        }
        return results;
    }
/**
* a builder
*/
public static class Builder<T extends Builder<T>> {
// We use Optional types for optional primitive fields when it doesn't make
// sense to use a constant default.
protected int dimensions;
protected int shingleSize = 1;
protected int baseDimension;
protected boolean internalRotationEnabled = false;
protected boolean internalShinglingEnabled = false;
protected int capacity;
protected Optional<Integer> initialPointStoreSize = Optional.empty();
protected int currentStoreCapacity = 0;
protected int indexCapacity = 0;
protected float[] store = null;
protected double[] knownShingle = null;
protected int[] locationList = null;
protected int[] refCount = null;
protected long nextTimeStamp = 0;
protected int startOfFreeSegment = 0;
// dimension of the points being stored
public T dimensions(int dimensions) {
this.dimensions = dimensions;
return (T) this;
}
// maximum number of points in the store
public T capacity(int capacity) {
this.capacity = capacity;
return (T) this;
}
// initial size of the pointstore, dynamicResizing must be on
// and value cannot exceed capacity
public T initialSize(int initialPointStoreSize) {
this.initialPointStoreSize = Optional.of(initialPointStoreSize);
return (T) this;
}
// shingleSize for opportunistic compression
public T shingleSize(int shingleSize) {
this.shingleSize = shingleSize;
return (T) this;
}
// is internal shingling enabled
public T internalShinglingEnabled(boolean internalShinglingEnabled) {
this.internalShinglingEnabled = internalShinglingEnabled;
return (T) this;
}
// are shingles rotated
public T internalRotationEnabled(boolean internalRotationEnabled) {
this.internalRotationEnabled = internalRotationEnabled;
return (T) this;
}
@Deprecated
public T directLocationEnabled(boolean value) {
return (T) this;
}
@Deprecated
public T dynamicResizingEnabled(boolean value) {
return (T) this;
}
// the size of the array storing the specific points
// this is used for serialization
public T currentStoreCapacity(int currentStoreCapacity) {
this.currentStoreCapacity = currentStoreCapacity;
return (T) this;
}
// the size of the pointset being tracked
// this is used for serialization
public T indexCapacity(int indexCapacity) {
this.indexCapacity = indexCapacity;
return (T) this;
}
// last known shingle, if internalshingle is on
// this shingle is not rotated
// this is used for serialization
public T knownShingle(double[] knownShingle) {
this.knownShingle = knownShingle;
return (T) this;
}
// count of the points being tracked
// used for serialization
public T refCount(int[] refCount) {
this.refCount = refCount;
return (T) this;
}
// location of the points being tracked, if not directmapped
// used for serialization
public T locationList(int[] locationList) {
this.locationList = locationList;
return (T) this;
}
public T store(float[] store) {
this.store = store;
return (T) this;
}
// location of where points can be written
// used for serialization
public T startOfFreeSegment(int startOfFreeSegment) {
this.startOfFreeSegment = startOfFreeSegment;
return (T) this;
}
// the next timeStamp to accept
// used for serialization
public T nextTimeStamp(long nextTimeStamp) {
this.nextTimeStamp = nextTimeStamp;
return (T) this;
}
public PointStore build() {
if (shingleSize * capacity < Character.MAX_VALUE) {
return new PointStoreSmall(this);
} else {
return new PointStoreLarge(this);
}
}
}
    /**
     * Constructs a point store either fresh (when the builder carries no saved
     * state) or by restoring serialized state from a mapper.
     *
     * @param builder a configured builder; refCount/locationList/knownShingle are
     *            only present when restoring from serialized state
     */
    public PointStore(PointStore.Builder builder) {
        checkArgument(builder.dimensions > 0, "dimensions must be greater than 0");
        checkArgument(builder.capacity > 0, "capacity must be greater than 0");
        checkArgument(builder.shingleSize == 1 || builder.dimensions == builder.shingleSize
                || builder.dimensions % builder.shingleSize == 0, "incorrect use of shingle size");
        /**
         * the following checks are due to mappers (kept for future)
         */
        if (builder.refCount != null || builder.locationList != null || builder.knownShingle != null) {
            checkArgument(builder.refCount != null, "reference count must be present");
            checkArgument(builder.locationList != null, "location list must be present");
            checkArgument(builder.refCount.length == builder.indexCapacity, "incorrect reference count length");
            // following may change if IndexManager is dynamically resized as well
            checkArgument(builder.locationList.length == builder.indexCapacity, " incorrect length of locations");
            checkArgument(
                    builder.knownShingle == null
                            || builder.internalShinglingEnabled && builder.knownShingle.length == builder.dimensions,
                    "incorrect shingling information");
        }
        this.shingleSize = builder.shingleSize;
        this.dimensions = builder.dimensions;
        this.internalShinglingEnabled = builder.internalShinglingEnabled;
        this.rotationEnabled = builder.internalRotationEnabled;
        this.baseDimension = this.dimensions / this.shingleSize;
        this.capacity = builder.capacity;
        this.refCountMap = new HashMap<>();
        if (builder.refCount == null) {
            // fresh store: start at the requested initial size (defaults to capacity)
            int size = (int) builder.initialPointStoreSize.orElse(builder.capacity);
            currentStoreCapacity = size;
            this.indexManager = new IndexIntervalManager(size);
            startOfFreeSegment = 0;
            refCount = new byte[size];
            if (internalShinglingEnabled) {
                nextSequenceIndex = 0;
                internalShingle = new float[dimensions];
            }
            store = new float[currentStoreCapacity * dimensions];
        } else {
            // restore: counts up to 255 go in the byte array, the excess spills
            // into refCountMap (see incrementRefCount/decrementRefCount)
            this.refCount = new byte[builder.refCount.length];
            for (int i = 0; i < refCount.length; i++) {
                if (builder.refCount[i] >= 0 && builder.refCount[i] <= 255) {
                    refCount[i] = (byte) builder.refCount[i];
                } else if (builder.refCount[i] > 255) {
                    refCount[i] = (byte) 255;
                    refCountMap.put(i, builder.refCount[i] - 255);
                }
            }
            this.startOfFreeSegment = builder.startOfFreeSegment;
            this.nextSequenceIndex = builder.nextTimeStamp;
            this.currentStoreCapacity = builder.currentStoreCapacity;
            if (internalShinglingEnabled) {
                this.internalShingle = (builder.knownShingle != null)
                        ? Arrays.copyOf(toFloatArray(builder.knownShingle), dimensions)
                        : new float[dimensions];
            }
            indexManager = new IndexIntervalManager(builder.refCount, builder.indexCapacity);
            store = (builder.store == null) ? new float[currentStoreCapacity * dimensions] : builder.store;
        }
    }
void resizeStore() {
int maxCapacity = (rotationEnabled) ? 2 * capacity : capacity;
int newCapacity = (int) Math.floor(Math.min(1.1 * currentStoreCapacity, maxCapacity));
if (newCapacity > currentStoreCapacity) {
float[] newStore = new float[newCapacity * dimensions];
System.arraycopy(store, 0, newStore, 0, currentStoreCapacity * dimensions);
currentStoreCapacity = newCapacity;
store = newStore;
}
}
boolean checkShingleAlignment(int location, float[] point) {
boolean test = (location - dimensions + baseDimension >= 0);
for (int i = 0; i < dimensions - baseDimension && test; i++) {
test = (((float) point[i]) == store[location - dimensions + baseDimension + i]);
}
return test;
}
void copyPoint(float[] point, int src, int location, int length) {
for (int i = 0; i < length; i++) {
store[location + i] = point[src + i];
}
}
    // verifies that the index currently holds a live point; throws otherwise
    protected abstract void checkFeasible(int index);
    /**
     * Get a copy of the point at the given index.
     *
     * @param index An index value corresponding to a storage location in this point
     *              store.
     * @return a copy of the point stored at the given index.
     * @throws IllegalArgumentException if the index value is not valid.
     * @throws IllegalArgumentException if the current reference count for this
     *                                  index is nonpositive.
     */
    @Override
    public float[] getNumericVector(int index) {
        checkArgument(index >= 0 && index < locationListLength(), " index not supported by store");
        int address = getLocation(index);
        checkFeasible(index);
        if (!rotationEnabled) {
            return Arrays.copyOfRange(store, address, address + dimensions);
        } else {
            // un-rotate: slot (address + i) % dimensions receives the i-th stored value
            float[] answer = new float[dimensions];
            for (int i = 0; i < dimensions; i++) {
                answer[(address + i) % dimensions] = store[address + i];
            }
            return answer;
        }
    }
    // printable form of the point at the given index; used in error messages
    public String toString(int index) {
        return Arrays.toString(getNumericVector(index));
    }
    // moves a block of values leftwards within the store; the guard (dest < source)
    // makes the forward copy safe for overlapping ranges and a no-op otherwise
    void copyTo(int dest, int source, int length) {
        if (dest < source) {
            for (int i = 0; i < length; i++) {
                store[dest + i] = store[source + i];
            }
        }
    }
    // entry point for constructing a point store
    public static Builder builder() {
        return new Builder();
    }
    /**
     * a function that exposes an L1 clustering of the points stored in pointstore
     *
     * @param maxAllowed the maximum number of clusters one is
     *                                 interested in
     * @param shrinkage a parameter used in CURE algorithm that can
     *                                 produce a combination of behaviors (=1
     *                                 corresponds to centroid clustering, =0
     *                                 resembles robust Minimum Spanning Tree)
     * @param numberOfRepresentatives another parameter used to control the
     *                                 plausible (potentially non-spherical) shapes
     *                                 of the clusters
     * @param separationRatio a parameter that controls how aggressively we
     *                                 go below maxAllowed -- this is often set to a
     *                                 DEFAULT_SEPARATION_RATIO_FOR_MERGE
     * @param distance a distance function
     * @param previous a (possibly null) list of previous clusters
     *                                 which can be used to seed the current clusters
     *                                 to ensure some smoothness
     * @return a list of clusters
     */
    public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,
            double separationRatio, BiFunction<float[], float[], Double> distance, List<ICluster<float[]>> previous) {
        // weight each live point by its reference count across trees
        int[] counts = getRefCount();
        ArrayList<Weighted<Integer>> refs = new ArrayList<>();
        for (int i = 0; i < counts.length; i++) {
            if (counts[i] != 0) {
                refs.add(new Weighted<>(i, (float) counts[i]));
            }
        }
        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b,
                shrinkage, numberOfRepresentatives);
        return iterativeClustering(maxAllowed, 4 * maxAllowed, 1, refs, this::getNumericVector, distance,
                clusterInitializer, 42, false, true, separationRatio, previous);
    }
}
| 551 |
0 | Create_ds/random-cut-forest-by-aws/Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json | Create_ds/random-cut-forest-by-aws/Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonResource.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.serialize.json.v1;
import lombok.Getter;
@Getter
public enum V1JsonResource {
    // test fixtures: resource file name, dimensions, number of trees, sample size
    FOREST_1("forest_1.json", 1, 25, 128), FOREST_2("forest_2.json", 4, 40, 256);
    private final String resource;
    private final int dimensions;
    private final int numberOfTrees;
    private final int sampleSize;
    V1JsonResource(String resource, int dimensions, int numberOfTrees, int sampleSize) {
        this.resource = resource;
        this.dimensions = dimensions;
        this.numberOfTrees = numberOfTrees;
        this.sampleSize = sampleSize;
    }
}
| 552 |
0 | Create_ds/random-cut-forest-by-aws/Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json | Create_ds/random-cut-forest-by-aws/Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonToV3StateConverterTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.serialize.json.v1;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.fasterxml.jackson.databind.ObjectMapper;
public class V1JsonToV3StateConverterTest {
    private V1JsonToV3StateConverter converter;
    // create a fresh converter before each test
    @BeforeEach
    public void setUp() {
        converter = new V1JsonToV3StateConverter();
    }
@ParameterizedTest
@MethodSource("args")
public void testConvert(V1JsonResource jsonResource, Precision precision) {
String resource = jsonResource.getResource();
try (InputStream is = V1JsonToV3StateConverterTest.class.getResourceAsStream(jsonResource.getResource());
BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));) {
StringBuilder b = new StringBuilder();
String line;
while ((line = rr.readLine()) != null) {
b.append(line);
}
String json = b.toString();
RandomCutForestState state = converter.convert(json, precision);
assertEquals(jsonResource.getDimensions(), state.getDimensions());
assertEquals(jsonResource.getNumberOfTrees(), state.getNumberOfTrees());
assertEquals(jsonResource.getSampleSize(), state.getSampleSize());
RandomCutForest forest = new RandomCutForestMapper().toModel(state, 0);
assertEquals(jsonResource.getDimensions(), forest.getDimensions());
assertEquals(jsonResource.getNumberOfTrees(), forest.getNumberOfTrees());
assertEquals(jsonResource.getSampleSize(), forest.getSampleSize());
// perform a simple validation of the deserialized forest by update and scoring
// with a few points
Random random = new Random(0);
for (int i = 0; i < 100; i++) {
double[] point = getPoint(jsonResource.getDimensions(), random);
double score = forest.getAnomalyScore(point);
assertTrue(score > 0);
forest.update(point);
}
String newString = new ObjectMapper().writeValueAsString(new RandomCutForestMapper().toState(forest));
System.out.println(" Old size " + json.length() + ", new Size " + newString.length()
+ ", improvement factor " + json.length() / newString.length());
} catch (IOException e) {
fail("Unable to load JSON resource");
}
}
@ParameterizedTest
@MethodSource("args")
public void testMerge(V1JsonResource jsonResource, Precision precision) {
String resource = jsonResource.getResource();
try (InputStream is = V1JsonToV3StateConverterTest.class.getResourceAsStream(jsonResource.getResource());
BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));) {
StringBuilder b = new StringBuilder();
String line;
while ((line = rr.readLine()) != null) {
b.append(line);
}
String json = b.toString();
int number = new Random().nextInt(5) + 1;
int testNumberOfTrees = Math.min(100,
1 + new Random().nextInt(number * jsonResource.getNumberOfTrees() - 1));
ArrayList<String> models = new ArrayList<>();
for (int i = 0; i < number; i++) {
models.add(json);
}
RandomCutForestState state = converter.convert(models, testNumberOfTrees, precision).get();
assertEquals(jsonResource.getDimensions(), state.getDimensions());
assertEquals(testNumberOfTrees, state.getNumberOfTrees());
assertEquals(jsonResource.getSampleSize(), state.getSampleSize());
RandomCutForest forest = new RandomCutForestMapper().toModel(state, 0);
assertEquals(jsonResource.getDimensions(), forest.getDimensions());
assertEquals(testNumberOfTrees, forest.getNumberOfTrees());
assertEquals(jsonResource.getSampleSize(), forest.getSampleSize());
// perform a simple validation of the deserialized forest by update and scoring
// with a few points
Random random = new Random(0);
for (int i = 0; i < 100; i++) {
double[] point = getPoint(jsonResource.getDimensions(), random);
double score = forest.getAnomalyScore(point);
assertTrue(score > 0);
forest.update(point);
}
int expectedSize = (int) Math
.floor(1.0 * testNumberOfTrees * json.length() / (number * jsonResource.getNumberOfTrees()));
String newString = new ObjectMapper().writeValueAsString(new RandomCutForestMapper().toState(forest));
System.out.println(" Copied " + number + " times, old number of trees " + jsonResource.getNumberOfTrees()
+ ", new trees " + testNumberOfTrees + ", Expected Old size " + expectedSize + ", new Size "
+ newString.length());
} catch (IOException e) {
fail("Unable to load JSON resource");
}
}
private double[] getPoint(int dimensions, Random random) {
double[] point = new double[dimensions];
for (int i = 0; i < point.length; i++) {
point[i] = random.nextDouble();
}
return point;
}
static Stream<Arguments> args() {
return jsonParams().flatMap(
classParameter -> precision().map(testParameter -> Arguments.of(classParameter, testParameter)));
}
static Stream<Precision> precision() {
return Stream.of(Precision.FLOAT_32);
}
static Stream<V1JsonResource> jsonParams() {
return Stream.of(V1JsonResource.FOREST_1, V1JsonResource.FOREST_2);
}
}
| 553 |
0 | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v1/V1SerializedRandomCutForest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.serialize.json.v1;
/**
* Serialized RCF for internal use only.
*/
/**
 * Java-bean image of a version 1 serialized Random Cut Forest. For internal
 * use only: instances are populated from V1 JSON and read by the V1-to-V3
 * state converters. All accessors are plain bean getters/setters.
 */
public class V1SerializedRandomCutForest {

    // forest-level configuration captured by the V1 serialization format
    private Random rng;
    private int dimensions;
    private int sampleSize;
    private int outputAfter;
    private int numberOfTrees;
    private double lambda;
    private boolean storeSequenceIndexesEnabled;
    private boolean centerOfMassEnabled;
    private boolean parallelExecutionEnabled;
    private int threadPoolSize;
    private Executor executor;

    public Random getRng() {
        return rng;
    }

    public void setRng(Random rng) {
        this.rng = rng;
    }

    public int getDimensions() {
        return dimensions;
    }

    public void setDimensions(int dimensions) {
        this.dimensions = dimensions;
    }

    public int getSampleSize() {
        return sampleSize;
    }

    public void setSampleSize(int sampleSize) {
        this.sampleSize = sampleSize;
    }

    public int getOutputAfter() {
        return outputAfter;
    }

    public void setOutputAfter(int outputAfter) {
        this.outputAfter = outputAfter;
    }

    public int getNumberOfTrees() {
        return numberOfTrees;
    }

    public void setNumberOfTrees(int numberOfTrees) {
        this.numberOfTrees = numberOfTrees;
    }

    public double getLambda() {
        return lambda;
    }

    public void setLambda(double lambda) {
        this.lambda = lambda;
    }

    public boolean isStoreSequenceIndexesEnabled() {
        return storeSequenceIndexesEnabled;
    }

    public void setStoreSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
        this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
    }

    public boolean isCenterOfMassEnabled() {
        return centerOfMassEnabled;
    }

    public void setCenterOfMassEnabled(boolean centerOfMassEnabled) {
        this.centerOfMassEnabled = centerOfMassEnabled;
    }

    public boolean isParallelExecutionEnabled() {
        return parallelExecutionEnabled;
    }

    public void setParallelExecutionEnabled(boolean parallelExecutionEnabled) {
        this.parallelExecutionEnabled = parallelExecutionEnabled;
    }

    public int getThreadPoolSize() {
        return threadPoolSize;
    }

    public void setThreadPoolSize(int threadPoolSize) {
        this.threadPoolSize = threadPoolSize;
    }

    public Executor getExecutor() {
        return executor;
    }

    public void setExecutor(Executor executor) {
        this.executor = executor;
    }

    /** Empty placeholder bean for the serialized random number generator. */
    private static class Random {
    }

    /** Per-tree configuration as serialized by V1. */
    private static class Tree {
        private boolean storeSequenceIndexesEnabled;
        private boolean centerOfMassEnabled;
        private Random random;

        public boolean isStoreSequenceIndexesEnabled() {
            return storeSequenceIndexesEnabled;
        }

        public void setStoreSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
        }

        public boolean isCenterOfMassEnabled() {
            return centerOfMassEnabled;
        }

        public void setCenterOfMassEnabled(boolean centerOfMassEnabled) {
            this.centerOfMassEnabled = centerOfMassEnabled;
        }

        public Random getRandom() {
            return random;
        }

        public void setRandom(Random random) {
            this.random = random;
        }
    }

    /** A single sampled point: coordinates, sampler weight, and sequence index. */
    static class WeightedSamples {
        private double[] point;
        private double weight;
        private long sequenceIndex;

        public double[] getPoint() {
            return point;
        }

        public void setPoint(double[] point) {
            this.point = point;
        }

        public double getWeight() {
            return weight;
        }

        public void setWeight(double weight) {
            this.weight = weight;
        }

        public long getSequenceIndex() {
            return sequenceIndex;
        }

        public void setSequenceIndex(long sequenceIndex) {
            this.sequenceIndex = sequenceIndex;
        }
    }

    /** The serialized state of one time-decayed reservoir sampler. */
    static class Sampler {
        private WeightedSamples[] weightedSamples;
        private int sampleSize;
        private double lambda;
        private Random random;
        private long entriesSeen;

        public WeightedSamples[] getWeightedSamples() {
            return weightedSamples;
        }

        public void setWeightedSamples(WeightedSamples[] weightedSamples) {
            this.weightedSamples = weightedSamples;
        }

        public int getSampleSize() {
            return sampleSize;
        }

        public void setSampleSize(int sampleSize) {
            this.sampleSize = sampleSize;
        }

        public double getLambda() {
            return lambda;
        }

        public void setLambda(double lambda) {
            this.lambda = lambda;
        }

        public Random getRandom() {
            return random;
        }

        public void setRandom(Random random) {
            this.random = random;
        }

        public long getEntriesSeen() {
            return entriesSeen;
        }

        public void setEntriesSeen(long entriesSeen) {
            this.entriesSeen = entriesSeen;
        }
    }

    /** Pairs a sampler with the tree it feeds. */
    static class TreeUpdater {
        private Sampler sampler;
        private Tree tree;

        public Sampler getSampler() {
            return sampler;
        }

        public void setSampler(Sampler sampler) {
            this.sampler = sampler;
        }

        public Tree getTree() {
            return tree;
        }

        public void setTree(Tree tree) {
            this.tree = tree;
        }
    }

    /** The executor payload: all tree updaters plus bookkeeping counters. */
    static class Exec {
        private TreeUpdater[] treeUpdaters;
        private long totalUpdates;
        private int threadPoolSize;

        public TreeUpdater[] getTreeUpdaters() {
            return treeUpdaters;
        }

        public void setTreeUpdaters(TreeUpdater[] treeUpdaters) {
            this.treeUpdaters = treeUpdaters;
        }

        public long getTotalUpdates() {
            return totalUpdates;
        }

        public void setTotalUpdates(long totalUpdates) {
            this.totalUpdates = totalUpdates;
        }

        public int getThreadPoolSize() {
            return threadPoolSize;
        }

        public void setThreadPoolSize(int threadPoolSize) {
            this.threadPoolSize = threadPoolSize;
        }
    }

    /**
     * Wrapper around {@link Exec}; the accessor names (including the
     * underscore) match the V1 JSON field names and must not change.
     */
    static class Executor {
        private String executor_type;
        private Exec executor;

        public String getExecutor_type() {
            return executor_type;
        }

        public void setExecutor_type(String executor_type) {
            this.executor_type = executor_type;
        }

        public Exec getExecutor() {
            return executor;
        }

        public void setExecutor(Exec executor) {
            this.executor = executor;
        }
    }
}
| 554 |
0 | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonToV3StateConverter.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.serialize.json.v1;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import java.io.IOException;
import java.io.Reader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.ExecutionContext;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.state.sampler.CompactSamplerState;
import com.amazon.randomcutforest.state.store.PointStoreMapper;
import com.amazon.randomcutforest.state.store.PointStoreState;
import com.amazon.randomcutforest.store.IPointStore;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.tree.ITree;
import com.amazon.randomcutforest.tree.RandomCutTree;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Converts forests serialized in the V1 (RCF 1.0) JSON format into
 * {@link RandomCutForestState} objects understood by the V3 mappers.
 */
public class V1JsonToV3StateConverter {

    private final ObjectMapper mapper = new ObjectMapper();

    /**
     * Converts a single V1 JSON document.
     *
     * @param json      the V1 serialized forest
     * @param precision target precision; only FLOAT_32 is accepted in v3
     * @return the converted state
     * @throws IOException if the JSON cannot be parsed
     */
    public RandomCutForestState convert(String json, Precision precision) throws IOException {
        checkArgument(precision == Precision.FLOAT_32, "float 64 is deprecated in v3");
        V1SerializedRandomCutForest forest = mapper.readValue(json, V1SerializedRandomCutForest.class);
        return convert(forest, precision);
    }

    /**
     * Parses and merges several V1 JSON documents (assumed to share model
     * parameters) into a single state with the requested number of trees.
     *
     * @param jsons         the V1 serialized forests
     * @param numberOfTrees number of trees in the merged forest
     * @param precision     target precision; only FLOAT_32 is accepted in v3
     * @return the merged state, or {@link Optional#empty()} when the input
     *         forests together contain fewer than {@code numberOfTrees} trees
     * @throws IOException if any JSON cannot be parsed
     */
    public Optional<RandomCutForestState> convert(ArrayList<String> jsons, int numberOfTrees, Precision precision)
            throws IOException {
        // consistency: the other public entry points reject FLOAT_64 up front
        checkArgument(precision == Precision.FLOAT_32, "float 64 is deprecated in v3");
        ArrayList<V1SerializedRandomCutForest> forests = new ArrayList<>(jsons.size());
        int sum = 0;
        for (String json : jsons) {
            V1SerializedRandomCutForest forest = mapper.readValue(json, V1SerializedRandomCutForest.class);
            forests.add(forest);
            sum += forest.getNumberOfTrees();
        }
        if (sum < numberOfTrees) {
            return Optional.empty();
        }
        return Optional.ofNullable(convert(forests, numberOfTrees, precision));
    }

    /**
     * Converts a single V1 JSON document read from a {@link Reader}.
     *
     * @param reader    source of the V1 serialized forest
     * @param precision target precision; only FLOAT_32 is accepted in v3
     * @return the converted state
     * @throws IOException if the JSON cannot be read or parsed
     */
    public RandomCutForestState convert(Reader reader, Precision precision) throws IOException {
        checkArgument(precision == Precision.FLOAT_32, "float 64 is deprecated in v3");
        V1SerializedRandomCutForest forest = mapper.readValue(reader, V1SerializedRandomCutForest.class);
        return convert(forest, precision);
    }

    /**
     * Converts a single V1 JSON document fetched from a {@link URL}.
     *
     * @param url       location of the V1 serialized forest
     * @param precision target precision; only FLOAT_32 is accepted in v3
     * @return the converted state
     * @throws IOException if the JSON cannot be read or parsed
     */
    public RandomCutForestState convert(URL url, Precision precision) throws IOException {
        checkArgument(precision == Precision.FLOAT_32, "float 64 is deprecated in v3");
        V1SerializedRandomCutForest forest = mapper.readValue(url, V1SerializedRandomCutForest.class);
        return convert(forest, precision);
    }

    /**
     * Converts an already-deserialized V1 forest, keeping its tree count.
     *
     * @param serializedForest the deserialized V1 forest
     * @param precision        target precision
     * @return the converted state
     */
    public RandomCutForestState convert(V1SerializedRandomCutForest serializedForest, Precision precision) {
        return convert(Collections.singletonList(serializedForest), serializedForest.getNumberOfTrees(), precision);
    }

    /**
     * Accumulates V1 samplers into a shared point store plus per-sampler
     * compact states. A scratch "global" tree is used to detect points that
     * already exist in the store so that duplicates share a single entry
     * (reference counts are merged when {@code addPoint} returns an existing
     * index).
     */
    static class SamplerConverter {
        private final IPointStore<Integer, float[]> pointStore;
        private final List<CompactSamplerState> compactSamplerStates;
        private final ITree<Integer, float[]> globalTree;
        // samplers beyond this count are silently ignored
        private final int maxNumberOfTrees;

        /**
         * @param dimensions       point dimension
         * @param capacity         point store capacity
         * @param precision        retained for signature compatibility; the
         *                         v3 point store built here holds floats
         * @param maxNumberOfTrees maximum number of samplers to accept
         */
        public SamplerConverter(int dimensions, int capacity, Precision precision, int maxNumberOfTrees) {
            pointStore = PointStore.builder().dimensions(dimensions).capacity(capacity).shingleSize(1)
                    .initialSize(capacity).build();
            globalTree = new RandomCutTree.Builder().pointStoreView(pointStore).capacity(pointStore.getCapacity() + 1)
                    .storeSequenceIndexesEnabled(false).centerOfMassEnabled(false).boundingBoxCacheFraction(1.0)
                    .build();
            compactSamplerStates = new ArrayList<>();
            this.maxNumberOfTrees = maxNumberOfTrees;
        }

        /** Returns the state of the shared point store accumulated so far. */
        public PointStoreState getPointStoreState(Precision precision) {
            return new PointStoreMapper().toState((PointStore) pointStore);
        }

        /**
         * Converts one V1 sampler into a {@link CompactSamplerState}, adding
         * its points to the shared store; a no-op once {@code maxNumberOfTrees}
         * samplers have been converted.
         */
        public void addSampler(V1SerializedRandomCutForest.Sampler sampler) {
            if (compactSamplerStates.size() < maxNumberOfTrees) {
                V1SerializedRandomCutForest.WeightedSamples[] samples = sampler.getWeightedSamples();
                int[] pointIndex = new int[samples.length];
                float[] weight = new float[samples.length];
                long[] sequenceIndex = new long[samples.length];
                for (int i = 0; i < samples.length; i++) {
                    V1SerializedRandomCutForest.WeightedSamples sample = samples[i];
                    float[] point = toFloatArray(sample.getPoint());
                    Integer index = pointStore.add(point, sample.getSequenceIndex());
                    pointIndex[i] = globalTree.addPoint(index, 0L);
                    if (pointIndex[i] != index) {
                        // the point already existed: transfer the reference to
                        // the canonical index and release the new copy
                        pointStore.incrementRefCount(pointIndex[i]);
                        pointStore.decrementRefCount(index);
                    }
                    weight[i] = (float) sample.getWeight();
                    sequenceIndex[i] = sample.getSequenceIndex();
                }
                CompactSamplerState samplerState = new CompactSamplerState();
                samplerState.setSize(samples.length);
                samplerState.setCapacity(sampler.getSampleSize());
                samplerState.setTimeDecay(sampler.getLambda());
                samplerState.setPointIndex(pointIndex);
                samplerState.setWeight(weight);
                samplerState.setSequenceIndex(sequenceIndex);
                samplerState.setSequenceIndexOfMostRecentTimeDecayUpdate(0L);
                samplerState.setMaxSequenceIndex(sampler.getEntriesSeen());
                samplerState.setInitialAcceptFraction(1.0);
                compactSamplerStates.add(samplerState);
            }
        }
    }

    /**
     * the function merges a collection of RCF-1.0 models with same model parameters
     * and fixes the number of trees in the new model (which has to be less or equal
     * than the sum of the old models) The conversion uses the execution context of
     * the first forest and can be adjusted subsequently by setters
     *
     * @param serializedForests A non-empty list of forests (together having more
     *                          trees than numberOfTrees)
     * @param numberOfTrees     the new number of trees
     * @param precision         the precision of the new forest
     * @return a merged RCF with the first numberOfTrees trees
     */
    public RandomCutForestState convert(List<V1SerializedRandomCutForest> serializedForests, int numberOfTrees,
            Precision precision) {
        checkArgument(!serializedForests.isEmpty(), "incorrect usage of convert");
        checkArgument(numberOfTrees > 0, "incorrect parameter");
        int sum = 0;
        for (V1SerializedRandomCutForest forest : serializedForests) {
            sum += forest.getNumberOfTrees();
        }
        checkArgument(sum >= numberOfTrees, "incorrect parameters");

        // model parameters and execution context are taken from the first forest
        V1SerializedRandomCutForest first = serializedForests.get(0);
        RandomCutForestState state = new RandomCutForestState();
        state.setNumberOfTrees(numberOfTrees);
        state.setDimensions(first.getDimensions());
        state.setTimeDecay(first.getLambda());
        state.setSampleSize(first.getSampleSize());
        state.setShingleSize(1);
        state.setCenterOfMassEnabled(first.isCenterOfMassEnabled());
        state.setOutputAfter(first.getOutputAfter());
        state.setStoreSequenceIndexesEnabled(first.isStoreSequenceIndexesEnabled());
        state.setTotalUpdates(first.getExecutor().getExecutor().getTotalUpdates());
        state.setCompact(true);
        state.setInternalShinglingEnabled(false);
        state.setBoundingBoxCacheFraction(1.0);
        state.setSaveSamplerStateEnabled(true);
        state.setSaveTreeStateEnabled(false);
        state.setSaveCoordinatorStateEnabled(true);
        state.setPrecision(precision.name());
        state.setCompressed(false);
        state.setPartialTreeState(false);

        ExecutionContext executionContext = new ExecutionContext();
        executionContext.setParallelExecutionEnabled(first.isParallelExecutionEnabled());
        executionContext.setThreadPoolSize(first.getThreadPoolSize());
        state.setExecutionContext(executionContext);

        // convert the first numberOfTrees samplers across all input forests
        SamplerConverter samplerConverter = new SamplerConverter(state.getDimensions(),
                state.getNumberOfTrees() * state.getSampleSize() + 1, precision, numberOfTrees);
        serializedForests.stream().flatMap(f -> Arrays.stream(f.getExecutor().getExecutor().getTreeUpdaters()))
                .limit(numberOfTrees).map(V1SerializedRandomCutForest.TreeUpdater::getSampler)
                .forEach(samplerConverter::addSampler);

        state.setPointStoreState(samplerConverter.getPointStoreState(precision));
        state.setCompactSamplerStates(samplerConverter.compactSamplerStates);
        return state;
    }
}
| 555 |
0 | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json | Create_ds/random-cut-forest-by-aws/Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v2/V2StateToV3ForestConverter.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.serialize.json.v2;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.state.Version.V2_0;
import static com.amazon.randomcutforest.state.Version.V2_1;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.RandomCutForestState;
/**
 * Rebuilds a live V3 {@link RandomCutForest} from a V2 serialized state.
 */
public class V2StateToV3ForestConverter {

    /**
     * Converts the given V2 state into a forest.
     *
     * @param v2State a state whose version is V2_0 or V2_1
     * @return the reconstructed forest
     */
    public RandomCutForest convert(RandomCutForestState v2State) {
        String stateVersion = v2State.getVersion();
        boolean supportedVersion = stateVersion.equals(V2_0) || stateVersion.equals(V2_1);
        checkArgument(supportedVersion, "incorrect convertor");
        RandomCutForestMapper forestMapper = new RandomCutForestMapper();
        // honor the compression flag recorded in the serialized state
        forestMapper.setCompressionEnabled(v2State.isCompressed());
        return forestMapper.toModel(v2State);
    }
}
| 556 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ThresholdedRandomCutForestTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;
import static com.amazon.randomcutforest.config.ImputationMethod.LINEAR;
import static com.amazon.randomcutforest.config.ImputationMethod.NEXT;
import static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;
import static com.amazon.randomcutforest.config.ImputationMethod.RCF;
import static com.amazon.randomcutforest.config.ImputationMethod.ZERO;
import static com.amazon.randomcutforest.config.TransformMethod.DIFFERENCE;
import static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;
import static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE_DIFFERENCE;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;
public class ThresholdedRandomCutForestTest {
    /**
     * TIME_AUGMENTED with shingle size > 1: explicitly disabling internal
     * shingling must be rejected, enabling it (or leaving it unset) is
     * accepted, and useImputedFraction is not allowed in this mode.
     */
    @Test
    public void testConfigAugmentOne() {
        int sampleSize = 256;
        int baseDimensions = 2;
        int shingleSize = 4;
        int dimensions = baseDimensions * shingleSize;
        long seed = new Random().nextLong();
        // external shingling with TIME_AUGMENTED is rejected by the builder
        assertThrows(IllegalArgumentException.class,
                () -> ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize).dimensions(dimensions)
                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)
                        .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());
        // have to enable internal shingling or keep it unspecified
        assertDoesNotThrow(
                () -> ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize).dimensions(dimensions)
                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)
                        .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build());
        // imputefraction not allowed
        assertThrows(IllegalArgumentException.class,
                () -> new ThresholdedRandomCutForest.Builder<>().compact(true).sampleSize(sampleSize)
                        .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                        .forestMode(ForestMode.TIME_AUGMENTED).useImputedFraction(0.5).internalShinglingEnabled(true)
                        .shingleSize(shingleSize).anomalyRate(0.01).build());
        // a valid configuration initializes the preprocessor's timestamp buffer
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).shingleSize(shingleSize).anomalyRate(0.01)
                .build();
        assertNotNull(((Preprocessor) forest.getPreprocessor()).getInitialTimeStamps());
    }
@Test
public void testConfigAugmentTwo() {
int baseDimensions = 2;
int shingleSize = 1; // passes due to this
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
assertDoesNotThrow(() -> {
ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(false).shingleSize(shingleSize)
.anomalyRate(0.01).build();
assertEquals(forest.getForest().getDimensions(), dimensions + 1);
});
ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).shingleSize(shingleSize).anomalyRate(0.01).build();
assertTrue(forest.getForest().isInternalShinglingEnabled()); // default on
}
@Test
public void testConfigImpute() {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 4;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
// have to enable internal shingling or keep it unfixed
assertThrows(IllegalArgumentException.class,
() -> new ThresholdedRandomCutForest.Builder<>().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
.internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());
assertDoesNotThrow(() -> new ThresholdedRandomCutForest.Builder<>().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
.shingleSize(shingleSize).anomalyRate(0.01).build());
}
@Test
public void testConfigStandard() {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 4;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
// have to enable internal shingling or keep it unfixed
assertThrows(IllegalArgumentException.class,
() -> ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STANDARD)
.useImputedFraction(0.5).internalShinglingEnabled(false).shingleSize(shingleSize)
.anomalyRate(0.01).build());
assertDoesNotThrow(() -> {
ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions).precision(Precision.FLOAT_32)
.randomSeed(seed).forestMode(ForestMode.STANDARD).internalShinglingEnabled(false)
.shingleSize(shingleSize).anomalyRate(0.01).build();
});
assertThrows(IllegalArgumentException.class, () -> {
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)
.transformMethod(NORMALIZE).startNormalization(111).stopNormalization(100).build();
});
// change if baseDimension != 2
double[] testOne = new double[] { 0 };
double[] testTwo = new double[] { 0, -1 };
double[] testThree = new double[] { new Random().nextDouble(), new Random().nextDouble() };
double[] testFour = new double[] { new Random().nextDouble(), new Random().nextDouble() };
double[] testFive = new double[] { new Random().nextDouble(), new Random().nextDouble() };
double[] testSix = new double[] { new Random().nextDouble(), new Random().nextDouble() };
assertThrows(IllegalArgumentException.class, () -> {
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)
.transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testOne).build();
});
assertThrows(IllegalArgumentException.class, () -> {
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)
.transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testTwo).build();
});
assertDoesNotThrow(() -> {
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)
.transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testThree)
.ignoreNearExpectedFromBelow(testFour).ignoreNearExpectedFromAboveByRatio(testFive)
.ignoreNearExpectedFromBelowByRatio(testSix).build();
double[] array = forest.getPredictorCorrector().getIgnoreNearExpected();
assert (array.length == 4 * baseDimensions);
assert (array[0] == testThree[0]);
assert (array[1] == testThree[1]);
assert (array[2] == testFour[0]);
assert (array[3] == testFour[1]);
assert (array[4] == testFive[0]);
assert (array[5] == testFive[1]);
assert (array[6] == testSix[0]);
assert (array[7] == testSix[1]);
double random = new Random().nextDouble();
assertThrows(IllegalArgumentException.class, () -> forest.predictorCorrector.setSamplingRate(-1));
assertDoesNotThrow(() -> forest.predictorCorrector.setSamplingRate(random));
assertEquals(forest.predictorCorrector.getSamplingRate(), random, 1e-10);
long newSeed = forest.predictorCorrector.getRandomSeed();
assertEquals(seed, newSeed);
assertFalse(forest.predictorCorrector.autoAdjust);
assertNull(forest.predictorCorrector.getDeviations());
});
assertDoesNotThrow(() -> {
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)
.transformMethod(NORMALIZE).autoAdjust(true).build();
assertTrue(forest.predictorCorrector.autoAdjust);
assert (forest.predictorCorrector.getDeviations().length == 2 * baseDimensions);
});
ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STANDARD).shingleSize(shingleSize)
.anomalyRate(0.01).transformMethod(NORMALIZE).startNormalization(111).stopNormalization(111).build();
assertTrue(forest.getForest().isInternalShinglingEnabled()); // left to false
assertEquals(((Preprocessor) forest.getPreprocessor()).getInitialValues().length, 111);
assertEquals(((Preprocessor) forest.getPreprocessor()).getInitialTimeStamps().length, 111);
assertEquals(((Preprocessor) forest.getPreprocessor()).getStopNormalization(), 111);
assertEquals(((Preprocessor) forest.getPreprocessor()).getStartNormalization(), 111);
}
@Test
void testImputeConfig() {
    int baseDimensions = 1;
    int shingleSize = 2;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();

    // FIXED_VALUES without any fill values must be rejected
    assertThrows(IllegalArgumentException.class,
            () -> ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                    .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
                    .imputationMethod(ImputationMethod.FIXED_VALUES).normalizeTime(true)
                    .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build());

    // a fill-value array whose length does not match the base dimension is rejected
    assertThrows(IllegalArgumentException.class,
            () -> ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                    .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
                    .imputationMethod(ImputationMethod.FIXED_VALUES).fillValues(new double[] { 0.0, 17.0 })
                    .normalizeTime(true).internalShinglingEnabled(true).shingleSize(shingleSize)
                    .anomalyRate(0.01).build());

    // exactly one fill value for one base dimension is accepted
    assertDoesNotThrow(
            () -> ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                    .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
                    .imputationMethod(ImputationMethod.FIXED_VALUES).fillValues(new double[] { 2.0 })
                    .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build());
}
/**
 * Exercises STREAMING_IMPUTE end to end for every imputation method: a shingle
 * size of 1 is rejected, regularly spaced data is learned, and a large time gap
 * triggers imputation while a value spike is still flagged as an anomaly.
 *
 * Fix: the bare Java {@code assert} statements only run when the JVM is started
 * with -ea; replaced with JUnit assertions so the checks always execute.
 */
@ParameterizedTest
@EnumSource(ImputationMethod.class)
void testImpute(ImputationMethod method) {
    int baseDimensions = 1;
    int shingleSize = 1;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    // shingle size 1 ie not useful for impute
    assertThrows(IllegalArgumentException.class, () -> {
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(method).normalizeTime(true)
                .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build();
    });
    int newShingleSize = 4;
    int newDimensions = baseDimensions * newShingleSize;
    ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(newDimensions)
            .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
            .imputationMethod(method).internalShinglingEnabled(true).shingleSize(newShingleSize).anomalyRate(0.01)
            .useImputedFraction(0.76).fillValues(new double[] { 0 }).build();
    double[] fixedData = new double[] { 1.0 };
    double[] newData = new double[] { 10.0 };
    Random random = new Random(0);
    int count = 0;
    // feed a constant value at (roughly) regular 113-tick intervals
    for (int i = 0; i < 200 + new Random().nextInt(100); i++) {
        forest.process(fixedData, (long) count * 113 + random.nextInt(10));
        ++count;
    }
    AnomalyDescriptor result = forest.process(newData, (long) count * 113 + 1000);
    assertTrue(result.getAnomalyGrade() > 0);
    assertTrue(result.isExpectedValuesPresent());
    if (method != NEXT && method != ZERO && method != FIXED_VALUES) {
        assertEquals(0, result.getRelativeIndex());
        assertArrayEquals(result.getExpectedValuesList()[0], fixedData, 1e-6);
    }
    // the gap is 1000 + 113 which is about 9 times 113
    // but only the first three entries are allowed in with shinglesize 4,
    // after which the imputation is 100% and
    // only at most 76% imputed tuples are allowed in the forest
    // an additional one arise from the actual input
    assertEquals(forest.getForest().getTotalUpdates(), count + 1);
    // triggering consecutive anomalies (no differencing)
    // Note NEXT and LINEAR will have an obvious issue with consecutive anomalies
    if (method != NEXT && method != LINEAR) {
        assertEquals(forest.process(newData, (long) count * 113 + 1113).getAnomalyGrade(), 1.0);
    }
    assertTrue(forest.process(new double[] { 20 }, (long) count * 113 + 1226).getAnomalyGrade() > 0);
    long stamp = (long) count * 113 + 1226;
    // time has to increase
    assertThrows(IllegalArgumentException.class, () -> {
        forest.process(new double[] { 20 }, stamp);
    });
}
/**
 * Cross-product test of transform methods and imputation methods under
 * STREAMING_IMPUTE: after a long run of constant input, a spike after a large
 * time gap should be flagged (except for NEXT/LINEAR which smooth it away),
 * and the expected value should stay close to the constant for RCF/PREVIOUS.
 *
 * Fix: bare Java {@code assert} statements (skipped without -ea) replaced with
 * JUnit assertions so the checks always execute.
 *
 * @param transformMethod transformation applied before the forest
 * @param method          how missing points are imputed
 */
@ParameterizedTest
@MethodSource("args")
void testImpute(TransformMethod transformMethod, ImputationMethod method) {
    int baseDimensions = 1;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
            .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
            .imputationMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)
            .useImputedFraction(0.76).fillValues(new double[] { 1.0 }).transformMethod(transformMethod).build();
    double[] fixedData = new double[] { 1.0 };
    double[] newData = new double[] { 10.0 };
    Random random = new Random();
    int count = 0;
    for (int i = 0; i < 2000 + new Random().nextInt(100); i++) {
        forest.process(fixedData, (long) count * 113 + random.nextInt(10));
        ++count;
    }
    // note every will have an update
    assertEquals(forest.getForest().getTotalUpdates() + shingleSize - 1, count);
    AnomalyDescriptor result = forest.process(newData, (long) count * 113 + 1000);
    if (method != NEXT && method != LINEAR) {
        assertTrue(result.getAnomalyGrade() > 0);
        assertTrue(result.isExpectedValuesPresent());
    }
    // the other impute methods generate too much noise
    if (method == RCF || method == PREVIOUS) {
        assertEquals(result.getExpectedValuesList()[0][0], fixedData[0], 0.05);
    }
    // the gap is 1000 + 113 which is about 9 times 113
    // but only the first three entries are allowed in with shinglesize 4,
    // after which the imputation is 100% and
    // only at most 76% imputed tuples are allowed in the forest
    // an additional one does not arise from the actual input because all the
    // initial
    // entries are imputed and the method involves differencing
    if (transformMethod != DIFFERENCE && transformMethod != NORMALIZE_DIFFERENCE) {
        assertEquals(forest.getForest().getTotalUpdates(), count + 1);
    } else {
        assertEquals(forest.getForest().getTotalUpdates(), count);
    }
}
// cross product of every transform method with every imputation method,
// consumed by the @MethodSource parameterized test above
static Stream<Arguments> args() {
    return transformMethodStream().flatMap(transform -> imputationMethod()
            .map(imputation -> Arguments.of(transform, imputation)));
}
// all imputation methods, used to build the cross product in args()
static Stream<ImputationMethod> imputationMethod() {
    return Stream.of(ImputationMethod.values());
}
// all transform methods, used to build the cross product in args()
static Stream<TransformMethod> transformMethodStream() {
    return Stream.of(TransformMethod.values());
}
/**
 * Serialization round-trip test with a fixed seed: trains a forest on a smooth
 * periodic series (average score stays below 1), maps it to state and back,
 * and verifies the restored model scores an abrupt discontinuity above 1.
 *
 * Fix: bare Java {@code assert} statements (skipped without -ea) replaced with
 * JUnit {@code assertTrue} so the checks always execute.
 */
@Test
void testMapper() {
    double[] initialData = new double[] { 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0,
            23.0, 23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0,
            20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,
            17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0,
            15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 18.0, 18.0, 18.0,
            18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,
            21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 23.0, 23.0, 23.0,
            23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0,
            23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,
            21.0, 21.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,
            19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0,
            17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0,
            16.0, 16.0, 16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,
            15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0,
            13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 15.0,
            16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0,
            20.0, 20.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 22.0, 23.0, 23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0,
            25.0, 25.0, 25.0, 26.0, 26.0, 26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 28.0, 28.0, 28.0, 28.0, 29.0, 29.0,
            29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0,
            28.0, 28.0, 28.0, 28.0, 28.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,
            27.0, 27.0, 27.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0,
            25.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,
            27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0,
            28.0, 28.0, 28.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 25.0,
            25.0, 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0, 23.0, 23.0, 23.0, 22.0,
            22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,
            20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,
            18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,
            18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,
            19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,
            19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,
            20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 21.0, 21.0,
            21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,
            22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0,
            22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,
            21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,
            20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,
            19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0,
            17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 15.0,
            15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 13.0,
            13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 13.0, 13.0, 13.0,
            14.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 18.0,
            18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 21.0, 21.0, 21.0, 22.0, 22.0,
            22.0, 22.0, 22.0, 23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0, 24.0, 25.0, 25.0, 25.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,
            27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,
            26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 25.0, 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0,
            23.0, 23.0, 23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 20.0,
            20.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0,
            16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0,
            13.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,
            15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0,
            17.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,
            19.0, 20.0, 20.0, 20.0, 20.0, 20.0 };
    double[] data = new double[] { 13.0, 20.0, 26.0, 18.0 };
    int shingleSize = 8;
    int numberOfTrees = 30;
    int sampleSize = 256;
    Precision precision = Precision.FLOAT_32;
    int baseDimensions = 1;
    // fixed seed so the score thresholds below are reproducible
    long seed = -3095522926185205814L;
    int dimensions = baseDimensions * shingleSize;
    ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
            .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
            .precision(precision).parallelExecutionEnabled(false).outputAfter(32).internalShinglingEnabled(true)
            .anomalyRate(0.005).initialAcceptFraction(0.125).timeDecay(0.0001).boundingBoxCacheFraction(0)
            .forestMode(ForestMode.STANDARD).build();
    double scoreSum = 0;
    for (double dataPoint : initialData) {
        AnomalyDescriptor result = forest.process(new double[] { dataPoint }, 0L);
        scoreSum += result.getRCFScore();
    }
    // checking average score < 1
    assertTrue(scoreSum < initialData.length);
    ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
    ThresholdedRandomCutForest second = mapper.toModel(mapper.toState(forest));
    for (double dataPoint : data) {
        AnomalyDescriptor result = second.process(new double[] { dataPoint }, 0L);
        // average score jumps due to discontinuity, checking > 1
        assertTrue(result.getRCFScore() > 1.0);
    }
}
/**
 * Checks that a second spike occurring only {@code gap} steps after a first
 * spike is still detected at least 90% of the time.
 *
 * Fixes: bare Java {@code assert} (skipped without -ea) replaced with
 * {@code assertTrue}; removed a stray empty statement and unused local
 * assignments whose results were never read.
 *
 * @param gap number of normal points between the two spikes
 */
@ParameterizedTest
@ValueSource(ints = { 1, 2, 3, 4, 5, 6 })
void smallGap(int gap) {
    int shingleSize = 4;
    int numberOfTrees = 50;
    int sampleSize = 256;
    Precision precision = Precision.FLOAT_32;
    int dataSize = 4 * sampleSize;
    // change this to try different number of attributes,
    // this parameter is not expected to be larger than 5 for this example
    int baseDimensions = 1;
    // 10 trials each
    int numTrials = 10;
    int correct = 0;
    for (int z = 0; z < numTrials; z++) {
        int dimensions = baseDimensions * shingleSize;
        TransformMethod transformMethod = TransformMethod.NORMALIZE;
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).build();
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        Random rng = new Random(seed);
        // baseline: noise around 0.6
        for (int i = 0; i < dataSize; i++) {
            forest.process(new double[] { 0.6 + 0.2 * (2 * rng.nextDouble() - 1) }, 0L);
        }
        // first spike, followed by `gap` normal points
        forest.process(new double[] { 11.2 }, 0L);
        for (int y = 0; y < gap; y++) {
            forest.process(new double[] { 0.6 + 0.2 * (2 * rng.nextDouble() - 1) }, 0L);
        }
        // the second spike should still register as an anomaly
        AnomalyDescriptor result = forest.process(new double[] { 10.0 }, 0L);
        if (result.getAnomalyGrade() > 0) {
            ++correct;
        }
    }
    assertTrue(correct > 0.9 * numTrials);
}
}
| 557 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/TransformTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Tests of anomaly detection behavior across transform methods.
 *
 * Fix: bare Java {@code assert} statements (which only run with -ea) replaced
 * with JUnit {@code assertTrue} so the checks always execute.
 */
public class TransformTest {

    /**
     * Constant input with one injected spike: no anomalies before the spike,
     * the spike itself is flagged, and (except for the differencing
     * transforms, which cascade) few false positives afterwards.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void AnomalyTest(TransformMethod method) {
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 10;
        int length = 40 * sampleSize;
        int totalcount = 0;
        for (int i = 0; i < numTrials; i++) {
            int numberOfTrees = 30 + rng.nextInt(20);
            int outputAfter = 32 + rng.nextInt(50);
            // shingleSize 1 is not recommended for complicated input
            int shingleSize = 1 + rng.nextInt(15);
            int baseDimensions = 1 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)
                    .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter).alertOnce(true)
                    .transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).build();
            int count = 0;
            double[] point = new double[baseDimensions];
            double[] anomalyPoint = new double[baseDimensions];
            for (int j = 0; j < baseDimensions; j++) {
                point[j] = 50 - rng.nextInt(100);
                int sign = (rng.nextDouble() < 0.5) ? -1 : 1;
                anomalyPoint[j] = point[j] + sign * (10 - rng.nextInt(5));
            }
            int anomalyAt = outputAfter + rng.nextInt(length / 2);
            for (int j = 0; j < anomalyAt; j++) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    ++count;
                }
            }
            assertEquals(0, count);
            assertTrue(first.process(anomalyPoint, 0L).getAnomalyGrade() > 0);
            for (int j = anomalyAt + 1; j < length; j++) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    ++count;
                }
            }
            // differencing introduces cascades
            totalcount += count;
        }
        assertTrue(totalcount < numTrials || method == TransformMethod.DIFFERENCE
                || method == TransformMethod.NORMALIZE_DIFFERENCE);
    }

    /**
     * One-dimensional sinusoidal data with a single large spike: recall above
     * 80%, and precision/average grade bounded across trials.
     */
    @ParameterizedTest
    @EnumSource(value = TransformMethod.class, names = { "NONE", "NORMALIZE" })
    public void AnomalyTestSine1D(TransformMethod method) {
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 200;
        int length = 4 * sampleSize;
        int found = 0;
        int count = 0;
        double grade = 0;
        for (int i = 0; i < numTrials; i++) {
            int numberOfTrees = 30 + rng.nextInt(20);
            int outputAfter = 32 + rng.nextInt(50);
            int shingleSize = 8;
            int baseDimensions = 1; // multiple dimensions would have anti-correlations induced by
                                    // differring periods
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)
                    .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter)
                    .transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).build();
            double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 0, rng.nextLong(),
                    baseDimensions, 0, false).data;
            int anomalyAt = outputAfter + rng.nextInt(length / 2);
            for (int j = 0; j < baseDimensions; j++) {
                int sign = (rng.nextDouble() < 0.5) ? -1 : 1;
                // large obvious spike
                data[anomalyAt][j] += sign * 100;
            }
            for (int j = 0; j < length; j++) {
                AnomalyDescriptor firstResult = first.process(data[j], 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    // detection can be late
                    if (j + firstResult.getRelativeIndex() == anomalyAt) {
                        ++found;
                    }
                    ++count;
                    grade += firstResult.getAnomalyGrade();
                }
            }
        }
        // catch anomalies 80% of the time
        assertTrue(found > 0.8 * numTrials);
        // precision is not terrible
        assertTrue(count < 2 * numTrials);
        // average grade is closer to found
        assertTrue(grade < 1.5 * numTrials);
    }

    /**
     * Anomaly detection with occasionally missing dimensions supplied via the
     * missing-index overload of process.
     */
    @ParameterizedTest
    @EnumSource(value = TransformMethod.class, names = { "NONE", "NORMALIZE" })
    public void StreamingImputeTest(TransformMethod method) {
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 10;
        int length = 40 * sampleSize;
        for (int i = 0; i < numTrials; i++) {
            int numberOfTrees = 30 + rng.nextInt(20);
            int outputAfter = 32 + rng.nextInt(50);
            // shingleSize 1 is not recommended for complicated input
            int shingleSize = 2 + rng.nextInt(15);
            int baseDimensions = 2 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)
                    .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter).alertOnce(true)
                    // .forestMode(ForestMode.STREAMING_IMPUTE)
                    .transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).build();
            int count = 0;
            double[] point = new double[baseDimensions];
            double[] anomalyPoint = new double[baseDimensions];
            for (int j = 0; j < baseDimensions; j++) {
                point[j] = 50 - rng.nextInt(100);
                int sign = (rng.nextDouble() < 0.5) ? -1 : 1;
                anomalyPoint[j] = point[j] + sign * (10 - rng.nextInt(5));
            }
            int anomalyAt = outputAfter + rng.nextInt(length / 2);
            for (int j = 0; j < anomalyAt; j++) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    ++count;
                }
            }
            assertEquals(0, count);
            int[] missing = null;
            if (rng.nextDouble() < 0.25) {
                missing = new int[] { 0 };
            } else if (rng.nextDouble() < 0.33) {
                missing = new int[] { 1 };
            }
            // anomaly detection with partial information
            assertTrue(first.process(anomalyPoint, 0L, missing).getAnomalyGrade() > 0);
            for (int j = anomalyAt + 1; j < length; j++) {
                missing = null;
                if (rng.nextDouble() < 0.05) {
                    missing = new int[] { 0 };
                } else if (rng.nextDouble() < 0.05) {
                    missing = new int[] { 1 };
                }
                AnomalyDescriptor firstResult = first.process(point, 0L, missing);
                if (firstResult.getAnomalyGrade() > 0) {
                    ++count;
                }
            }
            assertTrue(count < shingleSize);
        }
    }
}
| 558 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ConsistencyTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Arrays;
import java.util.Random;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
@Tag("functional")
public class ConsistencyTest {
/**
 * With internal shingling, the thresholded forest must produce exactly the
 * same RCF score as a plain RandomCutForest built with the same seed.
 */
@Test
public void InternalShinglingTest() {
    int sampleSize = 256;
    int baseDimensions = 2;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    int numTrials = 1; // just once since testing exact equality
    int length = 40 * sampleSize;
    for (int trial = 0; trial < numTrials; trial++) {
        RandomCutForest reference = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .precision(Precision.FLOAT_32).internalShinglingEnabled(true).shingleSize(shingleSize)
                .randomSeed(seed).build();
        ThresholdedRandomCutForest thresholded = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build();
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                seed + trial, baseDimensions);
        for (double[] point : dataWithKeys.data) {
            // score first, then feed the same point to the reference forest
            AnomalyDescriptor descriptor = thresholded.process(point, 0L);
            assertEquals(descriptor.getRCFScore(), reference.getAnomalyScore(point), 1e-10);
            reference.update(point);
        }
    }
}
/**
 * With external (caller-side) shingling, scores must match a plain
 * RandomCutForest exactly, whether the forest is told the shingle size or not;
 * anomaly grades, however, are allowed to differ because the corrector step
 * uses the shingle size.
 */
@Test
public void ExternalShinglingTest() {
    int sampleSize = 256;
    int baseDimensions = 1;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    int numTrials = 1; // just once since testing exact equality
    int length = 400 * sampleSize;
    for (int i = 0; i < numTrials; i++) {
        // reference forest aware of the shingle size
        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .precision(Precision.FLOAT_32).internalShinglingEnabled(false).shingleSize(shingleSize)
                .randomSeed(seed).build();
        // identical forest that treats the shingled input as unshingled
        RandomCutForest copyForest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .precision(Precision.FLOAT_32).internalShinglingEnabled(false).shingleSize(1).randomSeed(seed)
                .build();
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build();
        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .internalShinglingEnabled(false).shingleSize(1).anomalyRate(0.01).build();
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(length, 50,
                shingleSize, baseDimensions, seed);
        int gradeDifference = 0;
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor firstResult = first.process(point, 0L);
            AnomalyDescriptor secondResult = second.process(point, 0L);
            // scores agree exactly across all four forests
            assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);
            assertEquals(firstResult.getRCFScore(), copyForest.getAnomalyScore(point), 1e-10);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            if ((firstResult.getAnomalyGrade() > 0) != (secondResult.getAnomalyGrade() > 0)) {
                ++gradeDifference;
                // thresholded random cut forest uses shingle size in the corrector step
                // this is supposed to be different
            }
            forest.update(point);
            copyForest.update(point);
        }
        // the shingle-aware and shingle-unaware graders must disagree somewhere
        assertTrue(gradeDifference > 0);
    }
}
/**
 * Internal vs. external shingling must produce identical scores on the same
 * underlying stream, and a mapper round-trip of the internally shingled forest
 * must continue to match both scores and grades.
 */
@Test
public void MixedShinglingTest() {
    int sampleSize = 256;
    int baseDimensions = 1;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    System.out.println(seed);
    Random rng = new Random(seed);
    int numTrials = 5; // test is exact equality, reducing the number of trials
    int numberOfTrees = 30; // and using fewer trees to speed up test
    int length = 40 * sampleSize;
    int testLength = length;
    for (int i = 0; i < numTrials; i++) {
        long newSeed = rng.nextLong();
        int outputAfter = rng.nextInt(sampleSize * 10) + 1;
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(newSeed)
                .numberOfTrees(numberOfTrees).internalShinglingEnabled(true)
                // increasing outputAfter for internal shingling
                .outputAfter(outputAfter + shingleSize - 1).shingleSize(shingleSize).anomalyRate(0.01).build();
        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(newSeed)
                .numberOfTrees(numberOfTrees).internalShinglingEnabled(false).outputAfter(outputAfter)
                .shingleSize(shingleSize).anomalyRate(0.01).build();
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length + testLength, 50,
                100, 5, newSeed + i, baseDimensions);
        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);
        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);
        int count = shingleSize - 1;
        // insert initial points
        for (int j = 0; j < shingleSize - 1; j++) {
            first.process(dataWithKeys.data[j], 0L);
        }
        for (int j = 0; j < length; j++) {
            // validate equality of points
            for (int y = 0; y < baseDimensions; y++) {
                assertEquals(dataWithKeys.data[count][y], shingledData[j][(shingleSize - 1) * baseDimensions + y],
                        1e-10);
            }
            AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], 0L);
            ++count;
            AnomalyDescriptor secondResult = second.process(shingledData[j], 0L);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            // grades will not match
        }
        // round-trip the internally shingled forest through the state mapper
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        ThresholdedRandomCutForest fourth = mapper.toModel(mapper.toState(first));
        for (int j = length; j < shingledData.length; j++) {
            // validate equality of points
            for (int y = 0; y < baseDimensions; y++) {
                assertEquals(dataWithKeys.data[count][y], shingledData[j][(shingleSize - 1) * baseDimensions + y],
                        1e-10);
            }
            AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], 0L);
            AnomalyDescriptor secondResult = second.process(shingledData[j], 0L);
            AnomalyDescriptor fourthResult = fourth.process(dataWithKeys.data[count], 0L);
            ++count;
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            // the restored copy must agree on score AND grade
            assertEquals(firstResult.getRCFScore(), fourthResult.getRCFScore(), 1e-10);
            assertEquals(firstResult.getAnomalyGrade(), fourthResult.getAnomalyGrade(), 1e-10);
        }
    }
}
/**
 * TIME_AUGMENTED mode with zero time weight must score identically to
 * STANDARD mode (same seed, same thresholds), and a mapper round-trip of the
 * time-augmented forest must preserve both scores and grades.
 *
 * @param transformMethod transformation applied before the forest
 */
@ParameterizedTest
@EnumSource(TransformMethod.class)
public void TimeAugmentedTest(TransformMethod transformMethod) {
    int sampleSize = 256;
    int baseDimensions = 1;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    int numTrials = 1; // test is exact equality, reducing the number of trials
    int numberOfTrees = 30; // and using fewer trees to speed up test
    int length = 10 * sampleSize;
    int dataSize = 2 * length;
    for (int i = 0; i < numTrials; i++) {
        Precision precision = Precision.FLOAT_32;
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.STANDARD).weightTime(0).transformMethod(transformMethod).normalizeTime(true)
                .outputAfter(32).initialAcceptFraction(0.125).build();
        ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.TIME_AUGMENTED).weightTime(0).transformMethod(transformMethod)
                .normalizeTime(true).outputAfter(32).initialAcceptFraction(0.125).build();
        // ensuring that the parameters are the same; otherwise the grades/scores cannot
        // be the same
        // weighTime has to be 0 in the above
        first.setLowerThreshold(1.1);
        second.setLowerThreshold(1.1);
        first.setHorizon(0.75);
        second.setHorizon(0.75);
        Random noise = new Random(0);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1,
                50, 100, 5, seed, baseDimensions);
        int count = 0;
        for (int j = 0; j < length; j++) {
            // jittered but increasing timestamps
            long timestamp = 100 * count + noise.nextInt(10) - 5;
            AnomalyDescriptor result = first.process(dataWithKeys.data[j], timestamp);
            AnomalyDescriptor test = second.process(dataWithKeys.data[j], timestamp);
            assertEquals(result.getRCFScore(), test.getRCFScore(), 1e-10);
            // grade will not be the same because dimension changes
            ++count;
        }
        // round-trip the time-augmented forest through the state mapper
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
        for (int j = length; j < 2 * length; j++) {
            // can be a different gap
            long timestamp = 150 * count + noise.nextInt(10) - 5;
            AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], timestamp);
            AnomalyDescriptor secondResult = second.process(dataWithKeys.data[count], timestamp);
            AnomalyDescriptor thirdResult = third.process(dataWithKeys.data[count], timestamp);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
            assertEquals(secondResult.getAnomalyGrade(), thirdResult.getAnomalyGrade(), 1e-10);
        }
    }
}
// streaming impute changes normalizations
@ParameterizedTest
@EnumSource(TransformMethod.class)
// Verifies that STREAMING_IMPUTE mode with a (nearly) constant timestamp gap
// produces the same RCF scores as STANDARD mode with internal shingling, for
// every transform method, and that a mapper round-trip of the imputing forest
// continues to agree with the standard forest afterwards.
public void ImputeTest(TransformMethod transformMethod) {
    int sampleSize = 256;
    int baseDimensions = 2;
    int shingleSize = 4;
    int dimensions = baseDimensions * shingleSize;
    int numTrials = 1; // test is exact equality, reducing the number of trials
    int numberOfTrees = 30; // and using fewer trees to speed up test
    int length = 10 * sampleSize;
    int dataSize = 2 * length;
    for (int i = 0; i < numTrials; i++) {
        Precision precision = Precision.FLOAT_32;
        // random data seed, printed so a failing run can be reproduced
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        Random rng = new Random(seed);
        double[] weights = new double[baseDimensions];
        Arrays.fill(weights, 1.0);
        int startNormalization = 10;
        // output cannot begin before normalization statistics and a full shingle exist
        int outputAfter = startNormalization + shingleSize;
        // both forests share this seed so their trees evolve identically
        long newSeed = rng.nextLong();
        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(newSeed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.STANDARD).weightTime(0).transformMethod(transformMethod).normalizeTime(true)
                .startNormalization(startNormalization).outputAfter(outputAfter).initialAcceptFraction(0.125)
                .weights(weights).build();
        ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true)
                .dimensions(dimensions).randomSeed(newSeed).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.STREAMING_IMPUTE).weightTime(0).transformMethod(transformMethod)
                .startNormalization(startNormalization).normalizeTime(true).outputAfter(outputAfter)
                .initialAcceptFraction(0.125).weights(weights).build();
        Random noise = new Random(0);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1,
                50, 100, 5, seed, baseDimensions);
        for (int j = 0; j < length; j++) {
            // gap has to be asymptotically same
            // NOTE(review): the nextInt(10) draw is multiplied by 0, so the gap is
            // exactly 100 here while the noise stream still advances -- presumably
            // intentional to keep the stream aligned with the second loop; confirm
            long timestamp = 100 * j + 0 * noise.nextInt(10) - 5;
            AnomalyDescriptor result = first.process(dataWithKeys.data[j], 0L);
            AnomalyDescriptor test = second.process(dataWithKeys.data[j], timestamp);
            assertEquals(result.getRCFScore(), test.getRCFScore(), 1e-6);
        }
        // serialize/deserialize the imputing forest and check the copy still agrees
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
        for (int j = length; j < 2 * length; j++) {
            // has to be the same gap
            long timestamp = 100 * j + noise.nextInt(10) - 5;
            AnomalyDescriptor firstResult = first.process(dataWithKeys.data[j], 0L);
            AnomalyDescriptor thirdResult = third.process(dataWithKeys.data[j], timestamp);
            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-6);
        }
    }
}
}
| 559 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/AnomalyDescriptorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
public class AnomalyDescriptorTest {
@ParameterizedTest
@EnumSource(ScoringStrategy.class)
// Checks the invariants of AnomalyDescriptor fields (scale/shift lengths,
// relative index, past values, attribution) for every scoring strategy, over
// randomized shingle sizes and base dimensions, with one injected anomaly at
// input index 82.
public void PastValuesTest(ScoringStrategy strategy) {
    int sampleSize = 256;
    // random seed, printed so a failing run can be reproduced
    long seed = new Random().nextLong();
    System.out.println(" seed " + seed);
    Random rng = new Random(seed);
    int numTrials = 10; // randomized trials over shingle size and base dimensions
    int length = 40 * sampleSize;
    for (int i = 0; i < numTrials; i++) {
        int outputAfter = 2 + 1;
        int shingleSize = 1 + rng.nextInt(15);
        int baseDimensions = 1 + rng.nextInt(5);
        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(rng.nextLong())
                .outputAfter(outputAfter).scoringStrategy(strategy).internalShinglingEnabled(true)
                .shingleSize(shingleSize).build();
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                rng.nextLong(), baseDimensions);
        int count = 0;
        for (double[] point : dataWithKeys.data) {
            if (count == 82) {
                point[0] += 10000; // introducing an anomaly
            }
            AnomalyDescriptor firstResult = first.process(point, 0L);
            assertArrayEquals(firstResult.getCurrentInput(), point, 1e-6);
            assertEquals(firstResult.scoringStrategy, strategy);
            if (count < outputAfter || count < shingleSize) {
                // before output begins the score must be exactly 0
                assertEquals(firstResult.getRCFScore(), 0);
            } else {
                // distances can be 0
                assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getRCFScore() > 0);
                assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.threshold > 0);
                // scale/shift are reported per input (base) dimension, not per shingle
                assertEquals(firstResult.getScale().length, baseDimensions);
                assertEquals(firstResult.getShift().length, baseDimensions);
                assertTrue(firstResult.getRelativeIndex() <= 0);
                if (count == 82 && strategy != ScoringStrategy.DISTANCE) {
                    // because distances are 0 till sampleSize; by which time
                    // forecasts would be reasonable
                    assertTrue(firstResult.getAnomalyGrade() > 0);
                }
                if (firstResult.getAnomalyGrade() > 0) {
                    assertNotNull(firstResult.getPastValues());
                    assertEquals(firstResult.getPastValues().length, baseDimensions);
                    if (firstResult.getRelativeIndex() == 0) {
                        // relative index 0 means the anomaly is the current input itself
                        assertArrayEquals(firstResult.getPastValues(), firstResult.getCurrentInput(), 1e-10);
                    }
                    assertNotNull(firstResult.getRelevantAttribution());
                    assertEquals(firstResult.getRelevantAttribution().length, baseDimensions);
                    // attribution components must sum to the score
                    assertEquals(firstResult.attribution.getHighLowSum(), firstResult.getRCFScore(), 1e-6);
                    // the reverse of this condition need not be true -- the predictor corrector
                    // often may declare grade 0 even when score is greater than threshold, to
                    // account for shingling and initial results that populate the thresholder
                    assertTrue(strategy == ScoringStrategy.MULTI_MODE_RECALL
                            || firstResult.getRCFScore() >= firstResult.getThreshold());
                } else {
                    assertTrue(firstResult.getRelativeIndex() == 0);
                }
            }
            ++count;
        }
    }
}
@ParameterizedTest
@EnumSource(ScoringStrategy.class)
// Same invariant checks as PastValuesTest but in TIME_AUGMENTED mode: the
// reported scale/shift gain one extra entry for the time coordinate, while
// past values and attribution remain per base dimension.
public void TimeAugmentedTest(ScoringStrategy strategy) {
    int sampleSize = 256;
    // random seed, printed so a failing run can be reproduced
    long seed = new Random().nextLong();
    System.out.println(" seed " + seed);
    Random rng = new Random(seed);
    int numTrials = 10; // randomized trials over shingle size and base dimensions
    int length = 40 * sampleSize;
    for (int i = 0; i < numTrials; i++) {
        int outputAfter = 2 + 1;
        int shingleSize = 1 + rng.nextInt(15);
        int baseDimensions = 1 + rng.nextInt(5);
        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(rng.nextLong())
                .outputAfter(outputAfter).forestMode(ForestMode.TIME_AUGMENTED).scoringStrategy(strategy)
                .internalShinglingEnabled(true).shingleSize(shingleSize).build();
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                rng.nextLong(), baseDimensions);
        int count = 0;
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor firstResult = first.process(point, 0L);
            assertArrayEquals(firstResult.getCurrentInput(), point, 1e-6);
            assertEquals(firstResult.scoringStrategy, strategy);
            if (count < outputAfter || count < shingleSize) {
                // before output begins the score must be exactly 0
                assertEquals(firstResult.getRCFScore(), 0);
            } else {
                // distances can be 0
                assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getRCFScore() > 0);
                assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.threshold > 0);
                // time augmentation adds one coordinate to scale and shift
                assertEquals(firstResult.getScale().length, baseDimensions + 1);
                assertEquals(firstResult.getShift().length, baseDimensions + 1);
                assertTrue(firstResult.getRelativeIndex() <= 0);
                if (firstResult.getAnomalyGrade() > 0) {
                    assertNotNull(firstResult.getPastValues());
                    assertEquals(firstResult.getPastValues().length, baseDimensions);
                    if (firstResult.getRelativeIndex() == 0) {
                        // relative index 0 means the anomaly is the current input itself
                        assertArrayEquals(firstResult.getPastValues(), firstResult.getCurrentInput(), 1e-10);
                    }
                    // attribution components must sum to the score
                    assertEquals(firstResult.attribution.getHighLowSum(), firstResult.getRCFScore(), 1e-6);
                    assertNotNull(firstResult.getRelevantAttribution());
                    assertEquals(firstResult.getRelevantAttribution().length, baseDimensions);
                    assertTrue(strategy == ScoringStrategy.MULTI_MODE_RECALL
                            || firstResult.getRCFScore() >= firstResult.getThreshold());
                }
            }
            ++count;
        }
    }
}
}
| 560 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/PredictorCorrectorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Arrays;
import java.util.Random;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.DiVector;
public class PredictorCorrectorTest {
@Test
// Exercises PredictorCorrector.getExpectedPoint argument handling and the
// numberOfAttributors configuration validation.
void AttributorTest() {
    int sampleSize = 256; // NOTE(review): unused; the builder below does not set sampleSize
    int baseDimensions = 10;
    int shingleSize = 10;
    int dimensions = baseDimensions * shingleSize;
    ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
            .precision(Precision.FLOAT_32).randomSeed(0L).forestMode(ForestMode.STANDARD).shingleSize(shingleSize)
            .anomalyRate(0.01).transformMethod(NORMALIZE).build();
    // an all-zero attribution vector yields no expected point
    DiVector test = new DiVector(baseDimensions * shingleSize);
    assert (forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null) == null);
    // attributor count must be positive
    assertThrows(IllegalArgumentException.class, () -> forest.predictorCorrector.setNumberOfAttributors(-1));
    forest.predictorCorrector.setNumberOfAttributors(baseDimensions);
    assertThrows(NullPointerException.class,
            () -> forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null));
    double[] array = new double[20];
    Arrays.fill(array, 1.0);
    DiVector testTwo = new DiVector(array, array);
    // NOTE(review): testTwo is never used -- the assertThrows below repeats the
    // previous call with 'test'; this looks like a copy-paste slip and was
    // probably meant to pass testTwo. Confirm intent before changing, since the
    // expected exception may differ for a non-zero attribution vector.
    assertThrows(NullPointerException.class,
            () -> forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null));
}
@Test
// Verifies that the ignoreNearExpected* builder knobs reach the
// PredictorCorrector, survive a mapper round-trip, can be reset/updated via
// the setters, and that the internal random seed advances like
// java.util.Random seeded with the configured value.
void configTest() {
    int sampleSize = 256; // NOTE(review): unused; the builder below does not set sampleSize
    int baseDimensions = 2;
    int shingleSize = 10;
    int dimensions = baseDimensions * shingleSize;
    // four distinct random per-dimension threshold vectors
    double[] testOne = new double[] { new Random().nextDouble(), new Random().nextDouble() };
    double[] testTwo = new double[] { new Random().nextDouble(), new Random().nextDouble() };
    double[] testThree = new double[] { new Random().nextDouble(), new Random().nextDouble() };
    double[] testFour = new double[] { new Random().nextDouble(), new Random().nextDouble() };
    // NOTE(review): randomSeed is set twice (0L then 1110); the later call wins,
    // and the seed assertions at the bottom rely on 1110
    ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
            .precision(Precision.FLOAT_32).randomSeed(0L).forestMode(ForestMode.STANDARD).shingleSize(shingleSize)
            .anomalyRate(0.01).scoringStrategy(ScoringStrategy.DISTANCE).transformMethod(NORMALIZE).randomSeed(1110)
            .autoAdjust(true).ignoreNearExpectedFromAbove(testOne).ignoreNearExpectedFromBelow(testTwo)
            .ignoreNearExpectedFromAboveByRatio(testThree).ignoreNearExpectedFromBelowByRatio(testFour).build();
    PredictorCorrector predictorCorrector = forest.getPredictorCorrector();
    // wrong length (must be baseDimensions or 4 * baseDimensions) is rejected
    double[] test = new double[1];
    assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setIgnoreNearExpected(test));
    assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpected(null));
    // builder values made it through
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, testOne, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, testTwo, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);
    assertNotNull(predictorCorrector.getDeviations());
    assertEquals(predictorCorrector.lastStrategy, ScoringStrategy.DISTANCE);
    // mapper round-trip must preserve all of the above
    PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();
    PredictorCorrector copy = mapper.toModel(mapper.toState(predictorCorrector));
    assertArrayEquals(copy.ignoreNearExpectedFromAbove, testOne, 1e-10);
    assertArrayEquals(copy.ignoreNearExpectedFromBelow, testTwo, 1e-10);
    assertArrayEquals(copy.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);
    assertArrayEquals(copy.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);
    assertNotNull(copy.getDeviations());
    assertEquals(copy.lastStrategy, ScoringStrategy.DISTANCE);
    // corrupting internal deviation arrays should be detected by getDeviations
    copy.deviationsActual = new Deviation[1]; // changing the state
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> copy.getDeviations());
    assertEquals("incorrect state", exception.getMessage());
    copy.deviationsExpected = new Deviation[1];
    exception = assertThrows(IllegalArgumentException.class, () -> copy.getDeviations());
    assertEquals("length should be base dimension", exception.getMessage());
    // a 4 * baseDimensions array sets all four vectors at once (here, all zeros)
    double[] another = new double[4 * baseDimensions];
    assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpected(another));
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, new double[2]);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, new double[2]);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, new double[2]);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, new double[2]);
    // negative thresholds are rejected
    another[0] = -1;
    assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setIgnoreNearExpected(another));
    // individual setters restore the original vectors
    predictorCorrector.setIgnoreNearExpectedFromAbove(testOne);
    predictorCorrector.setIgnoreNearExpectedFromBelow(testTwo);
    predictorCorrector.setIgnoreNearExpectedFromAboveByRatio(testThree);
    predictorCorrector.setIgnoreNearExpectedFromBelowByRatio(testFour);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, testOne, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, testTwo, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);
    assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);
    // the internal seed evolves exactly like Random(1110): nextDouble() consumes
    // one value and the stored seed becomes the next nextLong() of that stream
    Random testRandom = new Random(1110L);
    assertEquals(predictorCorrector.getRandomSeed(), 1110L);
    double nextDouble = predictorCorrector.nextDouble();
    assertEquals(predictorCorrector.getRandomSeed(), testRandom.nextLong());
    assertEquals(nextDouble, testRandom.nextDouble(), 1e-10);
}
}
| 561 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/TestGlobalLocalAnomalyDetector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;
import static java.lang.Math.PI;
import static java.lang.Math.min;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;
import java.util.function.BiFunction;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import com.amazon.randomcutforest.util.Weighted;
public class TestGlobalLocalAnomalyDetector {
@Test
// End-to-end clustering/anomaly-detection test over random strings on the
// alphabet {'-','_'}: ~5% longer, unbiased strings are injected as anomalies
// among biased strings, the detector clusters with a toy edit distance, and
// precision/recall of the injected anomalies are printed. Also checks that a
// negative-valued (malformed) distance function is rejected and that
// representative likelihoods sum to 1.
void testDynamicStringClustering() {
    // random seed, printed so a failing run can be reproduced
    long seed = new Random().nextLong();
    System.out.println("String summarization seed : " + seed);
    Random random = new Random(seed);
    int stringSize = 70;
    int numberOfStrings = 200000;
    int reservoirSize = 2000;
    boolean changeInMiddle = true;
    // the following should be away from 0.5 in [0.5,1]
    double gapProbOfA = 0.85;
    double anomalyRate = 0.05;
    char[][] points = new char[numberOfStrings][];
    boolean[] injected = new boolean[numberOfStrings];
    int numberOfInjected = 0;
    for (int i = 0; i < numberOfStrings; i++) {
        // anomalies: longer strings with 50/50 symbol mix, only after warm-up
        if (random.nextDouble() < anomalyRate && i > reservoirSize / 2) {
            injected[i] = true;
            ++numberOfInjected;
            points[i] = getABArray(stringSize + 10, 0.5, random, false, 0);
        } else {
            // normal points: biased toward one symbol, sometimes flipped at the ends,
            // with the flipped fraction growing slowly over the stream
            boolean flag = changeInMiddle && random.nextDouble() < 0.25;
            double prob = (random.nextDouble() < 0.5) ? gapProbOfA : (1 - gapProbOfA);
            points[i] = getABArray(stringSize, prob, random, flag, 0.25 * i / numberOfStrings);
        }
    }
    System.out.println("Injected " + numberOfInjected + " 'anomalies' in " + points.length);
    int recluster = reservoirSize / 2; // NOTE(review): unused in this method
    // NOTE(review): the third argument of toyD is unused by toyD itself
    BiFunction<char[], char[], Double> dist = (a, b) -> toyD(a, b, stringSize / 2.0);
    GlobalLocalAnomalyDetector<char[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)
            .numberOfRepresentatives(5).timeDecay(1.0 / reservoirSize).capacity(reservoirSize).build();
    reservoir.setGlobalDistance(dist);
    int truePos = 0;
    int falsePos = 0;
    int falseNeg = 0;
    for (int y = 0; y < points.length; y++) {
        // periodically verify that a distance returning negative values is rejected
        if (y % 200 == 100 && y > reservoirSize) {
            char[] temp = points[y];
            // check for malformed distance function, to the extent we can check efficiently
            BiFunction<char[], char[], Double> badDistance = (a, b) -> -1.0;
            assertThrows(IllegalArgumentException.class, () -> {
                reservoir.process(temp, 1.0f, badDistance, true);
            });
        }
        GenericAnomalyDescriptor<char[]> result = reservoir.process(points[y], 1.0f, null, true);
        if (result.getRepresentativeList() != null) {
            double sum = 0;
            for (Weighted<char[]> rep : result.getRepresentativeList()) {
                assert (rep.weight <= 1.0);
                sum += rep.weight;
            }
            // checking likelihood summing to 1
            assertEquals(sum, 1.0, 1e-6);
        }
        // tally detector verdict against the injected ground truth
        if (result.getAnomalyGrade() > 0) {
            if (!injected[y]) {
                ++falsePos;
            } else {
                ++truePos;
            }
        } else if (injected[y]) {
            ++falseNeg;
        }
        // progress report at each 10% of the stream
        if (10 * y % points.length == 0 && y > 0) {
            System.out.println(" at " + y);
            System.out.println("Precision = " + precision(truePos, falsePos));
            System.out.println("Recall = " + recall(truePos, falseNeg));
        }
    }
    System.out.println(" Final: ");
    System.out.println("Precision = " + precision(truePos, falsePos));
    System.out.println("Recall = " + recall(truePos, falseNeg));
}
/**
 * Toy edit (Levenshtein) distance between two character arrays, computed with
 * the classic two-row Wagner-Fischer dynamic program. Used as an example
 * string distance for clustering tests.
 *
 * @param a the first string
 * @param b the second string
 * @param u unused; retained so existing BiFunction call sites keep compiling
 *          -- NOTE(review): consider removing the parameter at the call sites
 * @return the number of single-character insertions, deletions and
 *         substitutions needed to turn {@code a} into {@code b}
 */
public static double toyD(char[] a, char[] b, double u) {
    // keep a as the shorter array; the distance is symmetric
    if (a.length > b.length) {
        return toyD(b, a, u);
    }
    double[][] dist = new double[2][b.length + 1];
    // base row: cost of building each prefix of b from the empty string
    for (int j = 0; j < b.length + 1; j++) {
        dist[0][j] = j;
    }
    // bug fix: when a is empty the loop below never executes and the previous
    // code returned dist[1][b.length] == 0; the correct distance is b.length,
    // which is exactly the base row value
    if (a.length == 0) {
        return dist[0][b.length];
    }
    for (int i = 1; i < a.length + 1; i++) {
        dist[1][0] = i;
        for (int j = 1; j < b.length + 1; j++) {
            // substitution (free on match), deletion, insertion
            double t = dist[0][j - 1] + ((a[i - 1] == b[j - 1]) ? 0 : 1);
            dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);
        }
        // roll the current row into the previous row for the next iteration
        for (int j = 0; j < b.length + 1; j++) {
            dist[0][j] = dist[1][j];
        }
    }
    return dist[1][b.length];
}
// colors
// ANSI escape codes for colorized console output during manual debugging;
// NOTE(review): not referenced anywhere in the visible portion of this class
public static final String ANSI_RESET = "\u001B[0m";
public static final String ANSI_RED = "\u001B[31m";
public static final String ANSI_BLUE = "\u001B[34m";
/**
 * Generates a random character array over the two-symbol alphabet {'-', '_'}.
 * The actual length is {@code size} plus a random slack below {@code size/5}.
 * Each position is '-' with the given probability; when {@code changeInMiddle}
 * is set, the bias is flipped to {@code 1 - probabilityOfA} for the leading
 * and trailing {@code fraction} of the positions.
 *
 * @param size           nominal length; actual length is in [size, size + size/5)
 * @param probabilityOfA probability of emitting '-' at a given position
 * @param random         source of randomness (deterministic for a fixed seed)
 * @param changeInMiddle whether to flip the symbol bias near both ends
 *                       (changed from boxed Boolean to primitive boolean; all
 *                       call sites pass primitives, and the old parameter was
 *                       unboxed unconditionally anyway)
 * @param fraction       the fraction of positions at each end that are flipped
 * @return the generated character array
 */
public char[] getABArray(int size, double probabilityOfA, Random random, boolean changeInMiddle, double fraction) {
    // draw the slack first so the random stream matches previous behavior
    int newSize = size + random.nextInt(size / 5);
    char[] a = new char[newSize];
    for (int i = 0; i < newSize; i++) {
        // flip the bias inside the flagged end regions, keep it elsewhere
        double toss = (changeInMiddle && (i > (1 - fraction) * newSize || i < newSize * fraction))
                ? (1 - probabilityOfA)
                : probabilityOfA;
        a[i] = (random.nextDouble() < toss) ? '-' : '_';
    }
    return a;
}
/**
 * Produces a "fan" data set: samples from a normal mixture are squashed into
 * a thin ellipse, shifted away from the origin along the x axis, and each
 * point is then rotated onto one of {@code fans} randomly chosen blades.
 *
 * @param dataSize number of points to generate
 * @param seed     seed passed to the mixture generator
 * @param shift    distance of each blade's ellipse from the origin
 * @param fans     number of blades (clusters)
 * @return the generated 2-d points
 */
public double[][] shiftedEllipse(int dataSize, int seed, double shift, int fans) {
    NormalMixtureTestData source = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
    double[][] points = source.generateTestData(dataSize, 2, seed);
    // fixed seed so the blade assignment is reproducible across runs
    Random bladePicker = new Random(0);
    for (int index = 0; index < dataSize; index++) {
        int blade = bladePicker.nextInt(fans);
        // squash vertically and stretch horizontally into a thin ellipse
        points[index][1] *= 1.0 / fans;
        points[index][0] *= 2.0;
        // push the ellipse away from the origin
        points[index][0] += shift + 1.0 / fans;
        // spin the point onto its blade
        points[index] = rotateClockWise(points[index], 2 * PI * blade / fans);
    }
    return points;
}
@Test
// Rotating-fan experiment: a multi-cluster 2-d data set spins a full circle
// while ~0.5% anomalies are injected 90-180 degrees away from any blade.
// Compares the clustering-based GlobalLocalAnomalyDetector against a
// distance-mode ThresholdedRandomCutForest (same z-factor) and prints both
// precision/recall curves. Finally exercises the merge constructor of
// GlobalLocalAnomalyDetector and its parameter/cluster invariants.
void testDynamicNumericClustering() throws IOException {
    // random seed, printed so a failing run can be reproduced
    long randomSeed = new Random().nextLong();
    System.out.println("Seed " + randomSeed);
    // we would be sending dataSize * 360 vectors
    int dataSize = 2000;
    double range = 10.0;
    int numberOfFans = 3;
    // corresponds to number of clusters
    double[][] data = shiftedEllipse(dataSize, 7, range / 2, numberOfFans);
    // tallies for the clustering detector and for the RCF baseline
    int truePos = 0;
    int falsePos = 0;
    int falseNeg = 0;
    int truePosRCF = 0;
    int falsePosRCF = 0;
    int falseNegRCF = 0;
    int reservoirSize = dataSize;
    double timedecay = 1.0 / reservoirSize;
    GlobalLocalAnomalyDetector<float[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)
            .numberOfRepresentatives(3).timeDecay(timedecay).capacity(reservoirSize).build();
    reservoir.setGlobalDistance(Summarizer::L2distance);
    double zFactor = 6.0; // six sigma deviation; seems to work best
    reservoir.setZfactor(zFactor);
    ThresholdedRandomCutForest test = ThresholdedRandomCutForest.builder().dimensions(2).shingleSize(1)
            .randomSeed(77).timeDecay(timedecay).scoringStrategy(ScoringStrategy.DISTANCE).build();
    test.setZfactor(zFactor); // using the same apples to apples comparison
    String name = "clustering_example";
    // NOTE(review): this writer is never written to, and close() at the end is
    // not protected by try-with-resources, so it leaks if an assertion fails
    BufferedWriter file = new BufferedWriter(new FileWriter(name));
    Random noiseGen = new Random(randomSeed + 1);
    for (int degree = 0; degree < 360; degree += 1) {
        int index = 0;
        while (index < data.length) {
            boolean injected = false;
            float[] vec;
            if (noiseGen.nextDouble() < 0.005) {
                injected = true;
                double[] candAnomaly = new double[2];
                // generate points along x axis
                candAnomaly[0] = (range / 2 * noiseGen.nextDouble() + range / 2);
                candAnomaly[1] = 0.1 * (2.0 * noiseGen.nextDouble() - 1.0);
                int antiFan = noiseGen.nextInt(numberOfFans);
                // rotate to be 90-180 degrees away -- these are decidedly anomalous
                vec = toFloatArray(rotateClockWise(candAnomaly,
                        -2 * PI * (degree + 180 * (1 + 2 * antiFan) / numberOfFans) / 360));
            } else {
                // regular point: rotate the fan data by the current degree
                vec = toFloatArray(rotateClockWise(data[index], -2 * PI * degree / 360));
                ++index;
            }
            GenericAnomalyDescriptor<float[]> result = reservoir.process(vec, 1.0f, null, true);
            AnomalyDescriptor res = test.process(toDoubleArray(vec), 0L);
            double grade = res.getAnomalyGrade();
            if (result.getRepresentativeList() != null) {
                double sum = 0;
                for (Weighted<float[]> rep : result.getRepresentativeList()) {
                    assert (rep.weight <= 1.0);
                    sum += rep.weight;
                }
                // checking likelihood summing to 1
                assertEquals(sum, 1.0, 1e-6);
            }
            // score both detectors against the injected ground truth
            if (injected) {
                if (result.getAnomalyGrade() > 0) {
                    ++truePos;
                } else {
                    ++falseNeg;
                }
                if (grade > 0) {
                    ++truePosRCF;
                    assert (res.attribution != null);
                    // even though scoring is different, we should see attribution add up to score
                    assertEquals(res.attribution.getHighLowSum(), res.getRCFScore(), 1e-6);
                } else {
                    ++falseNegRCF;
                }
            } else {
                if (result.getAnomalyGrade() > 0) {
                    ++falsePos;
                }
                if (grade > 0) {
                    ++falsePosRCF;
                    assert (res.attribution != null);
                    // even though scoring is different, we should see attribution add up to score
                    assertEquals(res.attribution.getHighLowSum(), res.getRCFScore(), 1e-6);
                }
            }
        }
        // sanity: something must have been flagged by the clustering detector
        if (falsePos + truePos == 0) {
            throw new IllegalStateException("");
        }
        // both detectors saw the same injected points, so their totals must match
        checkArgument(falseNeg + truePos == falseNegRCF + truePosRCF, " incorrect accounting");
        System.out.println(" at degree " + degree + " injected " + (truePos + falseNeg));
        System.out.print("Precision = " + precision(truePos, falsePos));
        System.out.println(" Recall = " + recall(truePos, falseNeg));
        System.out.print("RCF Distance Mode Precision = " + precision(truePosRCF, falsePosRCF));
        System.out.println(" RCF Distance Mode Recall = " + recall(truePosRCF, falseNegRCF));
    }
    // attempting merge
    long number = new Random().nextLong();
    int size = reservoirSize - new Random().nextInt(100);
    double newShrinkage = new Random().nextDouble();
    int reps = new Random().nextInt(10) + 1; // cannot be 0
    GlobalLocalAnomalyDetector.Builder builder = GlobalLocalAnomalyDetector.builder().capacity(size)
            .shrinkage(newShrinkage).numberOfRepresentatives(reps).timeDecay(timedecay).randomSeed(number);
    // merge two copies of the reservoir; 'true' requests clusters to be built
    GlobalLocalAnomalyDetector<float[]> newDetector = new GlobalLocalAnomalyDetector<>(reservoir, reservoir,
            builder, true, Summarizer::L1distance);
    assertEquals(newDetector.getCapacity(), size);
    assertNotEquals(newDetector.getClusters(), null);
    assertEquals(newDetector.numberOfRepresentatives, reps);
    assertEquals(newDetector.shrinkage, newShrinkage);
    assert (newDetector.getClusters() != null);
    // sampler weights should be pairwise non-increasing after the merge
    float[] weight = newDetector.sampler.getWeightArray();
    for (int i = 0; i < size - 1; i += 2) {
        assert (weight[i] >= weight[i + 1]);
    }
    // merging with 'false' must not build clusters
    GlobalLocalAnomalyDetector<float[]> another = new GlobalLocalAnomalyDetector<>(reservoir, reservoir, builder,
            false, Summarizer::L2distance);
    assertNull(another.getClusters());
    file.close();
}
/**
 * Fraction of flagged points that were genuine anomalies; defined as 1.0
 * when nothing was flagged (vacuous precision).
 */
double precision(int truePos, int falsePos) {
    int flagged = truePos + falsePos;
    if (flagged == 0) {
        return 1.0;
    }
    return (double) truePos / flagged;
}
/**
 * Fraction of genuine anomalies that were flagged; defined as 1.0 when no
 * anomalies existed (vacuous recall).
 */
double recall(int truePos, int falseNeg) {
    int actual = truePos + falseNeg;
    if (actual == 0) {
        return 1.0;
    }
    return (double) truePos / actual;
}
}
| 562 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ForecastTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.config.ImputationMethod.RCF;
import static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;
import static java.lang.Math.min;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Random;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.ValueSource;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Functional tests for the forecasting ({@code extrapolate}) path of
 * {@link ThresholdedRandomCutForest}. The tests check structural invariants
 * (vector lengths, lower &lt;= value &lt;= upper, forecast timestamps) and
 * idempotence of repeated extrapolation; the RMSE figures are printed for
 * manual inspection rather than asserted against a bound.
 */
@Tag("functional")
public class ForecastTest {

    /**
     * For every transform method: runs a single-attribute shingled stream,
     * verifies that repeated calls to extrapolate() with identical arguments
     * return identical range vectors, checks forecast range ordering and
     * timestamp continuity, and prints RMSE of value/lower/upper forecasts
     * per horizon step.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void basicAndIdempotence(TransformMethod method) {
        int sampleSize = 256;
        int baseDimensions = 1;
        int shingleSize = 8;
        int dimensions = baseDimensions * shingleSize;
        long seed = 1L;
        int length = 4 * sampleSize;
        int outputAfter = 128;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
                .shingleSize(shingleSize).outputAfter(outputAfter).transformMethod(method).build();
        // as the ratio of amplitude (signal) to noise is changed, the estimation range
        // in forecast
        // (or any other inference) should increase
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,
                baseDimensions);
        System.out.println(dataWithKeys.changes.length + " anomalies injected ");
        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);
        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);
        int horizon = 20;
        // differencing-based transforms only support a short horizon (shingleSize/2 + 1)
        if (method == TransformMethod.NORMALIZE_DIFFERENCE || method == TransformMethod.DIFFERENCE) {
            horizon = min(horizon, shingleSize / 2 + 1);
        }
        // accumulated squared errors per horizon step, for value/lower/upper forecasts
        double[] error = new double[horizon];
        double[] lowerError = new double[horizon];
        double[] upperError = new double[horizon];
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            // forecast first; change centrality to achieve a control over the sampling
            // setting centrality = 0 would correspond to random sampling from the leaves
            // reached by
            // impute visitor
            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
            RangeVector forecast = extrapolate.rangeVector;
            assert (forecast.values.length == horizon);
            assert (extrapolate.timeStamps.length == horizon);
            assert (extrapolate.lowerTimeStamps.length == horizon);
            assert (extrapolate.upperTimeStamps.length == horizon);
            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;
            // repeated invocations of extrapolate should return same result
            // for the same values of correction,centrality
            assertArrayEquals(forecast.values, alternative.values, 1e-6f);
            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);
            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);
            for (int i = 0; i < horizon; i++) {
                // check ranges
                if (j > sampleSize) {
                    // timestamps were provided as the sequence index j in process() below,
                    // so the forecast timestamps should continue that sequence
                    assert (extrapolate.timeStamps[i] == j + i);
                    assert (extrapolate.upperTimeStamps[i] == j + i);
                    assert (extrapolate.lowerTimeStamps[i] == j + i);
                }
                assert (forecast.values[i] >= forecast.lower[i]);
                assert (forecast.values[i] <= forecast.upper[i]);
                // compute errors
                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {
                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];
                    error[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];
                    lowerError[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];
                    upperError[i] += t * t;
                }
            }
            forest.process(dataWithKeys.data[j], j);
        }
        System.out.println(forest.getTransformMethod().name() + " RMSE (as horizon increases) ");
        for (int i = 0; i < horizon; i++) {
            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Lower (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Upper (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
    }

    /**
     * Same structural checks as basicAndIdempotence, but on data with a linear
     * shift (trend) and a forest with explicit timeDecay, restricted to the
     * non-differencing transforms supplied via the CSV source.
     */
    @ParameterizedTest
    @CsvSource({ "NORMALIZE,true", "NORMALIZE,false", "SUBTRACT_MA,true", "SUBTRACT_MA,false", "WEIGHTED,true",
            "WEIGHTED,false" })
    public void linearShift(String methodString, String normalizeTime) {
        int sampleSize = 256;
        int baseDimensions = 1;
        int shingleSize = 8;
        int dimensions = baseDimensions * shingleSize;
        long seed = 0L;
        int length = 10 * sampleSize;
        int outputAfter = 128;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
                .shingleSize(shingleSize).timeDecay(1.0 / 1024).outputAfter(outputAfter)
                .transformMethod(TransformMethod.valueOf(methodString))
                .normalizeTime(Boolean.parseBoolean(normalizeTime)).build();
        // as the ratio of amplitude (signal) to noise is changed, the estimation range
        // in forecast
        // (or any other inference) should increase
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,
                baseDimensions, true);
        System.out.println(dataWithKeys.changes.length + " anomalies injected ");
        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);
        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);
        // the following constraint is for differencing based methods
        int horizon = shingleSize / 2 + 1;
        double[] error = new double[horizon];
        double[] lowerError = new double[horizon];
        double[] upperError = new double[horizon];
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            // forecast first; change centrality to achieve a control over the sampling
            // setting centrality = 0 would correspond to random sampling from the leaves
            // reached by
            // impute visitor
            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
            RangeVector forecast = extrapolate.rangeVector;
            assert (forecast.values.length == horizon);
            assert (extrapolate.timeStamps.length == horizon);
            assert (extrapolate.lowerTimeStamps.length == horizon);
            assert (extrapolate.upperTimeStamps.length == horizon);
            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;
            // repeated invocations of extrapolate should return same result
            // for the same values of correction,centrality
            assertArrayEquals(forecast.values, alternative.values, 1e-6f);
            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);
            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);
            for (int i = 0; i < horizon; i++) {
                if (j > outputAfter) {
                    // timestamps provided to process() below are the sequence index j
                    assert (extrapolate.timeStamps[i] == i + j);
                    assert (extrapolate.upperTimeStamps[i] == i + j);
                    assert (extrapolate.lowerTimeStamps[i] == i + j);
                }
                // check ranges
                assert (forecast.values[i] >= forecast.lower[i]);
                assert (forecast.values[i] <= forecast.upper[i]);
                // compute errors
                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {
                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];
                    error[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];
                    lowerError[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];
                    upperError[i] += t * t;
                }
            }
            forest.process(dataWithKeys.data[j], j);
        }
        System.out.println(forest.getTransformMethod().name() + " RMSE (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Lower (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Upper (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
    }

    /**
     * Linear-shift variant for the differencing transforms. Note every call to
     * process() passes timestamp 0, so all forecast timestamps are expected to
     * be 0 (asserted below), unlike the two tests above.
     */
    @ParameterizedTest
    @CsvSource({ "DIFFERENCE,true", "DIFFERENCE,false", "NORMALIZE_DIFFERENCE,true", "NORMALIZE_DIFFERENCE,false" })
    public void linearShiftDifference(String methodString, String normalizeTime) {
        int sampleSize = 256;
        int baseDimensions = 1;
        int shingleSize = 8;
        int dimensions = baseDimensions * shingleSize;
        // use same seed as previous test
        long seed = 0L;
        int length = 10 * sampleSize;
        int outputAfter = 128;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
                .shingleSize(shingleSize).timeDecay(1.0 / 1024).outputAfter(outputAfter)
                .transformMethod(TransformMethod.valueOf(methodString))
                .normalizeTime(Boolean.parseBoolean(normalizeTime)).build();
        // as the ratio of amplitude (signal) to noise is changed, the estimation range
        // in forecast
        // (or any other inference) should increase
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,
                baseDimensions, true);
        System.out.println(dataWithKeys.changes.length + " anomalies injected ");
        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);
        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);
        // the following constraint is for differencing based methods
        // the differenced values will be noisy in the presence of anomalies
        // the example demonstrates that the best forecaster need not be the best
        // anomaly detector, even from a restricted family of algorithms
        int horizon = shingleSize / 2 + 1;
        double[] error = new double[horizon];
        double[] lowerError = new double[horizon];
        double[] upperError = new double[horizon];
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
            RangeVector forecast = extrapolate.rangeVector;
            assert (forecast.values.length == horizon);
            assert (extrapolate.timeStamps.length == horizon);
            assert (extrapolate.lowerTimeStamps.length == horizon);
            assert (extrapolate.upperTimeStamps.length == horizon);
            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;
            // repeated invocations of extrapolate should return same result
            // for the same values of correction,centrality
            assertArrayEquals(forecast.values, alternative.values, 1e-6f);
            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);
            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);
            for (int i = 0; i < horizon; i++) {
                // check ranges
                // process() is invoked with timestamp 0L below, so forecast
                // timestamps stay 0
                assertEquals(extrapolate.timeStamps[i], 0);
                assertEquals(extrapolate.upperTimeStamps[i], 0);
                assertEquals(extrapolate.lowerTimeStamps[i], 0);
                assert (forecast.values[i] >= forecast.lower[i]);
                assert (forecast.values[i] <= forecast.upper[i]);
                // compute errors
                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {
                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];
                    error[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];
                    lowerError[i] += t * t;
                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];
                    upperError[i] += t * t;
                }
            }
            forest.process(dataWithKeys.data[j], 0L);
        }
        System.out.println(forest.getTransformMethod().name() + " RMSE (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Lower (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Upper (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
    }

    /**
     * TIME_AUGMENTED mode on a constant-valued stream with (roughly) 1000-tick
     * spacing: forecast values should reproduce the constant exactly and the
     * forecast timestamps should track 1000 * (count + i) within one unit.
     */
    @ParameterizedTest
    @ValueSource(booleans = { true, false })
    void timeAugmentedTest(boolean normalize) {
        int shingleSize = 4;
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int baseDimensions = 1;
        int horizon = 10;
        int count = 0;
        int dimensions = baseDimensions * shingleSize;
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)
                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(normalize).build();
        // the jitter seed is intentionally random; it is printed for reproducibility
        long seed = new Random().nextLong();
        double[] data = new double[] { 1.0 };
        System.out.println("seed = " + seed);
        Random rng = new Random(seed);
        for (int i = 0; i < 200; i++) {
            long time = 1000L * count + rng.nextInt(100);
            forest.process(data, time);
            ++count;
        }
        TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
        RangeVector range = extrapolate.rangeVector;
        assert (range.values.length == baseDimensions * horizon);
        assert (extrapolate.timeStamps.length == horizon);
        assert (extrapolate.lowerTimeStamps.length == horizon);
        assert (extrapolate.upperTimeStamps.length == horizon);
        /*
         * the forecasted time stamps should be close to 1000 * (count + i) the data
         * values should remain as in data[]
         */
        for (int i = 0; i < horizon; i++) {
            assertEquals(range.values[i], data[0]);
            assertEquals(range.upper[i], data[0]);
            assertEquals(range.lower[i], data[0]);
            assert (Math.abs(Math.round(extrapolate.timeStamps[i] * 0.001) - count - i) <= 1);
            assert (extrapolate.timeStamps[i] >= extrapolate.lowerTimeStamps[i]);
            assert (extrapolate.upperTimeStamps[i] >= extrapolate.timeStamps[i]);
        }
    }

    /**
     * STREAMING_IMPUTE mode with ~20% of readings randomly dropped; forecasts
     * are made after each accepted reading, so errors are measured against the
     * next (unseen) values, hence the j + i + 1 indexing below.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void streamingImputeTest(TransformMethod method) {
        int shingleSize = 8;
        int numberOfTrees = 100;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 4 * sampleSize;
        int outputAfter = sampleSize;
        // change this to try different number of attributes,
        int baseDimensions = 1;
        int dropped = 0;
        long seed = 2022L;
        // the following simulates random drops
        long dropSeed = 7L;
        Random dropPRG = new Random(dropSeed);
        System.out.println("seed = " + seed);
        System.out.println("dropping seed = " + dropSeed);
        int dimensions = baseDimensions * shingleSize;
        // NOTE(review): the forest seed is a fixed 0 here while `seed` only drives
        // the data generator — presumably intentional, but worth confirming
        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)
                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STREAMING_IMPUTE)
                .transformMethod(method).imputationMethod(RCF).build();
        // limited to shingleSize/2+1 due to the differenced methods
        int horizon = shingleSize / 2 + 1;
        double[] error = new double[horizon];
        double[] lowerError = new double[horizon];
        double[] upperError = new double[horizon];
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                100, 5, seed, baseDimensions, true);
        System.out.println(dataWithKeys.changes.length + " anomalies injected ");
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            if (dropPRG.nextDouble() < 0.2) {
                ++dropped;
            } else {
                // note that the forecast does not change without a new reading in streaming
                // impute
                // in this case the forecast corresponds to j+1 .. j + horizon
                // so we will add the j'th entry and then measure error against j+1 ...
                // j+horizon values
                long newStamp = 1000L * j + 10 * dropPRG.nextInt(10) - 5;
                forest.process(dataWithKeys.data[j], newStamp);
                TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);
                RangeVector forecast = extrapolate.rangeVector;
                assert (forecast.values.length == horizon);
                assert (extrapolate.timeStamps.length == horizon);
                RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;
                // repeated invocations of extrapolate should return same result
                // for the same values of correction,centrality
                assertArrayEquals(forecast.values, alternative.values, 1e-6f);
                assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);
                assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);
                for (int i = 0; i < horizon; i++) {
                    // check ranges
                    assert (forecast.values[i] >= forecast.lower[i]);
                    assert (forecast.values[i] <= forecast.upper[i]);
                    assertEquals(extrapolate.timeStamps[i], 0);
                    assertEquals(extrapolate.upperTimeStamps[i], 0);
                    assertEquals(extrapolate.lowerTimeStamps[i], 0);
                    // compute errors
                    // NOTE the +1 since we are predicting the unseen values in the data
                    if (j > outputAfter + shingleSize - 1 && j + i + 1 < dataWithKeys.data.length) {
                        double t = dataWithKeys.data[j + i + 1][0] - forecast.values[i];
                        error[i] += t * t;
                        t = dataWithKeys.data[j + i + 1][0] - forecast.lower[i];
                        lowerError[i] += t * t;
                        t = dataWithKeys.data[j + i + 1][0] - forecast.upper[i];
                        upperError[i] += t * t;
                    }
                }
            }
        }
        System.out.println("Impute with " + dropped + " dropped values from " + dataWithKeys.data.length + " values");
        System.out.println(forest.getTransformMethod().name() + " RMSE (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Lower (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
        System.out.println("RMSE Upper (as horizon increases)");
        for (int i = 0; i < horizon; i++) {
            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);
            System.out.print(Math.sqrt(t) + " ");
        }
        System.out.println();
    }
}
| 563 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/SequentialAnalysisTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import static java.lang.Math.min;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import java.util.Random;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.parkservices.returntypes.AnalysisDescriptor;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
/**
 * Tests that the batch entry points in {@code SequentialAnalysis} produce the
 * same anomalies (grade, internal timestamp, and where checked, score) as an
 * incrementally driven {@link ThresholdedRandomCutForest} / {@link RCFCaster}
 * configured with identical parameters and seed.
 */
public class SequentialAnalysisTest {

    /**
     * Full-parameter overload of detectAnomalies: randomized forest shape,
     * explicit outputAfter/timeDecay/transformDecay; grades and timestamps of
     * every flagged point must match the incremental run exactly.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void AnomalyTest(TransformMethod method) {
        int sampleSize = 256;
        // top-level seed is random but printed for reproducibility
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 1; // just once since testing exact equality
        int length = 40 * sampleSize;
        for (int i = 0; i < numTrials; i++) {
            int numberOfTrees = 30 + rng.nextInt(20);
            int outputAfter = 1 + rng.nextInt(50);
            int shingleSize = 1 + rng.nextInt(15);
            int baseDimensions = 1 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            double timeDecay = 0.1 / sampleSize;
            double transformDecay = 1.0 / sampleSize;
            double fraction = 1.0 * outputAfter / sampleSize;
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter)
                    .transformMethod(method).timeDecay(timeDecay).transformDecay(transformDecay)
                    .internalShinglingEnabled(true).initialAcceptFraction(fraction).shingleSize(shingleSize).build();
            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                    rng.nextLong(), baseDimensions);
            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,
                    sampleSize, numberOfTrees, timeDecay, outputAfter, method, transformDecay, forestSeed);
            int count = 0;
            for (double[] point : dataWithKeys.data) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);
                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());
                    ++count;
                }
            }
            // every anomaly in the sequential result must have been matched
            assertTrue(count == result.size());
        }
    }

    /**
     * Short overload of detectAnomalies (defaults for trees/outputAfter and
     * transformDecay == timeDecay); additionally compares RCF scores.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void AnomalyTest2(TransformMethod method) {
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 1; // just once since testing exact equality
        int length = 40 * sampleSize;
        for (int i = 0; i < numTrials; i++) {
            int outputAfter = sampleSize / 4;
            int shingleSize = 1 + rng.nextInt(15);
            int baseDimensions = 1 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            double timeDecay = 0.1 / sampleSize;
            double fraction = 1.0 * outputAfter / sampleSize;
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).randomSeed(forestSeed).transformMethod(method).timeDecay(timeDecay)
                    .internalShinglingEnabled(true).transformDecay(timeDecay).initialAcceptFraction(fraction)
                    .shingleSize(shingleSize).build();
            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                    rng.nextLong(), baseDimensions);
            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,
                    sampleSize, timeDecay, method, forestSeed);
            int count = 0;
            for (double[] point : dataWithKeys.data) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);
                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());
                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);
                    ++count;
                }
            }
            assertTrue(count == result.size());
        }
    }

    /**
     * Overload using DEFAULT_SAMPLE_SIZE and a randomized transformDecay;
     * again compares grade, timestamp and score of every flagged point.
     */
    @ParameterizedTest
    @EnumSource(TransformMethod.class)
    public void AnomalyTest3(TransformMethod method) {
        int sampleSize = DEFAULT_SAMPLE_SIZE;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 1; // just once since testing exact equality
        int length = 40 * sampleSize;
        for (int i = 0; i < numTrials; i++) {
            int outputAfter = sampleSize / 4;
            int shingleSize = 1 + rng.nextInt(15);
            int baseDimensions = 1 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            double timeDecay = 0.1 / sampleSize;
            double transformDecay = (1.0 + rng.nextDouble()) / sampleSize;
            double fraction = 1.0 * outputAfter / sampleSize;
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).randomSeed(forestSeed).transformMethod(method).timeDecay(timeDecay)
                    .internalShinglingEnabled(true).transformDecay(transformDecay).initialAcceptFraction(fraction)
                    .shingleSize(shingleSize).build();
            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                    rng.nextLong(), baseDimensions);
            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,
                    timeDecay, method, transformDecay, forestSeed);
            int count = 0;
            for (double[] point : dataWithKeys.data) {
                AnomalyDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);
                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());
                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);
                    ++count;
                }
            }
            assertTrue(count == result.size());
        }
    }

    /**
     * For each calibration mode: forecastWithAnomalies must agree with an
     * incrementally driven RCFCaster — same anomalies, and the final forecast
     * (values/upper/lower), interval precision and error means must match the
     * caster's last descriptor.
     */
    @ParameterizedTest
    @EnumSource(Calibration.class)
    public void ForecasterTest(Calibration calibration) {
        int sampleSize = 256;
        long seed = new Random().nextLong();
        System.out.println(" seed " + seed);
        Random rng = new Random(seed);
        int numTrials = 1; // just once since testing exact equality
        int length = 4 * sampleSize;
        for (int i = 0; i < numTrials; i++) {
            int numberOfTrees = 50;
            int outputAfter = 1 + rng.nextInt(50);
            int shingleSize = 2 + rng.nextInt(15);
            int forecastHorizon = min(4 * shingleSize, 10);
            int errorHorizon = 100;
            int baseDimensions = 1 + rng.nextInt(5);
            int dimensions = baseDimensions * shingleSize;
            long forestSeed = rng.nextLong();
            double timeDecay = 0.1 / sampleSize;
            double transformDecay = 1.0 / sampleSize;
            double fraction = 1.0 * outputAfter / sampleSize;
            RCFCaster first = new RCFCaster.Builder().dimensions(dimensions).numberOfTrees(numberOfTrees)
                    .randomSeed(forestSeed).outputAfter(outputAfter).transformMethod(TransformMethod.NORMALIZE)
                    .timeDecay(timeDecay).transformDecay(transformDecay).internalShinglingEnabled(true)
                    .forecastHorizon(forecastHorizon).errorHorizon(errorHorizon).calibration(calibration)
                    .initialAcceptFraction(fraction).shingleSize(shingleSize).build();
            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,
                    rng.nextLong(), baseDimensions);
            AnalysisDescriptor descriptor = SequentialAnalysis.forecastWithAnomalies(dataWithKeys.data, shingleSize,
                    sampleSize, timeDecay, outputAfter, TransformMethod.NORMALIZE, transformDecay, forecastHorizon,
                    errorHorizon, 0.1, calibration, forestSeed);
            List<AnomalyDescriptor> result = descriptor.getAnomalies();
            int count = 0;
            // keep the last descriptor so the final forecast can be compared
            ForecastDescriptor last = null;
            for (double[] point : dataWithKeys.data) {
                ForecastDescriptor firstResult = first.process(point, 0L);
                if (firstResult.getAnomalyGrade() > 0) {
                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);
                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());
                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);
                    ++count;
                }
                last = firstResult;
            }
            assertTrue(count == result.size());
            RangeVector sequential = descriptor.getForecastDescriptor().getTimedForecast().rangeVector;
            RangeVector current = last.getTimedForecast().rangeVector;
            assertArrayEquals(current.values, sequential.values, 1e-3f);
            assertArrayEquals(current.upper, sequential.upper, 1e-3f);
            assertArrayEquals(current.lower, sequential.lower, 1e-3f);
            assertArrayEquals(descriptor.getForecastDescriptor().getIntervalPrecision(), last.getIntervalPrecision(),
                    1e-3f);
            assertArrayEquals(descriptor.getForecastDescriptor().getErrorMean(), last.getErrorMean(), 1e-3f);
        }
    }
}
| 564 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/RCFCasterTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.parkservices.ErrorHandler.MAX_ERROR_HORIZON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
public class RCFCasterTest {
/**
 * Walks a single mutable builder through a sequence of configurations and
 * checks that build() rejects each invalid state with
 * IllegalArgumentException and accepts each valid one.
 */
@Test
public void constructorTest() {
    RCFCaster.Builder builder = new RCFCaster.Builder();
    // a completely unconfigured builder must be rejected
    assertThrows(IllegalArgumentException.class, () -> builder.build());
    // a negative forecast horizon is invalid
    builder.forecastHorizon(-1);
    assertThrows(IllegalArgumentException.class, () -> builder.build());
    // valid horizon but a zero shingle size is still invalid
    builder.forecastHorizon(2).shingleSize(0);
    assertThrows(IllegalArgumentException.class, () -> builder.build());
    // minimal valid configuration
    builder.shingleSize(1).dimensions(1);
    assertDoesNotThrow(() -> builder.build());
    // a caster requires internal shingling
    builder.internalShinglingEnabled(false);
    assertThrows(IllegalArgumentException.class, () -> builder.build());
    builder.internalShinglingEnabled(true);
    assertDoesNotThrow(() -> builder.build());
    // neither alternative forest mode is supported by RCFCaster
    builder.forestMode(ForestMode.STREAMING_IMPUTE);
    assertThrows(IllegalArgumentException.class, () -> builder.build());
    builder.forestMode(ForestMode.TIME_AUGMENTED);
    assertThrows(IllegalArgumentException.class, () -> builder.build());
}
/**
 * A validly built caster must still reject malformed inputs: an empty row in
 * processSequentially, and a missing-value index array passed to process.
 */
@Test
public void configTest() {
    RCFCaster.Builder builder = new RCFCaster.Builder().dimensions(1).shingleSize(1).forecastHorizon(1);
    RCFCaster forecaster = builder.build();
    double[][] emptyRow = new double[][] { new double[0] };
    assertThrows(IllegalArgumentException.class, () -> forecaster.processSequentially(emptyRow));
    assertThrows(IllegalArgumentException.class, () -> forecaster.process(new double[1], 0L, new int[1]));
}
/**
 * Exercises both ErrorHandler constructors: the builder-based one (horizon
 * and dimension validation, MAX_ERROR_HORIZON cap) and the raw state
 * constructor, whose array-length / sequence-index / percentile arguments
 * must be mutually consistent.
 */
@Test
public void errorHandlerConstructorTest() {
    RCFCaster.Builder builder = new RCFCaster.Builder();
    // builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
    // .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
    // .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
    // .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
    // .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125);
    // unconfigured builder
    assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));
    // error horizon must be at least the forecast horizon
    builder.errorHorizon(1).forecastHorizon(2);
    assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));
    // horizons consistent, but dimensions still missing
    builder.errorHorizon(2).forecastHorizon(2);
    assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));
    builder.dimensions(1);
    assertDoesNotThrow(() -> new ErrorHandler(builder));
    // the error horizon is capped at MAX_ERROR_HORIZON
    builder.errorHorizon(MAX_ERROR_HORIZON + 1);
    assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));
    // raw-state constructor: the two array arguments must either both be
    // present (with consistent lengths) or both be null
    assertDoesNotThrow(() -> new ErrorHandler(1, 1, 1, 0.1, 1, new float[2], new float[6], new float[1], null));
    assertDoesNotThrow(() -> new ErrorHandler(1, 1, 1, 0.1, 1, null, null, new float[2], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, 1, 0.1, 1, new float[2], null, new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, 1, 0.1, 1, null, new float[6], new float[1], null));
    // remaining cases: inconsistent horizon / sequence-index / length /
    // percentile combinations must all be rejected
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 0, 1, 0.1, 2, new float[2], new float[6], new float[2], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 2, 1, 0.1, 1, new float[2], new float[6], new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, -1, 0.1, 1, new float[2], new float[6], new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, 0, 0.1, 0, new float[2], new float[6], new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, -1, 0.1, 1, new float[2], new float[6], new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, 0, 0.1, 3, new float[2], new float[6], new float[3], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, 0, 0.6, 1, new float[2], new float[6], new float[1], null));
    assertThrows(IllegalArgumentException.class,
            () -> new ErrorHandler(1, 1, -1, 0.1, 2, new float[2], new float[6], new float[2], null));
}
    /**
     * Exercises {@code ErrorHandler.calibrate} plus the low-level helpers
     * ({@code adjustMinimal}, {@code interpolatedMedian}, {@code adjust}),
     * covering both argument validation and the arithmetic of the NONE,
     * MINIMAL and SIMPLE calibration modes.
     */
    @Test
    public void testCalibrate() {
        // forecastHorizon 2 x dimensions 2 => calibration vectors must have length 4
        ErrorHandler e = new ErrorHandler(new RCFCaster.Builder().errorHorizon(2).forecastHorizon(2).dimensions(2));
        // wrong-length vector is rejected
        assertThrows(IllegalArgumentException.class, () -> e.calibrate(Calibration.SIMPLE, new RangeVector(5)));
        RangeVector r = new RangeVector(4);
        // few observations so far (sequenceIndex small relative to errorHorizon)
        e.sequenceIndex = 5;
        e.lastDeviations = new float[] { 1.0f, 1.3f };
        float v = new Random().nextFloat();
        r.shift(0, v);
        // calibrating a defensive copy must leave the original vector r untouched
        e.calibrate(Calibration.SIMPLE, new RangeVector(r));
        assertEquals(r.values[0], v);
        // NONE never alters the forecast values or bounds
        e.calibrate(Calibration.NONE, r);
        assertEquals(r.values[0], v);
        assertEquals(r.upper[0], v);
        assertEquals(r.values[1], 0);
        // MINIMAL keeps the center value and widens the interval; per the asserts
        // below the widening here is 1.3 * (v + 1), derived from lastDeviations
        e.lastDeviations = new float[] { v + 1.0f, 1.3f };
        e.calibrate(Calibration.MINIMAL, r);
        assertEquals(r.values[0], v);
        assertEquals(r.values[1], 0);
        assertEquals(r.upper[0], v + 1.3 * (v + 1), 1e-6f);
        assertEquals(r.lower[0], v - 1.3 * (v + 1), 1e-6f);
        // with ample history, SIMPLE shifts the forecast by the stored error
        // distribution (here 2 * v), so the calibrated value becomes 3 * v
        e.sequenceIndex = 10000;
        e.errorHorizon = 1000;
        RangeVector newR = new RangeVector(4);
        newR.shift(0, v);
        e.errorDistribution.shift(0, 2 * v);
        e.calibrate(Calibration.SIMPLE, newR);
        assertEquals(newR.values[0], 3 * v, 1e-6f);
        assertEquals(newR.values[1], 0);
        // adjustMinimal validates lengths and the starting index
        assertThrows(IllegalArgumentException.class, () -> e.adjustMinimal(0, new RangeVector(10), new RangeVector(9)));
        assertThrows(IllegalArgumentException.class,
                () -> e.adjustMinimal(10, new RangeVector(10), new RangeVector(10)));
        assertThrows(IllegalArgumentException.class,
                () -> e.adjustMinimal(-1, new RangeVector(10), new RangeVector(10)));
        // interpolatedMedian requires a non-null array of exactly the stated length
        assertThrows(IllegalArgumentException.class, () -> e.interpolatedMedian(new double[6], 25));
        assertThrows(IllegalArgumentException.class, () -> e.interpolatedMedian(null, 25));
        assertDoesNotThrow(() -> e.interpolatedMedian(new double[25], 25));
        // adjust validates lengths and the starting index as well
        assertThrows(IllegalArgumentException.class, () -> e.adjust(0, new RangeVector(9), new RangeVector(10)));
        assertThrows(IllegalArgumentException.class, () -> e.adjust(9, new RangeVector(9), new RangeVector(9)));
        assertThrows(IllegalArgumentException.class, () -> e.adjust(-1, new RangeVector(9), new RangeVector(9)));
        // inconsistent constructor arguments are still rejected
        assertThrows(IllegalArgumentException.class,
                () -> new ErrorHandler(1, 1, 0, 0.1, 2, new float[2], new float[6], new float[1], null));
    }
    /**
     * End-to-end parity test for {@link RCFCaster} across all calibration modes.
     * Four identically seeded casters are run; pairs differing only in bounding
     * box cache fraction must produce identical scores and forecasts, the error
     * handler's aggregate statistics must agree with values recomputed from the
     * raw error vectors, and sequential processing must match pointwise
     * processing up to floating point reassociation.
     */
    @ParameterizedTest
    @EnumSource(Calibration.class)
    void testRCFCast(Calibration calibration) {
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 2 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 1;
        int forecastHorizon = 5; // speeding up
        int shingleSize = 10;
        int outputAfter = 32;
        int errorHorizon = 256;
        // fresh random seed each run; printed so failures can be reproduced
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                50, 5, seed, baseDimensions, false);
        int dimensions = baseDimensions * shingleSize;
        TransformMethod transformMethod = TransformMethod.NORMALIZE;
        // caster and shadow differ only in irrelevant options (center of mass,
        // sequence indexes); they must therefore score and forecast identically
        RCFCaster caster = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
                .centerOfMassEnabled(true).storeSequenceIndexesEnabled(true) // neither is relevant
                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125).build();
        RCFCaster shadow = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125)
                .boundingBoxCacheFraction(0).build();
        // secondShadow/thirdShadow are used for processSequentially, with
        // opposite extremes of bounding box caching (0 versus 1.0)
        RCFCaster secondShadow = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125)
                .boundingBoxCacheFraction(0).build();
        RCFCaster thirdShadow = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125)
                .boundingBoxCacheFraction(1.0).build();
        // testing scoring strategies
        caster.setScoringStrategy(ScoringStrategy.MULTI_MODE);
        shadow.setScoringStrategy(ScoringStrategy.MULTI_MODE);
        // ensuring/testing that the parameters are the same; otherwise the
        // grades/scores cannot
        // be the same
        caster.setLowerThreshold(1.1);
        shadow.setLowerThreshold(1.1);
        secondShadow.setLowerThreshold(1.1);
        thirdShadow.setLowerThreshold(1.1);
        caster.setInitialThreshold(2.0);
        shadow.setInitialThreshold(2.0);
        secondShadow.setInitialThreshold(2.0);
        thirdShadow.setInitialThreshold(2.0);
        caster.setScoreDifferencing(0.4);
        shadow.setScoreDifferencing(0.4);
        secondShadow.setScoreDifferencing(0.4);
        thirdShadow.setScoreDifferencing(0.4);
        assert (caster.errorHandler.errorHorizon == errorHorizon);
        assert (caster.errorHorizon == errorHorizon);
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            ForecastDescriptor result = caster.process(dataWithKeys.data[j], 0L);
            ForecastDescriptor shadowResult = shadow.process(dataWithKeys.data[j], 0L);
            // scores and full forecast range (value/upper/lower) must agree
            assertEquals(result.getRCFScore(), shadowResult.getRCFScore(), 1e-6);
            assertArrayEquals(shadowResult.getTimedForecast().rangeVector.values,
                    result.getTimedForecast().rangeVector.values, 1e-6f);
            assertArrayEquals(shadowResult.getTimedForecast().rangeVector.upper,
                    result.getTimedForecast().rangeVector.upper, 1e-6f);
            assertArrayEquals(shadowResult.getTimedForecast().rangeVector.lower,
                    result.getTimedForecast().rangeVector.lower, 1e-6f);
            int sequenceIndex = caster.errorHandler.sequenceIndex;
            if (caster.forest.isOutputReady()) {
                // recompute per-position mean errors from the raw error vectors
                // and compare against the handler's cached means
                float[] meanArray = caster.errorHandler.getErrorMean();
                for (int i = 0; i < forecastHorizon; i++) {
                    // number of stored errors available for lead time (i + 1)
                    int len = (sequenceIndex > errorHorizon + i + 1) ? errorHorizon : sequenceIndex - i - 1;
                    if (len > 0) {
                        for (int k = 0; k < baseDimensions; k++) {
                            int pos = i * baseDimensions + k;
                            double[] array = caster.errorHandler.getErrorVector(len, (i + 1), k, pos,
                                    RCFCaster.defaultError);
                            double mean = Arrays.stream(array).sum() / len;
                            assertEquals(meanArray[pos], mean, (1 + Math.abs(mean)) * 1e-4);
                            double[] another = caster.errorHandler.getErrorVector(len, (i + 1), k, pos,
                                    RCFCaster.alternateError);
                            // smape; calibration may increase errors
                            assertTrue(calibration != Calibration.NONE || Arrays.stream(another).sum() < 3 * len);
                        }
                    }
                }
                // interval precision values are probabilities in [0, 1]
                float[] intervalPrecision = shadow.errorHandler.getIntervalPrecision();
                for (float y : intervalPrecision) {
                    assertTrue(0 <= y && y <= 1.0);
                }
                assertArrayEquals(intervalPrecision, result.getIntervalPrecision(), 1e-6f);
                // adders stay at 0 and multipliers at 1 for this configuration
                float[] test = new float[forecastHorizon * baseDimensions];
                assertArrayEquals(caster.errorHandler.getAdders().values, test, 1e-6f);
                Arrays.fill(test, 1);
                assertArrayEquals(caster.errorHandler.getMultipliers().values, test, 1e-6f);
            }
        }
        // 0 length arrays do not change state
        secondShadow.processSequentially(new double[0][]);
        List<AnomalyDescriptor> firstList = secondShadow.processSequentially(dataWithKeys.data);
        List<AnomalyDescriptor> thirdList = thirdShadow.processSequentially(dataWithKeys.data);
        // null does not change state
        thirdShadow.processSequentially(null);
        // calibration fails
        assertThrows(IllegalArgumentException.class, () -> caster.extrapolate(forecastHorizon - 1));
        assertThrows(IllegalArgumentException.class, () -> caster.extrapolate(forecastHorizon + 1));
        TimedRangeVector forecast1 = caster.extrapolate(forecastHorizon);
        TimedRangeVector forecast2 = shadow.extrapolate(forecastHorizon);
        TimedRangeVector forecast3 = secondShadow.extrapolate(forecastHorizon);
        TimedRangeVector forecast4 = thirdShadow.extrapolate(forecastHorizon);
        // pointwise-processed pair and sequentially-processed pair agree exactly
        assertArrayEquals(forecast1.rangeVector.values, forecast2.rangeVector.values, 1e-6f);
        assertArrayEquals(forecast3.rangeVector.values, forecast4.rangeVector.values, 1e-6f);
        // the order of floating point operations now vary
        for (int i = 0; i < forecast1.rangeVector.values.length; i++) {
            assertTrue(Math.abs(forecast1.rangeVector.values[i] - forecast3.rangeVector.values[i]) < 1e-4
                    * (1 + Math.abs(forecast1.rangeVector.values[i])));
        }
    }
    /**
     * Verifies that {@link RCFCaster} behaves exactly like an identically seeded
     * {@link ThresholdedRandomCutForest} whose extrapolation is post-processed
     * with {@code calibrate} — i.e. calibration is the only difference between
     * the two models. Also checks that extrapolation is idempotent for both.
     */
    @ParameterizedTest
    @EnumSource(Calibration.class)
    void testRCFCastThresholdedRCF(Calibration calibration) {
        int numberOfTrees = 50;
        int sampleSize = 256;
        Precision precision = Precision.FLOAT_32;
        int dataSize = 4 * sampleSize;
        // change this to try different number of attributes,
        // this parameter is not expected to be larger than 5 for this example
        int baseDimensions = 1;
        int forecastHorizon = 15;
        int shingleSize = 10;
        int outputAfter = 32;
        int errorHorizon = 256;
        // fresh random seed each run; printed so failures can be reproduced
        long seed = new Random().nextLong();
        System.out.println("seed = " + seed);
        // change the last argument seed for a different run
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,
                50, 5, seed, baseDimensions, false);
        int dimensions = baseDimensions * shingleSize;
        TransformMethod transformMethod = TransformMethod.NORMALIZE;
        RCFCaster caster = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)
                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125).build();
        // same seed and core configuration, but no caster-specific settings
        ThresholdedRandomCutForest shadow = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .randomSeed(seed + 1).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)
                .transformMethod(transformMethod).outputAfter(outputAfter).initialAcceptFraction(0.125).build();
        // ensuring that the parameters are the same; otherwise the grades/scores cannot
        // be the same
        // weighTime has to be 0
        caster.setLowerThreshold(1.1);
        shadow.setLowerThreshold(1.1);
        assertTrue(caster.errorHandler.errorHorizon == errorHorizon);
        assertTrue(caster.errorHorizon == errorHorizon);
        for (int j = 0; j < dataWithKeys.data.length; j++) {
            ForecastDescriptor result = caster.process(dataWithKeys.data[j], 0L);
            AnomalyDescriptor shadowResult = shadow.process(dataWithKeys.data[j], 0L);
            assertEquals(result.getRCFScore(), shadowResult.getRCFScore(), 1e-6f);
            // timestamps of forecasts must match exactly (they are longs)
            TimedRangeVector timedShadowForecast = shadow.extrapolate(forecastHorizon);
            assertArrayEquals(timedShadowForecast.timeStamps, result.getTimedForecast().timeStamps);
            assertArrayEquals(timedShadowForecast.upperTimeStamps, result.getTimedForecast().upperTimeStamps);
            assertArrayEquals(timedShadowForecast.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);
            // first check idempotence -- forecasts are state dependent only
            // for ThresholdedRCF
            TimedRangeVector newShadow = shadow.extrapolate(forecastHorizon);
            assertArrayEquals(newShadow.rangeVector.values, timedShadowForecast.rangeVector.values, 1e-6f);
            assertArrayEquals(newShadow.rangeVector.upper, timedShadowForecast.rangeVector.upper, 1e-6f);
            assertArrayEquals(newShadow.rangeVector.lower, timedShadowForecast.rangeVector.lower, 1e-6f);
            assertArrayEquals(newShadow.timeStamps, timedShadowForecast.timeStamps);
            assertArrayEquals(newShadow.upperTimeStamps, timedShadowForecast.upperTimeStamps);
            assertArrayEquals(newShadow.lowerTimeStamps, timedShadowForecast.lowerTimeStamps);
            // extrapolate is idempotent for RCF casters
            TimedRangeVector newVector = caster.extrapolate(forecastHorizon);
            assertArrayEquals(newVector.rangeVector.values, result.getTimedForecast().rangeVector.values, 1e-6f);
            assertArrayEquals(newVector.rangeVector.upper, result.getTimedForecast().rangeVector.upper, 1e-6f);
            assertArrayEquals(newVector.rangeVector.lower, result.getTimedForecast().rangeVector.lower, 1e-6f);
            assertArrayEquals(newVector.timeStamps, result.getTimedForecast().timeStamps);
            assertArrayEquals(newVector.upperTimeStamps, result.getTimedForecast().upperTimeStamps);
            assertArrayEquals(newVector.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);
            // only difference between RCFCaster and ThresholdedRCF is calibration
            caster.calibrate(calibration, timedShadowForecast.rangeVector);
            assertArrayEquals(timedShadowForecast.rangeVector.values, result.getTimedForecast().rangeVector.values,
                    1e-6f);
            assertArrayEquals(timedShadowForecast.rangeVector.upper, result.getTimedForecast().rangeVector.upper,
                    1e-6f);
            assertArrayEquals(timedShadowForecast.rangeVector.lower, result.getTimedForecast().rangeVector.lower,
                    1e-6f);
        }
    }
}
| 565 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/DescriptorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * Tests for the validation logic of {@link ForecastDescriptor} setters.
 */
public class DescriptorTest {
    // base dimension count used by the fixtures
    int dimensions;
    // forecast steps used by the fixtures
    int horizon;
    private ForecastDescriptor forecastDescriptor;
    @BeforeEach
    public void setUp() {
        dimensions = 4;
        horizon = 2;
        // NOTE(review): constructor args appear to be (input point, timestamp,
        // forecast horizon); with a 2-element point and horizon 7, the accepted
        // RangeVector length below is 14 = 7 * 2 — confirm against the class.
        forecastDescriptor = new ForecastDescriptor(new double[] { 2.0, 3.0 }, 0L, 7);
    }
    @Test
    public void testSet() {
        // error distribution and RMSE vectors must have length 14; any other
        // length is rejected
        assertThrows(IllegalArgumentException.class,
                () -> forecastDescriptor.setObservedErrorDistribution(new RangeVector(15)));
        assertDoesNotThrow(() -> forecastDescriptor.setObservedErrorDistribution(new RangeVector(14)));
        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setErrorRMSE(new DiVector(13)));
        assertDoesNotThrow(() -> forecastDescriptor.setErrorRMSE(new DiVector(14)));
        // expected values are absent until set; only index 0 is valid here
        assertFalse(forecastDescriptor.isExpectedValuesPresent());
        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setExpectedValues(2, new double[2], 1.0));
        forecastDescriptor.setExpectedValues(0, new double[2], 1.0);
        assertTrue(forecastDescriptor.isExpectedValuesPresent());
        assertArrayEquals(forecastDescriptor.getExpectedValuesList()[0], new double[2]);
        // setting the same slot again overwrites the previous values
        forecastDescriptor.setExpectedValues(0, new double[] { -1.0, -1.0 }, 0.5);
        assertArrayEquals(forecastDescriptor.getExpectedValuesList()[0], new double[] { -1.0, -1.0 });
    }
}
| 566 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/statistics/StatisticsTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.statistics;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import org.junit.jupiter.api.Test;
/**
 * Tests for the {@code Deviation} running statistic.
 */
public class StatisticsTest {
    /**
     * The discount (time-decay) factor handed to the constructor must lie in
     * the unit interval; values outside it are rejected.
     */
    @Test
    void constructorTest() {
        assertThrows(IllegalArgumentException.class, () -> new Deviation(-1));
        assertThrows(IllegalArgumentException.class, () -> new Deviation(2));
        assertDoesNotThrow(() -> new Deviation(new Random().nextDouble()));
    }
    /**
     * A Deviation is considered empty until it receives an update; setting the
     * count alone does not populate it, and reset() returns it to the empty
     * state while preserving the configured discount.
     */
    @Test
    void getMeanTest() {
        double decay = new Random().nextDouble();
        Deviation dev = new Deviation(decay);
        // freshly constructed: empty, mean defaults to zero
        assertEquals(0, dev.getMean());
        assertTrue(dev.isEmpty());
        // a nonzero count by itself does not make the statistic non-empty
        dev.setCount(100);
        assertTrue(dev.isEmpty());
        assertEquals(100, dev.count);
        // one observation (negative zero) flips the empty flag and bumps count
        dev.update(-0);
        assertEquals(101, dev.count);
        assertEquals(0, dev.getMean());
        assertFalse(dev.isEmpty());
        // reset clears the data but keeps the discount setting
        dev.reset();
        assertEquals(decay, dev.getDiscount());
        assertTrue(dev.isEmpty());
    }
}
| 567 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestMapperTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.testutils.MultiDimDataWithKey;
import com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public class ThresholdedRandomCutForestMapperTest {
    /**
     * With shingle size 1, internal and external shingling are equivalent:
     * two ThresholdedRandomCutForests built with the same seed but opposite
     * shingling flags must agree with each other and with a plain
     * RandomCutForest, both before and after a mapper round trip.
     */
    @Test
    public void testRoundTripStandardShingleSizeOne() {
        int dimensions = 10;
        for (int trials = 0; trials < 1; trials++) {
            long seed = new Random().nextLong();
            RandomCutForest.Builder<?> builder = RandomCutForest.builder().compact(true).dimensions(dimensions)
                    .precision(Precision.FLOAT_32).randomSeed(seed);
            // note shingleSize == 1
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                    .internalShinglingEnabled(true).anomalyRate(0.01).build();
            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).anomalyRate(0.01)
                    .forestMode(ForestMode.STANDARD).internalShinglingEnabled(false).build();
            RandomCutForest forest = builder.build();
            Random r = new Random();
            // warm up all three models on identical random points
            for (int i = 0; i < 2000 + new Random().nextInt(1000); i++) {
                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
                AnomalyDescriptor firstResult = first.process(point, 0L);
                AnomalyDescriptor secondResult = second.process(point, 0L);
                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);
                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);
                forest.update(point);
            }
            // serialize + deserialize
            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
            ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
            // update re-instantiated forest
            for (int i = 0; i < 100; i++) {
                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
                AnomalyDescriptor firstResult = first.process(point, 0L);
                AnomalyDescriptor secondResult = second.process(point, 0L);
                AnomalyDescriptor thirdResult = third.process(point, 0L);
                double score = forest.getAnomalyScore(point);
                // the round-tripped model must keep tracking the originals
                assertEquals(score, firstResult.getRCFScore(), 1e-10);
                assertEquals(score, secondResult.getRCFScore(), 1e-10);
                assertEquals(score, thirdResult.getRCFScore(), 1e-10);
                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);
                forest.update(point);
            }
        }
    }
    /**
     * Builds a ThresholdedRandomCutForest around a copy of an existing
     * RandomCutForest (via RandomCutForestMapper) and verifies the wrapped
     * copy scores identically to a natively built model, before and after a
     * ThresholdedRandomCutForestMapper round trip.
     */
    @Test
    public void testConversions() {
        int dimensions = 10;
        for (int trials = 0; trials < 10; trials++) {
            // seed printed so a failing trial can be reproduced
            long seed = new Random().nextLong();
            System.out.println("Seed " + seed);
            RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                    .precision(Precision.FLOAT_32).internalShinglingEnabled(false).randomSeed(seed).build();
            // note shingleSize == 1
            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                    .internalShinglingEnabled(false).anomalyRate(0.01).build();
            // deterministic data stream; a random number of warmup points
            Random r = new Random(seed + 1);
            for (int i = 0; i < new Random(seed + 2).nextInt(1000); i++) {
                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
                first.process(point, 0L);
                forest.update(point);
            }
            // copy the plain forest with full tree state so the wrapped copy
            // is an exact clone
            RandomCutForestMapper mapper = new RandomCutForestMapper();
            mapper.setSaveExecutorContextEnabled(true);
            mapper.setSaveTreeStateEnabled(true);
            mapper.setPartialTreeStateEnabled(true);
            RandomCutForest copyForest = mapper.toModel(mapper.toState(forest));
            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest(copyForest, 0.01, null);
            //
            for (int i = 0; i < new Random(seed + 3).nextInt(1000); i++) {
                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
                AnomalyDescriptor firstResult = first.process(point, 0L);
                AnomalyDescriptor secondResult = second.process(point, 0L);
                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);
                forest.update(point);
            }
            // serialize + deserialize
            ThresholdedRandomCutForestMapper newMapper = new ThresholdedRandomCutForestMapper();
            ThresholdedRandomCutForest third = newMapper.toModel(newMapper.toState(second));
            // update re-instantiated forest
            for (int i = 0; i < 100; i++) {
                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
                AnomalyDescriptor firstResult = first.process(point, 0L);
                AnomalyDescriptor secondResult = second.process(point, 0L);
                AnomalyDescriptor thirdResult = third.process(point, 0L);
                double score = forest.getAnomalyScore(point);
                assertEquals(score, firstResult.getRCFScore(), 1e-10);
                assertEquals(score, secondResult.getRCFScore(), 1e-10);
                assertEquals(score, thirdResult.getRCFScore(), 1e-10);
                assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);
                forest.update(point);
            }
        }
    }
@Test
public void testRoundTripStandardShingled() throws JsonProcessingException {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 4;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
RandomCutForest.Builder<?> builder = RandomCutForest.builder().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed);
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).shingleSize(shingleSize)
.internalShinglingEnabled(false).anomalyRate(0.01).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).shingleSize(shingleSize)
.internalShinglingEnabled(false).anomalyRate(0.01).build();
RandomCutForest forest = builder.build();
// thresholds should not affect scores
double value = 0.75 + 0.5 * new Random().nextDouble();
first.setLowerThreshold(value);
second.setLowerThreshold(value);
Random r = new Random();
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(10 * sampleSize, 50,
shingleSize, baseDimensions, seed);
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-4);
forest.update(point);
}
ObjectMapper jsonMapper = new ObjectMapper();
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
String json = jsonMapper.writeValueAsString(mapper.toState(second));
ThresholdedRandomCutForest third = mapper
.toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(100, 50, shingleSize,
baseDimensions, seed);
// update re-instantiated forest
for (double[] point : testData.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
AnomalyDescriptor thirdResult = third.process(point, 0L);
double score = forest.getAnomalyScore(point);
assertEquals(score, firstResult.getRCFScore(), 1e-4);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);
forest.update(point);
}
}
@Test
public void testRoundTripStandardShingledInternal() throws JsonProcessingException {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
System.out.println(" seed " + seed);
RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).internalShinglingEnabled(true)
.shingleSize(shingleSize).randomSeed(seed).build();
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize)
.anomalyRate(0.01).autoAdjust(true).boundingBoxCacheFraction(0).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize)
.anomalyRate(0.01).autoAdjust(true).build();
double value = 0.75 + 0.5 * new Random().nextDouble();
first.setLowerThreshold(value);
second.setLowerThreshold(value);
Random r = new Random();
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
seed, baseDimensions);
long count = 0;
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor firstResult = first.process(point, count);
AnomalyDescriptor secondResult = second.process(point, count);
++count;
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-4);
if (firstResult.getAnomalyGrade() > 0) {
assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
}
forest.update(point);
}
ObjectMapper jsonMapper = new ObjectMapper();
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
String json = jsonMapper.writeValueAsString(mapper.toState(second));
ThresholdedRandomCutForest third = mapper
.toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
baseDimensions);
// update re-instantiated forest
for (double[] point : testData.data) {
AnomalyDescriptor firstResult = first.process(point, count);
AnomalyDescriptor secondResult = second.process(point, count);
AnomalyDescriptor thirdResult = third.process(point, count);
++count;
double score = forest.getAnomalyScore(point);
assertEquals(score, firstResult.getRCFScore(), 1e-4);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);
forest.update(point);
}
TimedRangeVector one = first.extrapolate(10);
TimedRangeVector two = second.extrapolate(10);
assertArrayEquals(one.upperTimeStamps, two.upperTimeStamps);
assertArrayEquals(one.lowerTimeStamps, two.lowerTimeStamps);
assertArrayEquals(one.timeStamps, two.timeStamps);
assertArrayEquals(one.rangeVector.values, two.rangeVector.values, 1e-6f);
assertArrayEquals(one.rangeVector.upper, two.rangeVector.upper, 1e-6f);
assertArrayEquals(one.rangeVector.lower, two.rangeVector.lower, 1e-6f);
for (int j = 0; j < 10; j++) {
assert (one.lowerTimeStamps[j] <= one.timeStamps[j]);
assert (one.upperTimeStamps[j] >= one.timeStamps[j]);
assert (one.timeStamps[j] == count + j);
}
}
@ParameterizedTest
@EnumSource(value = TransformMethod.class)
public void testRoundTripStandardInitial(TransformMethod method) {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
.shingleSize(shingleSize).anomalyRate(0.01).autoAdjust(true).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
.shingleSize(shingleSize).anomalyRate(0.01).autoAdjust(true).build();
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
baseDimensions);
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
second = mapper.toModel(mapper.toState(second));
}
}
@ParameterizedTest
@EnumSource(value = TransformMethod.class)
public void testRoundTripStandard(TransformMethod method) {
int sampleSize = 256;
int baseDimensions = 1;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = 0;
new Random().nextLong();
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
.shingleSize(shingleSize).anomalyRate(0.01).transformMethod(method).autoAdjust(true)
.boundingBoxCacheFraction(0).weights(new double[] { 1.0 }).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
.shingleSize(shingleSize).anomalyRate(0.01).transformMethod(method).autoAdjust(true)
.weights(new double[] { 1.0 }).build();
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
seed, baseDimensions);
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
if (firstResult.getAnomalyGrade() > 0) {
assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
}
}
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
baseDimensions);
// update re-instantiated forest
for (double[] point : testData.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
AnomalyDescriptor thirdResult = third.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
}
}
    @ParameterizedTest
    @EnumSource(value = TransformMethod.class, names = { "WEIGHTED", "NORMALIZE", "NORMALIZE_DIFFERENCE", "DIFFERENCE",
            "SUBTRACT_MA" })
    public void testRoundTripAugmentedInitial(TransformMethod method) {
        // Two identically seeded TIME_AUGMENTED forests must stay in lockstep even when one
        // of them is serialized and deserialized after every single processed point.
        int sampleSize = 256;
        int baseDimensions = 2;
        int shingleSize = 8;
        int dimensions = baseDimensions * shingleSize;
        long seed = new Random().nextLong();
        // random lower threshold in [0.75, 1.0), applied identically to both forests
        double value = 0.75 + 0.25 * new Random().nextDouble();
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
                .transformMethod(method).anomalyRate(0.01).autoAdjust(true).weights(new double[] { 1.0, 2.0 }).build();
        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
                .transformMethod(method).anomalyRate(0.01).autoAdjust(true).weights(new double[] { 1.0, 2.0 }).build();
        first.setLowerThreshold(value);
        second.setLowerThreshold(value);
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
                baseDimensions);
        for (double[] point : dataWithKeys.data) {
            AnomalyDescriptor firstResult = first.process(point, 0L);
            AnomalyDescriptor secondResult = second.process(point, 0L);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
            // serialize + deserialize after every point so each intermediate state is covered
            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
            second = mapper.toModel(mapper.toState(second));
        }
    }
@Test
public void testRoundTripAugmentedInitialNone() {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
double value = 0.75 + 0.25 * new Random().nextDouble();
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
.transformMethod(TransformMethod.NONE).anomalyRate(0.01).autoAdjust(true)
.weights(new double[] { 1.0, 1.0 }).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
.transformMethod(TransformMethod.NONE).anomalyRate(0.01).autoAdjust(true)
.weights(new double[] { 1.0, 1.0 }).build();
first.setLowerThreshold(value);
second.setLowerThreshold(value);
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
baseDimensions);
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
second = mapper.toModel(mapper.toState(second));
}
}
@ParameterizedTest
@EnumSource(value = TransformMethod.class)
public void testRoundTripTimeAugmented(TransformMethod method) {
int sampleSize = 256;
int baseDimensions = 1;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
double value = 0.75 + 0.25 * new Random().nextDouble();
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
.transformMethod(method).anomalyRate(0.01).autoAdjust(true).weights(new double[] { 1.0 }).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
.transformMethod(method).anomalyRate(0.01).autoAdjust(true).weights(new double[] { 1.0 }).build();
first.setLowerThreshold(value);
second.setLowerThreshold(value);
Random r = new Random();
long count = 0;
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
seed, baseDimensions);
for (double[] point : dataWithKeys.data) {
long stamp = 100 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, stamp);
AnomalyDescriptor secondResult = second.process(point, stamp);
++count;
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
if (firstResult.getAnomalyGrade() > 0) {
assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
}
}
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
baseDimensions);
// update re-instantiated forest
for (double[] point : testData.data) {
long stamp = 100 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, 0L);
AnomalyDescriptor secondResult = second.process(point, 0L);
AnomalyDescriptor thirdResult = third.process(point, 0L);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getAnomalyGrade(), thirdResult.getAnomalyGrade(), 1e-10);
++count;
}
}
    @ParameterizedTest
    @EnumSource(value = TransformMethod.class, names = { "WEIGHTED", "NORMALIZE", "NORMALIZE_DIFFERENCE", "DIFFERENCE",
            "SUBTRACT_MA" })
    public void testRoundTripTimeAugmentedNormalize(TransformMethod method) {
        // TIME_AUGMENTED with normalized time: train two identically seeded forests on
        // jittered timestamps, round-trip one through the mapper, and confirm the restored
        // copy keeps producing the same scores.
        int sampleSize = 256;
        int baseDimensions = 2;
        int shingleSize = 4;
        int dimensions = baseDimensions * shingleSize;
        long seed = new Random().nextLong();
        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
                .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)
                .normalizeTime(true).transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize)
                .anomalyRate(0.01).weights(new double[] { 1.0, 2.0 }).build();
        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).internalShinglingEnabled(true)
                .transformMethod(method).shingleSize(shingleSize).anomalyRate(0.01).weights(new double[] { 1.0, 2.0 })
                .build();
        Random r = new Random();
        long count = 0;
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
                seed, baseDimensions);
        for (double[] point : dataWithKeys.data) {
            // jittered timestamps, roughly 1000 apart
            long stamp = 1000 * count + r.nextInt(10) - 5;
            AnomalyDescriptor firstResult = first.process(point, stamp);
            AnomalyDescriptor secondResult = second.process(point, stamp);
            ++count;
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
        }
        // serialize + deserialize
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
                baseDimensions);
        // update re-instantiated forest alongside the originals
        for (double[] point : testData.data) {
            // NOTE(review): this loop uses 100 * count while the loop above used
            // 1000 * count, so timestamps jump backwards relative to the earlier
            // stream -- confirm this is intentional
            long stamp = 100 * count + r.nextInt(10) - 5;
            AnomalyDescriptor firstResult = first.process(point, stamp);
            AnomalyDescriptor secondResult = second.process(point, stamp);
            AnomalyDescriptor thirdResult = third.process(point, stamp);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
            ++count;
        }
    }
@Test
public void testRoundTripTimeAugmentedNone() {
int sampleSize = 256;
int baseDimensions = 2;
int shingleSize = 4;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)
.normalizeTime(true).transformMethod(TransformMethod.NONE).internalShinglingEnabled(true)
.shingleSize(shingleSize).anomalyRate(0.01).weights(new double[] { 1.0, 1.0 }).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).internalShinglingEnabled(true)
.transformMethod(TransformMethod.NONE).shingleSize(shingleSize).anomalyRate(0.01)
.weights(new double[] { 1.0, 1.0 }).build();
Random r = new Random();
long count = 0;
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
seed, baseDimensions);
for (double[] point : dataWithKeys.data) {
long stamp = 1000 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, stamp);
AnomalyDescriptor secondResult = second.process(point, stamp);
++count;
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
}
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
baseDimensions);
// update re-instantiated forest
for (double[] point : testData.data) {
long stamp = 100 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, stamp);
AnomalyDescriptor secondResult = second.process(point, stamp);
AnomalyDescriptor thirdResult = third.process(point, stamp);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
++count;
}
}
    @ParameterizedTest
    @MethodSource("args")
    public void testRoundTripImputeInitial(TransformMethod transformMethod, ImputationMethod imputationMethod) {
        // STREAMING_IMPUTE mode: two identically seeded forests see a stream where ~10%
        // of points are dropped (forcing imputation); one forest is serialized and
        // deserialized after every step and must keep matching the untouched twin.
        int sampleSize = 256;
        int baseDimensions = 2;
        int shingleSize = 4;
        int dimensions = baseDimensions * shingleSize;
        long seed = new Random().nextLong();
        // printed so a failing run can be reproduced from the seed
        System.out.println(seed);
        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)
                .transformMethod(transformMethod).imputationMethod(imputationMethod)
                .fillValues(new double[] { 1.0, 2.0 }).anomalyRate(0.01).build();
        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
                .forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)
                .transformMethod(transformMethod).imputationMethod(imputationMethod)
                .fillValues(new double[] { 1.0, 2.0 }).anomalyRate(0.01).build();
        // fixed seed so the drop pattern (below) is deterministic
        Random r = new Random(0);
        long count = 0;
        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
                baseDimensions);
        for (double[] point : dataWithKeys.data) {
            // process only ~90% of the points; skipped points create gaps to impute
            if (r.nextDouble() > 0.1) {
                long stamp = 1000 * count + r.nextInt(10) - 5;
                AnomalyDescriptor firstResult = first.process(point, stamp);
                AnomalyDescriptor secondResult = second.process(point, stamp);
                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
            }
            ++count;
            // serialize + deserialize after every step so each intermediate state is covered
            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
            second = mapper.toModel(mapper.toState(second));
        }
    }
@ParameterizedTest
@MethodSource("args")
public void testRoundTripImpute(TransformMethod transformMethod, ImputationMethod imputationMethod) {
int sampleSize = 256;
int baseDimensions = 1;
int shingleSize = 8;
int dimensions = baseDimensions * shingleSize;
long seed = new Random().nextLong();
ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)
.internalShinglingEnabled(true).shingleSize(shingleSize).transformMethod(transformMethod)
.imputationMethod(imputationMethod).fillValues(new double[] { 1.0 }).anomalyRate(0.01).build();
ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
.dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
.forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)
.transformMethod(transformMethod).imputationMethod(imputationMethod).fillValues(new double[] { 1.0 })
.anomalyRate(0.01).build();
Random r = new Random();
long count = 0;
MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
seed, baseDimensions);
for (double[] point : dataWithKeys.data) {
if (r.nextDouble() > 0.1) {
long stamp = 1000 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, stamp);
AnomalyDescriptor secondResult = second.process(point, stamp);
assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
}
++count;
}
;
// serialize + deserialize
ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
baseDimensions);
// update re-instantiated forest
for (double[] point : testData.data) {
long stamp = 1000 * count + r.nextInt(10) - 5;
AnomalyDescriptor firstResult = first.process(point, stamp);
// AnomalyDescriptor secondResult = second.process(point, stamp);
AnomalyDescriptor thirdResult = third.process(point, stamp);
// assertEquals(firstResult.getRcfScore(), secondResult.getRcfScore(), 1e-10);
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
++count;
}
}
    // Cartesian product of every TransformMethod with every ImputationMethod; serves
    // as the @MethodSource for the impute round-trip tests above.
    static Stream<Arguments> args() {
        return transformMethodStream().flatMap(
                classParameter -> imputationMethod().map(testParameter -> Arguments.of(classParameter, testParameter)));
    }
    // All available imputation methods.
    static Stream<ImputationMethod> imputationMethod() {
        return Stream.of(ImputationMethod.values());
    }
    // All available transform methods.
    static Stream<TransformMethod> transformMethodStream() {
        return Stream.of(TransformMethod.values());
    }
}
| 568 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFJsonResource.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import lombok.Getter;
/**
 * Classpath locations of serialized V2 ThresholdedRandomCutForest states stored as
 * JSON, consumed by the V2-to-V3 state conversion tests.
 */
public enum V2TRCFJsonResource {

    TRCF_1("state_1.json"), TRCF_2("state_2.json");

    private final String resource;

    V2TRCFJsonResource(String resource) {
        this.resource = resource;
    }

    /**
     * @return the resource file name, resolved relative to the test package
     */
    public String getResource() {
        return resource;
    }
}
| 569 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFToV3StateConverterTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Random;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.protostuff.ProtostuffIOUtil;
import io.protostuff.Schema;
import io.protostuff.runtime.RuntimeSchema;
/**
 * Verifies that serialized V2 ThresholdedRandomCutForest states (both JSON and
 * protostuff/Base64 encodings) can be restored into working V3 models.
 */
public class V2TRCFToV3StateConverterTest {

    private final ThresholdedRandomCutForestMapper trcfMapper = new ThresholdedRandomCutForestMapper();

    @ParameterizedTest
    @EnumSource(V2TRCFJsonResource.class)
    public void testJson(V2TRCFJsonResource jsonResource) throws JsonProcessingException {
        String json = getStateFromFile(jsonResource.getResource());
        assertNotNull(json);
        ObjectMapper mapper = new ObjectMapper();
        // V2 states may use different property casing; be lenient when binding
        mapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
        ThresholdedRandomCutForestState state = mapper.readValue(json, ThresholdedRandomCutForestState.class);
        ThresholdedRandomCutForest forest = trcfMapper.toModel(state);
        // fix: assert before exercising the model (previously asserted only afterwards)
        assertNotNull(forest);
        Random r = new Random(0);
        // exercise the restored model well past a full sample window to make sure it
        // can continue processing without error
        for (int i = 0; i < 20000; i++) {
            double[] point = r.ints(forest.getForest().getDimensions(), 0, 50).asDoubleStream().toArray();
            forest.process(point, 0L);
        }
    }

    @ParameterizedTest
    @EnumSource(V2TRCFByteBase64Resource.class)
    public void testByteBase64(V2TRCFByteBase64Resource byteBase64Resource) {
        String byteBase64 = getStateFromFile(byteBase64Resource.getResource());
        assertNotNull(byteBase64);
        Schema<ThresholdedRandomCutForestState> trcfSchema = RuntimeSchema
                .getSchema(ThresholdedRandomCutForestState.class);
        byte[] bytes = Base64.getDecoder().decode(byteBase64);
        ThresholdedRandomCutForestState state = trcfSchema.newMessage();
        ProtostuffIOUtil.mergeFrom(bytes, state, trcfSchema);
        ThresholdedRandomCutForest forest = trcfMapper.toModel(state);
        assertNotNull(forest);
    }

    /**
     * Reads the named classpath resource (UTF-8, newlines dropped) into a single
     * string, failing the test if the resource is missing or unreadable.
     */
    private String getStateFromFile(String resourceFile) {
        InputStream raw = V2TRCFToV3StateConverterTest.class.getResourceAsStream(resourceFile);
        if (raw == null) {
            // fix: fail fast with a clear message instead of a NullPointerException
            // inside the reader chain when the resource is absent
            fail("Resource not found: " + resourceFile);
            return null;
        }
        try (BufferedReader rr = new BufferedReader(new InputStreamReader(raw, StandardCharsets.UTF_8))) {
            StringBuilder b = new StringBuilder();
            String line;
            while ((line = rr.readLine()) != null) {
                b.append(line);
            }
            return b.toString();
        } catch (IOException e) {
            fail("Unable to load resource");
        }
        return null;
    }
}
| 570 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFByteBase64Resource.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import lombok.Getter;
/**
 * Classpath locations of serialized V2 ThresholdedRandomCutForest states stored as
 * Base64-encoded protostuff bytes, consumed by the V2-to-V3 conversion tests.
 */
public enum V2TRCFByteBase64Resource {

    TRCF_STATE_1("byte_base64_1.txt"), TRCF_STATE_2("byte_base64_2.txt");

    private final String resource;

    V2TRCFByteBase64Resource(String resource) {
        this.resource = resource;
    }

    /**
     * @return the resource file name, resolved relative to the test package
     */
    public String getResource() {
        return resource;
    }
}
| 571 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/RCFCasterMapperTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Random;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.ForecastDescriptor;
import com.amazon.randomcutforest.parkservices.RCFCaster;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * Round-trip tests for {@code RCFCasterMapper}: a trained (or partially trained)
 * RCFCaster serialized and deserialized through the mapper must reproduce the
 * original's forecasts and error-handler statistics exactly.
 */
public class RCFCasterMapperTest {

    @ParameterizedTest
    @CsvSource({ "SIMPLE,1", "MINIMAL,1", "NONE,1", "SIMPLE,2", "MINIMAL,2", "NONE,2" })
    public void testRoundTripStandardShingleSizeEight(String calibrationString, int inputLength) {
        int shingleSize = 8;
        int dimensions = inputLength * shingleSize;
        int forecastHorizon = shingleSize * 3;
        for (int trials = 0; trials < 1; trials++) {
            long seed = new Random().nextLong();
            // printed so a failing run can be reproduced from the seed
            System.out.println(" seed " + seed);
            // note shingleSize == 8
            // fix: removed a redundant calibration(Calibration.MINIMAL) call that was
            // immediately overridden by the parameterized calibration below
            RCFCaster first = RCFCaster.builder().compact(true).dimensions(dimensions).precision(Precision.FLOAT_32)
                    .randomSeed(seed).internalShinglingEnabled(true).anomalyRate(0.01).shingleSize(shingleSize)
                    .forecastHorizon(forecastHorizon).calibration(Calibration.valueOf(calibrationString))
                    .transformMethod(TransformMethod.NORMALIZE).build();
            Random r = new Random(seed);
            for (int i = 0; i < 2000 + r.nextInt(1000); i++) {
                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();
                first.process(point, 0L);
            }
            // serialize + deserialize
            RCFCasterMapper mapper = new RCFCasterMapper();
            RCFCaster second = mapper.toModel(mapper.toState(first));
            // the persisted error-handler statistics must survive the round trip
            assertArrayEquals(first.getErrorHandler().getIntervalPrecision(),
                    second.getErrorHandler().getIntervalPrecision(), 1e-6f);
            assertArrayEquals(first.getErrorHandler().getErrorRMSE().high, second.getErrorHandler().getErrorRMSE().high,
                    1e-6f);
            assertArrayEquals(first.getErrorHandler().getErrorRMSE().low, second.getErrorHandler().getErrorRMSE().low,
                    1e-6f);
            assertArrayEquals(first.getErrorHandler().getErrorDistribution().values,
                    second.getErrorHandler().getErrorDistribution().values, 1e-6f);
            assertArrayEquals(first.getErrorHandler().getErrorDistribution().upper,
                    second.getErrorHandler().getErrorDistribution().upper, 1e-6f);
            assertArrayEquals(first.getErrorHandler().getErrorDistribution().lower,
                    second.getErrorHandler().getErrorDistribution().lower, 1e-6f);
            // update original and re-instantiated caster in lockstep
            for (int i = 0; i < 100; i++) {
                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();
                ForecastDescriptor firstResult = first.process(point, 0L);
                ForecastDescriptor secondResult = second.process(point, 0L);
                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);
                verifyForecast(firstResult, secondResult, 1);
            }
        }
    }

    /**
     * Asserts that two forecast descriptors carry identical forecasts and error
     * statistics. NOTE(review): the inputLength parameter is currently unused; kept
     * for signature compatibility -- confirm whether it was meant to scope
     * per-dimension checks.
     */
    void verifyForecast(ForecastDescriptor firstResult, ForecastDescriptor secondResult, int inputLength) {
        RangeVector firstForecast = firstResult.getTimedForecast().rangeVector;
        RangeVector secondForecast = secondResult.getTimedForecast().rangeVector;
        assertArrayEquals(firstForecast.values, secondForecast.values, 1e-6f);
        assertArrayEquals(firstForecast.upper, secondForecast.upper, 1e-6f);
        assertArrayEquals(firstForecast.lower, secondForecast.lower, 1e-6f);
        float[] firstErrorP50 = firstResult.getObservedErrorDistribution().values;
        float[] secondErrorP50 = secondResult.getObservedErrorDistribution().values;
        assertArrayEquals(firstErrorP50, secondErrorP50, 1e-6f);
        float[] firstUpperError = firstResult.getObservedErrorDistribution().upper;
        float[] secondUpperError = secondResult.getObservedErrorDistribution().upper;
        assertArrayEquals(firstUpperError, secondUpperError, 1e-6f);
        float[] firstLowerError = firstResult.getObservedErrorDistribution().lower;
        float[] secondLowerError = secondResult.getObservedErrorDistribution().lower;
        assertArrayEquals(firstLowerError, secondLowerError, 1e-6f);
        DiVector firstRmse = firstResult.getErrorRMSE();
        DiVector secondRmse = secondResult.getErrorRMSE();
        assertArrayEquals(firstRmse.high, secondRmse.high, 1e-6);
        assertArrayEquals(firstRmse.low, secondRmse.low, 1e-6);
        assertArrayEquals(firstResult.getErrorMean(), secondResult.getErrorMean(), 1e-6f);
        assertArrayEquals(firstResult.getIntervalPrecision(), secondResult.getIntervalPrecision(), 1e-6f);
    }

    @ParameterizedTest
    @CsvSource({ "SIMPLE,1", "MINIMAL,1", "NONE,1", "SIMPLE,2", "MINIMAL,2", "NONE,2" })
    public void testNotFullyInitialized(String calibrationString, int inputLength) {
        // Round-trips the caster while it is still below outputAfter, verifying that a
        // partially initialized state also serializes and deserializes faithfully.
        int shingleSize = 8;
        int dimensions = inputLength * shingleSize;
        int forecastHorizon = shingleSize * 3;
        int outputAfter = 32;
        for (int trials = 0; trials < 10; trials++) {
            long seed = new Random().nextLong();
            // printed so a failing run can be reproduced from the seed
            System.out.println(" seed " + seed);
            // note shingleSize == 8
            RCFCaster first = RCFCaster.builder().compact(true).dimensions(dimensions).precision(Precision.FLOAT_32)
                    .randomSeed(seed).internalShinglingEnabled(true).anomalyRate(0.01).shingleSize(shingleSize)
                    .calibration(Calibration.valueOf(calibrationString)).forecastHorizon(forecastHorizon)
                    .transformMethod(TransformMethod.NORMALIZE).outputAfter(outputAfter).build();
            Random r = new Random();
            for (int i = 0; i < new Random().nextInt(outputAfter); i++) {
                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();
                RCFCasterMapper mapper = new RCFCasterMapper();
                RCFCaster shadow = mapper.toModel(mapper.toState(first));
                ForecastDescriptor a = first.process(point, 0L);
                ForecastDescriptor b = shadow.process(point, 0L);
                assertEquals(a.getRCFScore(), b.getRCFScore(), 1e-6);
                // NOTE(review): the same point is processed a second time below, so
                // "first" advances twice per iteration while each shadow copy is rebuilt
                // fresh; looks deliberate but confirm it is not a refactoring leftover
                first.process(point, 0L);
            }
            // serialize + deserialize
            RCFCasterMapper mapper = new RCFCasterMapper();
            RCFCaster second = mapper.toModel(mapper.toState(first));
            // update original and re-instantiated caster in lockstep
            for (int i = 0; i < 100; i++) {
                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();
                ForecastDescriptor firstResult = first.process(point, 0L);
                ForecastDescriptor secondResult = second.process(point, 0L);
                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);
                verifyForecast(firstResult, secondResult, 1);
            }
        }
    }
}
| 572 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/threshold/BasicThresholderTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.threshold;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.DoubleStream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
/**
 * Unit tests for {@link BasicThresholder}: parameter validation, constructor
 * variants, readiness of the deviation statistics, and threshold/grade
 * computation. (Fix: removed leftover debug {@code System.out.println} calls,
 * which add noise to test output and assert nothing.)
 */
public class BasicThresholderTest {
    // score differencing must lie in [0, 1]; anything outside is rejected
    @Test
    void scoreDifferencingTest() {
        BasicThresholder basicThresholder = new BasicThresholder(0.01);
        // negative values are invalid
        assertThrows(IllegalArgumentException.class, () -> {
            basicThresholder.setScoreDifferencing(-new Random().nextDouble());
        });
        // values strictly greater than 1 are invalid
        assertThrows(IllegalArgumentException.class, () -> {
            basicThresholder.setScoreDifferencing(1 + 1e-10 + new Random().nextDouble());
        });
        // any value in [0, 1) is accepted
        assertDoesNotThrow(() -> basicThresholder.setScoreDifferencing(new Random().nextDouble()));
    }

    @Test
    void constructorTest() {
        // null deviation array falls back to the default set of 3 deviations
        BasicThresholder thresholder = new BasicThresholder(null);
        assertEquals(thresholder.getDeviations().length, 3);
        // a partial deviation array still yields a usable secondary deviation
        BasicThresholder thresholder2 = new BasicThresholder(new Deviation[] { new Deviation(0) });
        assertNotNull(thresholder2.getSecondaryDeviation());
        // seeding from a list of scores primes both primary and secondary deviations
        double[] list = new double[] { 1.0, 2.0, 3.0 };
        BasicThresholder basicThresholder = new BasicThresholder(
                DoubleStream.of(list).boxed().collect(Collectors.toList()), 0.01);
        assertEquals(basicThresholder.getPrimaryDeviation().getCount(), 3);
        assertEquals(basicThresholder.getSecondaryDeviation().getCount(), 3);
        assertEquals(basicThresholder.getPrimaryDeviation().getMean(), 2, 1e-10);
        assertEquals(basicThresholder.getSecondaryDeviation().getMean(), 2, 1e-10);
        assertEquals(basicThresholder.getPrimaryDeviation().getDiscount(), 0.01, 1e-10);
        // not enough observations yet for the deviations to be usable
        assertFalse(basicThresholder.isDeviationReady());
        basicThresholder.updatePrimary(0.0);
        basicThresholder.updatePrimary(0.0);
        assertFalse(basicThresholder.isDeviationReady());
        basicThresholder.setScoreDifferencing(0);
        assertFalse(basicThresholder.isDeviationReady());
        // lowering the minimum makes the primary-only path ready
        basicThresholder.setMinimumScores(5);
        assertTrue(basicThresholder.isDeviationReady());
        // full differencing requires the secondary deviation to be primed too
        basicThresholder.setScoreDifferencing(1.0);
        assertFalse(basicThresholder.isDeviationReady());
        basicThresholder.update(0.0, 0.0);
        basicThresholder.update(0.0, 0.0);
        assertTrue(basicThresholder.isDeviationReady());
        basicThresholder.setScoreDifferencing(0.5);
        assertTrue(basicThresholder.isDeviationReady());
        // intermediate fraction interpolates between minimum and 2x minimum scores
        assertEquals(basicThresholder.intermediateTermFraction(), 0.4, 1e-10);
        basicThresholder.updatePrimary(0.0);
        assertNotEquals(1, basicThresholder.intermediateTermFraction(), 0.0);
        basicThresholder.setMinimumScores(4);
        assertEquals(1, basicThresholder.intermediateTermFraction());
    }

    // exercised with and without explicitly zeroed initial/absolute thresholds
    @ParameterizedTest
    @ValueSource(booleans = { true, false })
    void gradeTest(boolean flag) {
        BasicThresholder thresholder = new BasicThresholder(null);
        thresholder.setScoreDifferencing(0.0);
        if (flag) {
            thresholder.setInitialThreshold(0.0);
            thresholder.setAbsoluteThreshold(0.0);
        }
        // before any updates, thresholds and grades are all zero
        assertEquals(0, thresholder.threshold());
        assertEquals(0, thresholder.getPrimaryThreshold());
        assertEquals(0, thresholder.getPrimaryGrade(0));
        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(0.0).weight);
        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(1.0).weight);
        assertEquals(thresholder.initialThreshold,
                thresholder.getThresholdAndGrade(0, TransformMethod.NONE, 1, 1).index);
        assertEquals(thresholder.initialThreshold,
                thresholder.getThresholdAndGrade(1.0, TransformMethod.NONE, 1, 1).index);
        // force the thresholder past its warm-up count
        thresholder.setCount(12);
        assertTrue(thresholder.isDeviationReady());
        assertEquals(thresholder.getSurpriseIndex(1.0, 0, 2.5, 0), 2);
        assertEquals(thresholder.getPrimaryGrade(0), 0);
        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(0.0).weight);
        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(1.0).weight); // threshold 0
        thresholder.updatePrimary(1.0);
        assertEquals(1.0, thresholder.getPrimaryThresholdAndGrade(2.0).weight);
        thresholder.update(1.0, 1.0);
        thresholder.update(1.0, 0.5);
        // long-term deviation depends on the transform method and dimension
        assertEquals(0, thresholder.longTermDeviation(TransformMethod.NONE, 1));
        assertEquals(thresholder.getThresholdAndGrade(0, TransformMethod.NONE, 1, 1).weight, 0);
        assertTrue(thresholder.longTermDeviation(TransformMethod.DIFFERENCE, 1) > 0);
        assertTrue(thresholder.longTermDeviation(TransformMethod.NORMALIZE_DIFFERENCE, 1) > 0);
        assertTrue(thresholder.longTermDeviation(TransformMethod.NONE, 2) > 0);
        assertTrue(thresholder.longTermDeviation(TransformMethod.DIFFERENCE, 2) > 0);
        assertTrue(thresholder.longTermDeviation(TransformMethod.NORMALIZE_DIFFERENCE, 2) > 0);
    }
}
| 573 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/returntypes/TimedRangeVectorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.returntypes;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * Unit tests for {@link TimedRangeVector}: constructor validation, scaling and
 * shifting of the three parallel time-stamp arrays (central, upper, lower),
 * and copy independence.
 */
public class TimedRangeVectorTest {
    int dimensions;
    int horizon;
    private TimedRangeVector vector;

    @BeforeEach
    public void setUp() {
        dimensions = 4;
        horizon = 2;
        // horizon 2 blocks of 2 values each (dimensions / horizon = base dimension 2)
        vector = new TimedRangeVector(dimensions, horizon);
    }

    @Test
    public void testNew() {
        // negative horizon or dimensions are rejected
        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(2, -2));
        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(-2, 2));
        // dimensions must be divisible by the horizon
        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(5, 2));
        assertDoesNotThrow(() -> new TimedRangeVector(6, 2));
        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(new RangeVector(8), 3));
        assertDoesNotThrow(() -> new TimedRangeVector(new RangeVector(9), 3));
        // range-vector dimension must match horizon * (timestamp array length)
        assertThrows(IllegalArgumentException.class,
                () -> new TimedRangeVector(new RangeVector(5), new long[2], new long[2], new long[2]));
        // all three timestamp arrays must have the same length
        assertThrows(IllegalArgumentException.class,
                () -> new TimedRangeVector(new RangeVector(4), new long[2], new long[2], new long[1]));
        assertThrows(IllegalArgumentException.class,
                () -> new TimedRangeVector(new RangeVector(4), new long[2], new long[1], new long[1]));
    }

    @Test
    public void testScale() {
        assertTrue(vector.timeStamps.length == 2);
        vector.timeStamps[0] = 100L;
        vector.upperTimeStamps[0] = 120L;
        vector.lowerTimeStamps[0] = -82L;
        vector.lowerTimeStamps[1] = -100L;
        // index must be within [0, horizon) and the factor must be non-negative
        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(-1, 1.0));
        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(3, 1.0));
        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(0, -1.0));
        // scaling one slot leaves the other untouched; -82 * 0.5 rounds to -41
        vector.scaleTime(0, 0.5);
        assertArrayEquals(vector.timeStamps, new long[] { 50, 0 });
        assertArrayEquals(vector.upperTimeStamps, new long[] { 60, 0 });
        assertArrayEquals(vector.lowerTimeStamps, new long[] { -41, -100 });
    }

    @Test
    public void testShift() {
        vector.timeStamps[0] = 100L;
        vector.upperTimeStamps[0] = 120L;
        vector.lowerTimeStamps[0] = -82L;
        vector.lowerTimeStamps[1] = -100L;
        // shift index must be within [0, horizon)
        assertThrows(IllegalArgumentException.class, () -> vector.shiftTime(-1, 1L));
        assertThrows(IllegalArgumentException.class, () -> vector.shiftTime(3, 1L));
        vector.shiftTime(1, 13);
        // copy constructor yields an independent deep copy
        TimedRangeVector newVector = new TimedRangeVector(vector);
        assertArrayEquals(newVector.timeStamps, new long[] { 100, 13 });
        assertArrayEquals(newVector.upperTimeStamps, new long[] { 120, 13 });
        assertArrayEquals(newVector.lowerTimeStamps, new long[] { -82, -87 });
        // mutating the copy must not change the original
        newVector.shiftTime(1, -130);
        assertArrayEquals(vector.timeStamps, new long[] { 100, 13 });
        assertArrayEquals(vector.upperTimeStamps, new long[] { 120, 13 });
        assertArrayEquals(vector.lowerTimeStamps, new long[] { -82, -87 });
        // upper timestamps must dominate central, and central must dominate lower
        assertThrows(IllegalArgumentException.class,
                () -> new TimedRangeVector(new RangeVector(4), newVector.timeStamps, new long[2], new long[2]));
        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(new RangeVector(4),
                newVector.timeStamps, new long[] { 101L, 0L }, new long[2]));
        TimedRangeVector another = new TimedRangeVector(new RangeVector(4), newVector.timeStamps,
                new long[] { 101L, 0L }, newVector.lowerTimeStamps);
        assertArrayEquals(another.timeStamps, new long[] { 100, -117 });
        assertArrayEquals(another.upperTimeStamps, new long[] { 101, 0 });
        assertArrayEquals(another.lowerTimeStamps, new long[] { -82, -217 });
    }
}
| 574 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/preprocessor/PreprocessorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.TransformMethod;
/**
 * Builder validation tests for {@link Preprocessor}: required fields, the
 * relationship between input length, shingle size and dimensions, and the
 * extra time dimension in TIME_AUGMENTED mode.
 */
public class PreprocessorTest {
    @Test
    void constructorTest() {
        // transform method is mandatory
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(null).build();
        });
        // forest mode is mandatory
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(null).build();
        });
        // input length and dimensions are mandatory
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.STANDARD).build();
        });
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.STANDARD)
                    .inputLength(10).build();
        });
        // with default shingle size 1, dimensions must equal input length
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.STANDARD)
                    .inputLength(10).dimensions(12).build();
        });
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.STANDARD)
                    .inputLength(12).dimensions(12).build();
        });
        // TIME_AUGMENTED adds one extra (time) dimension per shingle entry
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.TIME_AUGMENTED)
                    .inputLength(12).dimensions(12).build();
        });
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.TIME_AUGMENTED)
                    .inputLength(12).dimensions(13).build();
        });
        // with shingle size 2, dimensions must be shingleSize * (inputLength + 1)
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(12).dimensions(13).build();
        });
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(12).dimensions(14).build();
        });
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(6).dimensions(14).build();
        });
        // shingle size must be positive
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(-2)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(6).dimensions(14).build();
        });
        // normalizing time is allowed in TIME_AUGMENTED mode
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2).normalizeTime(true)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(6).dimensions(14).build();
        });
        // NOTE(review): the old comment here said "external shingling in STANDARD
        // mode" but the builder below uses TIME_AUGMENTED — it repeats the shingled
        // TIME_AUGMENTED case above; verify which configuration was intended
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.TIME_AUGMENTED).inputLength(6).dimensions(14).build();
        });
        // internal shingling in STANDARD mode: dimensions = shingleSize * inputLength
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.STANDARD).inputLength(6).dimensions(12).build();
        });
        // weights, when provided, must cover the full input length
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.STANDARD).weights(new double[1]).inputLength(6).dimensions(12).build();
        });
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.STANDARD).weights(new double[2]).inputLength(6).dimensions(12).build();
        });
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.STANDARD).weights(new double[] { 1.0, 1.0 }).inputLength(6).dimensions(12)
                    .build();
        });
        // shingle size 1 (default) with mismatched dimensions still fails
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).forestMode(ForestMode.STANDARD)
                    .inputLength(6).dimensions(12).build();
        });
        assertDoesNotThrow(() -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2).normalizeTime(true)
                    .forestMode(ForestMode.STANDARD).inputLength(6).dimensions(12).build();
        });
        // inputLength * shingleSize must equal dimensions in STANDARD mode
        assertThrows(IllegalArgumentException.class, () -> {
            new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE).shingleSize(2)
                    .forestMode(ForestMode.STANDARD).inputLength(5).dimensions(12).build();
        });
    }
}
| 575 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/preprocessor | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/preprocessor/transform/WeightedTransformerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.preprocessor.transform;
import static com.amazon.randomcutforest.parkservices.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
 * Unit tests for {@link WeightedTransformer} and its difference-based
 * subclasses: construction, weight validation, inversion with zero weights,
 * deviation updates, and clamped normalization. (Fix: replaced
 * {@code assertTrue(x == y)} on doubles with {@code assertEquals} using an
 * explicit delta, which gives exact semantics plus a useful failure message.)
 */
public class WeightedTransformerTest {
    /**
     * Common checks with weights forced to zero: inversion and forecast-range
     * inversion should collapse to the transformer-specific base values.
     *
     * @param w       the transformer under test (single dimension)
     * @param value   expected inverted value for input 1.0 with previous 2.0
     * @param another expected range values after invertForecastRange
     */
    public void checkTransformer(WeightedTransformer w, double value, double another) {
        w.setWeights(new double[1]);
        assertEquals(w.invert(new double[] { 1.0 }, new double[] { 2.0 })[0], value, 1e-6);
        assertEquals(w.getScale()[0], 0, 1e-6);
        RangeVector r = new RangeVector(1);
        r.shift(0, 10);
        assertEquals(r.values[0], 10, 1e-6);
        assertEquals(r.upper[0], 10, 1e-6);
        assertEquals(r.lower[0], 10, 1e-6);
        // the previous-values array must match the transformer's dimension
        assertThrows(IllegalArgumentException.class,
                () -> w.invertForecastRange(r, 1, new double[] { 1.0 }, new double[0]));
        w.invertForecastRange(r, 1, new double[] { 1.0 }, new double[1]);
        assertEquals(r.values[0], another, 1e-6);
        assertEquals(r.upper[0], another, 1e-6);
        assertEquals(r.lower[0], another, 1e-6);
    }

    @Test
    void constructorTest() {
        // deviation array length must be a multiple of NUMBER_OF_STATS per weight
        assertThrows(IllegalArgumentException.class, () -> new WeightedTransformer(new double[2], new Deviation[5]));
        assertThrows(IllegalArgumentException.class,
                () -> new WeightedTransformer(new double[2], new Deviation[2 * NUMBER_OF_STATS]));
        Deviation[] deviations = new Deviation[NUMBER_OF_STATS];
        for (int i = 0; i < NUMBER_OF_STATS; i++) {
            deviations[i] = new Deviation(0);
        }
        WeightedTransformer w = new WeightedTransformer(new double[1], deviations);
        // setWeights must preserve the original dimensionality
        assertThrows(IllegalArgumentException.class, () -> w.setWeights(new double[2]));
        checkTransformer(w, 0, 0);
        // difference-based transformers invert relative to the previous value
        checkTransformer(new NormalizedDifferenceTransformer(new double[1], deviations), 2.0, 1.0);
        checkTransformer(new DifferenceTransformer(new double[1], deviations), 2.0, 1.0);
    }

    @Test
    void updateDeviationsTest() {
        Deviation[] deviations = new Deviation[2 * NUMBER_OF_STATS];
        for (int y = 0; y < deviations.length; y++) {
            deviations[y] = new Deviation(0);
        }
        WeightedTransformer transformer = new WeightedTransformer(new double[2], deviations);
        // both the current and previous input must match the transformer dimension
        assertThrows(IllegalArgumentException.class, () -> transformer.updateDeviation(new double[1], new double[1]));
        assertThrows(IllegalArgumentException.class, () -> transformer.updateDeviation(new double[2], new double[1]));
        assertDoesNotThrow(() -> transformer.updateDeviation(new double[2], new double[2]));
    }

    @Test
    void normalizeTest() {
        Deviation[] deviations = new Deviation[2 * NUMBER_OF_STATS];
        for (int y = 0; y < deviations.length; y++) {
            deviations[y] = new Deviation(0);
        }
        WeightedTransformer transformer = new WeightedTransformer(new double[2], deviations);
        // a zero deviation with unequal value/shift is invalid
        assertThrows(IllegalArgumentException.class, () -> transformer.normalize(10, 5, 0, 10));
        // results are clipped symmetrically to the supplied bound (9 here)
        assertEquals(9, transformer.normalize(10, 5, 0.5, 9), 0.0);
        assertEquals(-9, transformer.normalize(-10, -5, 0.5, 9), 0.0);
    }
}
| 576 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/PredictorCorrector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.config.CorrectionMode.CONDITIONAL_FORECAST;
import static com.amazon.randomcutforest.config.CorrectionMode.DATA_DRIFT;
import static com.amazon.randomcutforest.config.CorrectionMode.NONE;
import static com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor.DEFAULT_NORMALIZATION_PRECISION;
import static java.lang.Math.exp;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.CorrectionMode;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.Neighbor;
import com.amazon.randomcutforest.util.Weighted;
/**
 * This class provides a combined RCF and thresholder, both of which operate in
 * a streaming manner and respect the arrow of time.
 */
public class PredictorCorrector {
    // weight applied to the previous anomaly's score when deciding whether a
    // differential change is large enough to re-trigger (see trigger())
    private static double DEFAULT_DIFFERENTIAL_FACTOR = 0.3;
    public static int DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS = 5;
    public static double DEFAULT_NOISE_SUPPRESSION_FACTOR = 1.0;
    public static double DEFAULT_MULTI_MODE_SAMPLING_RATE = 0.1;
    public static double DEFAULT_SAMPLING_SUPPORT = 0.1;
    public static int DEFAULT_RUN_ALLOWED = 2;
    // the above will trigger on the 4th occurrence, because the first is not
    // counted in the run
    // two scoring modes are maintained in parallel (see the index constants below)
    protected static int NUMBER_OF_MODES = 2;
    protected final static int EXPECTED_INVERSE_DEPTH_INDEX = 0;
    protected final static int DISTANCE_INDEX = 1;
    // the following vectors enable suppression of anomalies
    // the first pair correspond to additive differences
    // the second pair correspond to multiplicative differences
    // which are not meaningful for differenced operations
    double[] ignoreNearExpectedFromBelow;
    double[] ignoreNearExpectedFromAbove;
    double[] ignoreNearExpectedFromBelowByRatio;
    double[] ignoreNearExpectedFromAboveByRatio;
    // for anomaly description we would only look at these many top attributors
    // AExpected value is not well-defined when this number is greater than 1
    // that being said there is no formal restriction other than the fact that the
    // answers would be error prone as this parameter is raised.
    protected int numberOfAttributors = DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS;
    // last score seen, one slot per scoring mode
    protected double[] lastScore = new double[NUMBER_OF_MODES];
    protected ScoringStrategy lastStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;
    // one thresholder per scoring mode
    protected BasicThresholder[] thresholders;
    // number of attributes/variables per (shingled) time slice
    protected int baseDimension;
    // seed for the reproducible pseudo-random stream in nextDouble()
    protected long randomSeed;
    protected double[] modeInformation;
    // per-dimension deviation statistics of actual vs. expected values,
    // populated only when auto-adjustment is enabled (or restored by a mapper)
    protected Deviation[] deviationsActual;
    protected Deviation[] deviationsExpected;
    protected double samplingRate = DEFAULT_MULTI_MODE_SAMPLING_RATE;
    protected double noiseFactor = DEFAULT_NOISE_SUPPRESSION_FACTOR;
    protected boolean autoAdjust = false;
    // descriptor of the most recently processed point
    protected RCFComputeDescriptor lastDescriptor;
    // length of the current run of similar observations
    protected int runLength;
    protected boolean ignoreDrift = false;
    protected double samplingSupport = DEFAULT_SAMPLING_SUPPORT;
public PredictorCorrector(double timeDecay, double anomalyRate, boolean adjust, int baseDimension,
long randomSeed) {
this.thresholders = new BasicThresholder[NUMBER_OF_MODES];
thresholders[0] = new BasicThresholder(timeDecay, anomalyRate, adjust);
thresholders[1] = new BasicThresholder(timeDecay);
this.baseDimension = baseDimension;
this.randomSeed = randomSeed;
this.autoAdjust = adjust;
if (adjust) {
this.deviationsActual = new Deviation[baseDimension];
this.deviationsExpected = new Deviation[baseDimension];
for (int i = 0; i < baseDimension; i++) {
this.deviationsActual[i] = new Deviation(timeDecay);
this.deviationsExpected[i] = new Deviation(timeDecay);
}
}
ignoreNearExpectedFromAbove = new double[baseDimension];
ignoreNearExpectedFromBelow = new double[baseDimension];
ignoreNearExpectedFromAboveByRatio = new double[baseDimension];
ignoreNearExpectedFromBelowByRatio = new double[baseDimension];
}
    // for mappers: reconstructs a PredictorCorrector from serialized state; note
    // that unlike the primary constructor, deviationsActual/Expected arrays are
    // always allocated here (their entries stay null when deviations == null)
    public PredictorCorrector(BasicThresholder[] thresholders, Deviation[] deviations, int baseDimension,
            long randomSeed) {
        checkArgument(thresholders.length > 0, " cannot be empty");
        // serialized deviations interleave actual (first half) and expected (second half)
        checkArgument(deviations == null || deviations.length == 2 * baseDimension, "incorrect state");
        this.thresholders = new BasicThresholder[NUMBER_OF_MODES];
        int size = min(thresholders.length, NUMBER_OF_MODES);
        for (int i = 0; i < size; i++) {
            this.thresholders[i] = thresholders[i];
        }
        // fill any missing modes with fresh thresholders sharing the first one's discount
        for (int i = size; i < NUMBER_OF_MODES; i++) {
            this.thresholders[i] = new BasicThresholder(thresholders[0].getPrimaryDeviation().getDiscount());
        }
        this.deviationsActual = new Deviation[baseDimension];
        this.deviationsExpected = new Deviation[baseDimension];
        if (deviations != null) {
            for (int i = 0; i < baseDimension; i++) {
                deviationsActual[i] = deviations[i];
            }
            for (int i = 0; i < baseDimension; i++) {
                deviationsExpected[i] = deviations[i + baseDimension];
            }
        }
        this.baseDimension = baseDimension;
        this.randomSeed = randomSeed;
        ignoreNearExpectedFromAbove = new double[baseDimension];
        ignoreNearExpectedFromBelow = new double[baseDimension];
        ignoreNearExpectedFromAboveByRatio = new double[baseDimension];
        ignoreNearExpectedFromBelowByRatio = new double[baseDimension];
    }
    // convenience constructor: single thresholder, no restored deviations, seed 0
    public PredictorCorrector(BasicThresholder thresholder, int baseDimension) {
        this(new BasicThresholder[] { thresholder }, null, baseDimension, 0L);
    }
protected double nextDouble() {
Random random = new Random(randomSeed);
randomSeed = random.nextLong();
return random.nextDouble();
}
/**
* uses the attribution information to find the time slice which contributed
* most to the anomaly note that the basic length of the vectors is shingleSize
* * basDimension; the startIndex corresponds to the shingle entry beyond which
* the search is performed. if two anomalies are in a shingle it would focus on
* later one, the previous one would have been (hopefully) reported earlier.
*
* @param diVector attribution of current shingle
* @param baseDimension number of attributes/variables in original data
* @param startIndex time slice of the farthest in the past we are looking
* @return the index (in this shingle) which has the largest contributions
*/
protected int maxContribution(DiVector diVector, int baseDimension, int startIndex) {
double val = 0;
int index = startIndex;
int position = diVector.getDimensions() + startIndex * baseDimension;
for (int i = 0; i < baseDimension; i++) {
val += diVector.getHighLowSum(i + position);
}
for (int i = position + baseDimension; i < diVector.getDimensions(); i += baseDimension) {
double sum = 0;
for (int j = 0; j < baseDimension; j++) {
sum += diVector.getHighLowSum(i + j);
}
if (sum > val) {
val = sum;
index = (i - diVector.getDimensions()) / baseDimension;
}
}
return index;
}
    /**
     * the following creates the expected point based on RCF forecasting
     *
     * @param diVector      the attribution vector that is used to choose which
     *                      elements are to be predicted
     * @param position      the block of (multivariate) elements we are focusing on
     * @param baseDimension the base dimension of the block
     * @param point         the point near which we wish to predict
     * @param forest        the resident RCF
     * @return a vector that is most likely, conditioned on changing a few elements
     *         in the block at position; null when no small set of attributes
     *         dominates the attribution (imputation would be unreliable)
     */
    protected float[] getExpectedPoint(DiVector diVector, int position, int baseDimension, float[] point,
            RandomCutForest forest) {
        int[] likelyMissingIndices;
        if (baseDimension == 1) {
            // univariate: the single element at position is the candidate
            likelyMissingIndices = new int[] { position };
        } else {
            // rank the attributes in the block by their attribution mass
            double sum = 0;
            double[] values = new double[baseDimension];
            for (int i = 0; i < baseDimension; i++) {
                sum += values[i] = diVector.getHighLowSum(i + position);
            }
            Arrays.sort(values);
            int pick = 1;
            if (values[baseDimension - pick] < 0.1 * sum) {
                // largest contributor is only 10 percent; there are too many to predict
                return null;
            }
            // grow the picked set while contributors stay above the threshold
            double threshold = min(0.1 * sum, 0.1);
            while (pick < baseDimension && values[baseDimension - pick - 1] >= threshold) {
                ++pick;
            }
            if (pick > numberOfAttributors) {
                // we chose everything; not usable
                return null;
            }
            // translate the cutoff back into concrete indices within the block
            double cutoff = values[baseDimension - pick];
            likelyMissingIndices = new int[pick];
            int count = 0;
            for (int i = 0; i < baseDimension && count < pick; i++) {
                if (diVector.getHighLowSum(i + position) >= cutoff
                        && (count == 0 || diVector.getHighLowSum(i + position) > sum * 0.1)) {
                    likelyMissingIndices[count++] = position + i;
                }
            }
        }
        // refuse to impute more than half the dimensions — too little signal remains
        if (likelyMissingIndices.length > 0.5 * forest.getDimensions()) {
            return null;
        } else {
            return forest.imputeMissingValues(point, likelyMissingIndices.length, likelyMissingIndices);
        }
    }
/**
* a subroutine that helps eliminates flagging anomalies too close to a
* previously flagged anomaly -- this avoids the repetition due to shingling;
* but still can detect some anomalies if the deviations are usual
*
* @param candidate the candidate attribution of the point
* @param difference the gap (in RCF space) from the last anomaly
* @param baseDimension the size of a block
* @param ideal an idealized version of the candidate (can be
* null) where the most offending elements are
* imputed out
* @param lastAnomalyDescriptor the description of the last anomaly
* @param workingThreshold the threshold to exceed
* @return true if the candidate is sufficiently different and false otherwise
*/
protected boolean trigger(DiVector candidate, int difference, int baseDimension, DiVector ideal,
RCFComputeDescriptor lastAnomalyDescriptor, double workingThreshold) {
int dimensions = candidate.getDimensions();
if (difference >= dimensions || ideal == null) {
return true;
}
double lastAnomalyScore = lastAnomalyDescriptor.getRCFScore();
double differentialRemainder = 0;
for (int i = dimensions - difference; i < dimensions; i++) {
differentialRemainder += Math.abs(candidate.low[i] - ideal.low[i])
+ Math.abs(candidate.high[i] - ideal.high[i]);
}
return (differentialRemainder > DEFAULT_DIFFERENTIAL_FACTOR * lastAnomalyScore)
&& differentialRemainder * dimensions / difference > 1.2 * workingThreshold;
}
/**
* corrects the effect of a last anomaly -- note that an anomaly by definition
* will alter the shift and scale of transformations. This computation fixes one
* single large anomaly.
*
* @param transformMethod the transformation method used
* @param gap the number of steps the anomaly occurred in the
* past
* @param lastAnomalyDescriptor the descriptor of the last anomaly
* @param currentScale the current scale
* @return a correction vector
*/
public double[] getCorrectionOfLastAnomaly(TransformMethod transformMethod, int gap,
RCFComputeDescriptor lastAnomalyDescriptor, double[] currentScale) {
double[] deltaShift = lastAnomalyDescriptor.getDeltaShift();
double[] answer = new double[currentScale.length];
// correct the effect of shifts in last observed anomaly because the anomaly may
// have skewed the shift and scale
if (deltaShift != null
&& (transformMethod == TransformMethod.NORMALIZE || transformMethod == TransformMethod.SUBTRACT_MA)) {
double factor = exp(-gap * lastAnomalyDescriptor.getTransformDecay());
for (int y = 0; y < answer.length; y++) {
answer[y] = (currentScale[y] == 0) ? 0 : deltaShift[y] * factor / currentScale[y];
}
}
return answer;
}
    /**
     * a first stage corrector that attempts to fix the after effects of a previous
     * anomaly which may be in the shingle, or just preceding the shingle
     *
     * @param point                 the current (transformed) point under evaluation
     * @param gap                   the relative position of the previous anomaly
     *                              being corrected
     * @param shingleSize           size of the shingle
     * @param baseDimensions        number of dimensions in each shingle
     * @param currentScale          scale for current point
     * @param transformMethod       transformation Method
     * @param lastAnomalyDescriptor description of the last anomaly
     * @return the corrected point (the input array is not modified)
     */
    protected <P extends AnomalyDescriptor> float[] applyPastCorrector(float[] point, int gap, int shingleSize,
            int baseDimensions, double[] currentScale, TransformMethod transformMethod,
            RCFComputeDescriptor lastAnomalyDescriptor) {
        // work on a copy so the caller's point is untouched
        float[] correctedPoint = Arrays.copyOf(point, point.length);
        // following will fail for first 100ish points and if dimension < 3
        if (lastAnomalyDescriptor.getExpectedRCFPoint() != null) {
            float[] lastExpectedPoint = toFloatArray(lastAnomalyDescriptor.getExpectedRCFPoint());
            double[] lastAnomalyPoint = lastAnomalyDescriptor.getRCFPoint();
            int lastRelativeIndex = lastAnomalyDescriptor.getRelativeIndex();
            // the following will fail for shingleSize 1
            if (gap < shingleSize) {
                // the anomaly still overlaps the shingle: replace the overlapping
                // prefix with the expected (non-anomalous) values
                System.arraycopy(lastExpectedPoint, gap * baseDimensions, correctedPoint, 0,
                        point.length - gap * baseDimensions);
            }
            if (gap <= shingleSize && lastRelativeIndex == 0) {
                if (transformMethod == TransformMethod.DIFFERENCE
                        || transformMethod == TransformMethod.NORMALIZE_DIFFERENCE) {
                    // differenced transforms propagate the anomaly's jump into the
                    // next slice; add back the (actual - expected) difference
                    for (int y = 0; y < baseDimensions; y++) {
                        correctedPoint[point.length - gap * baseDimensions
                                + y] += lastAnomalyPoint[point.length - baseDimensions + y]
                                        - lastExpectedPoint[point.length - baseDimensions + y];
                    }
                }
                if (lastAnomalyDescriptor.getForestMode() == ForestMode.TIME_AUGMENTED) {
                    // definitely correct the time dimension which is always differenced
                    // this applies to the non-differenced cases
                    correctedPoint[point.length - (gap - 1) * baseDimensions - 1] += lastAnomalyPoint[point.length - 1]
                            - lastExpectedPoint[point.length - 1];
                }
            }
        }
        // undo the residual shift the anomaly induced in the transformation
        double[] correctionVector = getCorrectionOfLastAnomaly(transformMethod, gap, lastAnomalyDescriptor,
                currentScale);
        // apply the correction to every slice that postdates the anomaly
        int number = min(gap, shingleSize);
        for (int y = 0; y < baseDimensions; y++) {
            for (int j = 0; j < number; j++) {
                correctedPoint[point.length - (number - j) * baseDimensions + y] += correctionVector[y];
            }
        }
        return correctedPoint;
    }
    /**
     * The following verifies that the overall shingled point is not explainable by
     * floating point precision. It then verifies that the point is not within
     * noiseFactor of the standard deviation of the successive differences (in the
     * multivariate setting). Finally, it caps the maximum grade possible for this
     * point.
     *
     * @param result the transcript of the current point
     * @param point  the current point
     * @param <P>    either AnomalyDescriptor or ForecastDescriptor
     * @return a cap on the grade (can be 0 for filtering out)
     */
    protected <P extends AnomalyDescriptor> double centeredTransformPass(P result, float[] point) {
        double maxFactor = 0;
        // check entire point or some large value
        double[] scale = result.getScale();
        double[] shift = result.getShift();
        double[] deviations = result.getDeviations();
        // pass 1: any coordinate significantly away from 0 (after rescaling) flips
        // the factor to 1 and stops the scan
        for (int i = 0; i < point.length && maxFactor == 0; i++) {
            double scaleFactor = (scale == null) ? 1.0 : scale[i % baseDimension];
            double shiftBase = (shift == null) ? 0 : shift[i % baseDimension];
            if (Math.abs(point[i]) * scaleFactor > DEFAULT_NORMALIZATION_PRECISION * (1 + Math.abs(shiftBase))) {
                maxFactor = 1;
            }
        }
        // check most recent input
        // NOTE(review): once maxFactor == 1, min(1.0, max(maxFactor, ...)) is always 1,
        // so this loop currently never lowers or raises the factor; the method returns
        // only 0 or 1. If a fractional cap was intended, the accumulator likely needs
        // to be reset before this pass — confirm against the thresholding design.
        if (maxFactor > 0) {
            for (int i = 0; i < baseDimension; i++) {
                double scaleFactor = (scale == null) ? 1.0 : Math.abs(scale[i]);
                double z = Math.abs(point[point.length - baseDimension + i]) * scaleFactor;
                double deviation = (deviations == null) ? 0 : Math.abs(deviations[i + baseDimension]);
                if (z > noiseFactor * deviation) {
                    maxFactor = (deviation == 0) ? 1 : min(1.0, max(maxFactor, z / (3 * deviation)));
                }
            }
        }
        return maxFactor;
    }
/**
* The following is useful for managing late detection of anomalies -- this
* calculates the zig-zag over the values in the late detection
*
* @param point the point being scored
* @param startPosition the position of the block where we think the anomaly
* started
* @param index the specific index in the block being tracked
* @param baseDimension the size of the block
* @param differenced has differencing been performed already
* @return the average L1 deviation
*/
double calculatePathDeviation(float[] point, int startPosition, int index, int baseDimension, boolean differenced) {
int position = startPosition;
double variation = 0;
int observation = 0;
while (position + index + baseDimension < point.length) {
variation += (differenced) ? Math.abs(point[position + index])
: Math.abs(point[position + index] - point[position + baseDimension + index]);
position += baseDimension;
++observation;
}
return (observation == 0) ? 0 : variation / observation;
}
    /**
     * Builds a (possibly asymmetric) per-dimension uncertainty box around the
     * values at startPosition, combining path deviation, the noise estimate, a
     * normalization-precision floor, and the user-configured ignore thresholds
     * (absolute and ratio-based, separately for above/below).
     *
     * @param point         the current point
     * @param startPosition index of the block under consideration
     * @param result        the transcript of the current point
     * @return a DiVector whose high part is the upward slack and low part the
     *         downward slack
     */
    protected <P extends AnomalyDescriptor> DiVector constructUncertaintyBox(float[] point, int startPosition,
            P result) {
        TransformMethod method = result.getTransformMethod();
        boolean differenced = (method == TransformMethod.DIFFERENCE)
                || (method == TransformMethod.NORMALIZE_DIFFERENCE);
        double[] scale = result.getScale();
        double[] shift = result.getShift();
        // NOTE(review): the local baseDimensions (from the descriptor) and the field
        // baseDimension (used below for calculatePathDeviation and the deviations
        // index) are presumably equal — confirm; a mismatch would misindex.
        int baseDimensions = result.getDimension() / result.getShingleSize();
        double[] gapLow = new double[baseDimensions];
        double[] gapHigh = new double[baseDimensions];
        for (int y = 0; y < baseDimensions; y++) {
            double a = scale[y] * point[startPosition + y];
            double shiftBase = shift[y];
            double shiftAmount = 0;
            if (shiftBase != 0) {
                // precision slack scales with both the scale and the shift magnitude
                shiftAmount += DEFAULT_NORMALIZATION_PRECISION * (scale[y] + Math.abs(shiftBase));
            }
            double pathGap = calculatePathDeviation(point, startPosition, y, baseDimension, differenced);
            double noiseGap = noiseFactor * result.getDeviations()[baseDimension + y];
            double gap = max(scale[y] * pathGap, noiseGap) + shiftAmount + DEFAULT_NORMALIZATION_PRECISION;
            gapLow[y] = max(max(ignoreNearExpectedFromBelow[y], ignoreNearExpectedFromBelowByRatio[y] * Math.abs(a)),
                    gap);
            gapHigh[y] = max(max(ignoreNearExpectedFromAbove[y], ignoreNearExpectedFromAboveByRatio[y] * Math.abs(a)),
                    gap);
        }
        return new DiVector(gapHigh, gapLow);
    }
protected boolean withinGap(DiVector gap, int startPosition, double[] scale, float[] point, float[] otherPoint,
int baseDimension) {
boolean answer = false;
// only for input dimensions, for which scale is defined currently
for (int y = 0; y < baseDimension && !answer; y++) {
double a = scale[y] * point[startPosition + y];
double b = scale[y] * otherPoint[startPosition + y];
boolean lower = (a < b - gap.low[y]);
boolean upper = (a > b + gap.high[y]);
answer = lower || upper;
}
return !answer;
}
/**
* uses the native approximate near neighbor in RCF to determine what fraction
* of samples from different trees are in the uncertainty box around the queried
* point
*
* @param undertaintyBox the potentially asymmetric box around a point
* @param point the point in question
* @param correctedPoint any correction applied to the point based on prior
* anomalies
* @param startPosition the potential location of the anomaly
* @param result the transcript of the current estimation
* @param forest the resident RCF
* @param <P> an extension of AnomalyDescriptor (to support forecast)
* @return true if there is enough mass within the box
*/
protected <P extends AnomalyDescriptor> boolean explainedByConditionalField(DiVector undertaintyBox, float[] point,
float[] correctedPoint, int startPosition, P result, RandomCutForest forest) {
List<Neighbor> list = forest.getNearNeighborsInSample(correctedPoint);
double weight = 0;
for (Neighbor e : list) {
if (withinGap(undertaintyBox, startPosition, result.getScale(), point, e.point,
point.length / result.getShingleSize())) {
weight += e.count;
}
}
return (weight >= samplingSupport * forest.getNumberOfTrees());
}
/**
* populates the scores and sets the score and attribution vectors; note some
* attributions can remain null (for efficiency reasons)
*
* @param strategy the scoring strategy
* @param point the current point being evaluated
* @param forest the resident RCF
* @param scoreVector the vector of scores
* @param attributionVector the vector of attributions
* @return the index of the score/attribution that is relevant
*/
protected int populateScores(ScoringStrategy strategy, float[] point, RandomCutForest forest, double[] scoreVector,
DiVector[] attributionVector) {
if (strategy != ScoringStrategy.DISTANCE) {
scoreVector[EXPECTED_INVERSE_DEPTH_INDEX] = forest.getAnomalyScore(point);
if (strategy == ScoringStrategy.MULTI_MODE || strategy == ScoringStrategy.MULTI_MODE_RECALL) {
attributionVector[DISTANCE_INDEX] = forest.getSimpleDensity(point).distances;
scoreVector[DISTANCE_INDEX] = attributionVector[DISTANCE_INDEX].getHighLowSum();
}
return 0;
} else {
attributionVector[DISTANCE_INDEX] = forest.getSimpleDensity(point).distances;
scoreVector[DISTANCE_INDEX] = attributionVector[DISTANCE_INDEX].getHighLowSum();
return 1;
}
}
/**
* returned the attribution vector; it tries to reuse cached version to save
* computation
*
* @param choice the mode of the attribution in question
* @param point the point being considered
* @param attributionVector the vector (cached) of attributions
* @param forest the resident RCF
* @return the attribution correspond to the mode of attribution
*/
DiVector getCachedAttribution(int choice, float[] point, DiVector[] attributionVector, RandomCutForest forest) {
if (attributionVector[choice] == null) {
checkArgument(choice == EXPECTED_INVERSE_DEPTH_INDEX, "incorrect cached state of scores");
attributionVector[EXPECTED_INVERSE_DEPTH_INDEX] = forest.getAnomalyAttribution(point);
}
return attributionVector[choice];
}
/**
* computes the attribution of a (candidate) point based on mode, when the
* results are not expected to be cached
*
* @param choice the mode
* @param point the point in question
* @param forest the resident RCF
* @return the attribution of that mode
*/
DiVector getNewAttribution(int choice, float[] point, RandomCutForest forest) {
if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {
return forest.getAnomalyAttribution(point);
} else {
return forest.getSimpleDensity(point).distances;
}
}
/**
* same as getNewAttribution, except when just the score suffices
*
* @param choice the mode in question
* @param point the point in question
* @param forest the resident RCF
* @return the score corresponding to the mode
*/
double getNewScore(int choice, float[] point, RandomCutForest forest) {
if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {
return forest.getAnomalyScore(point);
} else {
return forest.getSimpleDensity(point).distances.getHighLowSum();
}
}
/**
* returns the threshold and grade corresponding to a mode choice (based on
* scoring strategy) currently the scoring strategy is unused, but would likely
* be used in future
*
* @param strategy the scoring strategy
* @param choice the chosen mode
* @param scoreVector the vector of scores
* @param method the transformation method used
* @param dimension the number of dimensions in RCF (used in auto adjustment
* of thresholds)
* @param shingleSize the shingle size (used in auto adjustment of thresholds)
* @return a weighted object where the index is the threshold and the weight is
* the grade
*/
protected Weighted<Double> getThresholdAndGrade(ScoringStrategy strategy, int choice, double[] scoreVector,
TransformMethod method, int dimension, int shingleSize) {
if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {
return thresholders[EXPECTED_INVERSE_DEPTH_INDEX]
.getThresholdAndGrade(scoreVector[EXPECTED_INVERSE_DEPTH_INDEX], method, dimension, shingleSize);
} else {
return thresholders[DISTANCE_INDEX].getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);
}
}
/**
* the strategy to save scores based on the scoring strategy
*
* @param strategy the strategy
* @param choice the mode for which corrected score applies
* @param scoreVector the vector of scores
* @param correctedScore the estimated score with corrections (can be the same
* as score)
* @param method the transformation method used
* @param shingleSize the shingle size
*/
protected void saveScores(ScoringStrategy strategy, int choice, double[] scoreVector, double correctedScore,
TransformMethod method, int shingleSize) {
if (scoreVector[EXPECTED_INVERSE_DEPTH_INDEX] > 0) {
double temp = (choice == EXPECTED_INVERSE_DEPTH_INDEX) ? correctedScore
: scoreVector[EXPECTED_INVERSE_DEPTH_INDEX];
double last = (strategy == lastStrategy) ? lastScore[EXPECTED_INVERSE_DEPTH_INDEX] : 0;
thresholders[EXPECTED_INVERSE_DEPTH_INDEX].update(scoreVector[EXPECTED_INVERSE_DEPTH_INDEX], temp, last,
method);
}
if (scoreVector[DISTANCE_INDEX] > 0) {
thresholders[DISTANCE_INDEX].update(scoreVector[DISTANCE_INDEX], lastScore[DISTANCE_INDEX]);
}
if (shingleSize > 1) {
for (int i = 0; i < NUMBER_OF_MODES; i++) {
lastScore[i] = scoreVector[i];
}
}
}
/**
* the core of the predictor-corrector thresholding for shingled data points. It
* uses a simple threshold provided by the basic thresholder. It first checks if
* obvious effects of the present; and absent such, for repeated breaches, how
* critical is the new current information
*
* @param result returns the augmented description
* @param lastSignificantDescriptor state of the computation for the last
* candidate anomaly
* @param forest the resident RCF
* @return the anomaly descriptor result (which has plausibly mutated)
*/
    protected <P extends AnomalyDescriptor> P detect(P result, RCFComputeDescriptor lastSignificantDescriptor,
            RandomCutForest forest) {
        if (result.getRCFPoint() == null) {
            return result;
        }
        float[] point = toFloatArray(result.getRCFPoint());
        ScoringStrategy strategy = result.getScoringStrategy();
        // stage 1: compute the raw scores/attributions for the configured strategy
        double[] scoreVector = new double[NUMBER_OF_MODES];
        DiVector[] attributionVector = new DiVector[NUMBER_OF_MODES];
        final int originalChoice = populateScores(strategy, point, forest, scoreVector, attributionVector);
        DiVector attribution = null;
        final double score = scoreVector[originalChoice];
        // we will not alter the basic score from RCF under any circumstance
        result.setRCFScore(score);
        // we will not have zero scores affect any thresholding
        if (score == 0) {
            return result;
        }
        long internalTimeStamp = result.getInternalTimeStamp();
        int shingleSize = result.getShingleSize();
        Weighted<Double> thresholdAndGrade = getThresholdAndGrade(strategy, originalChoice, scoreVector,
                result.transformMethod, point.length, shingleSize);
        final double originalThreshold = thresholdAndGrade.index;
        double workingThreshold = originalThreshold;
        double workingGrade = thresholdAndGrade.weight;
        // we will not alter this
        result.setThreshold(originalThreshold);
        boolean candidate = false;
        if (workingGrade > 0 && lastDescriptor != null) {
            if (score > lastDescriptor.getRCFScore()
                    || lastDescriptor.getRCFScore() - lastDescriptor.getThreshold() > score
                            - max(workingThreshold, lastDescriptor.getThreshold())
                                    * (1 + max(0.2, runLength / (2.0 * max(10, shingleSize))))) {
                // the 'run' or the sequence of observations that create large scores
                // because of data (concept?) drift is defined to increase permissively
                // so that it is clear when the threshold is above the scores
                // a consequence of this can be masking -- anomalies just after a run/drift
                // would be difficult to determine -- but those should be difficult to determine
                candidate = true;
            }
        }
        // in MULTI_MODE a confident distance thresholder (valid threshold, zero
        // grade) can veto the candidate outright
        if (workingGrade > 0 && strategy == ScoringStrategy.MULTI_MODE) {
            Weighted<Double> temp = thresholders[DISTANCE_INDEX]
                    .getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);
            if (temp.index > 0 && temp.weight == 0) {
                // there is a valid threshold and the grade is 0
                workingGrade = 0;
                result.setCorrectionMode(CorrectionMode.MULTI_MODE);
            }
        }
        if (lastDescriptor != null && lastDescriptor.getExpectedRCFPoint() != null) {
            lastSignificantDescriptor = lastDescriptor;
        }
        // gap: elapsed steps since the last significant anomaly
        int gap = (int) (internalTimeStamp - lastSignificantDescriptor.getInternalTimeStamp());
        int difference = gap * baseDimension;
        float[] correctedPoint = null;
        double correctedScore = score;
        float[] expectedPoint = null;
        boolean inHighScoreRegion = false;
        int index = 0;
        int relative = (gap >= shingleSize) ? -shingleSize : -gap;
        int choice = originalChoice;
        if (strategy == ScoringStrategy.MULTI_MODE_RECALL && workingGrade == 0 && gap >= shingleSize) {
            // if overlapping shingles are being ruled out, then reconsidering those may not
            // be useful
            Weighted<Double> temp = thresholders[DISTANCE_INDEX]
                    .getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);
            choice = DISTANCE_INDEX;
            correctedScore = scoreVector[DISTANCE_INDEX];
            workingGrade = temp.weight;
            workingThreshold = temp.index;
        }
        // we perform basic correction
        correctedPoint = applyPastCorrector(point, gap, shingleSize, point.length / shingleSize, result.getScale(),
                result.getTransformMethod(), lastSignificantDescriptor);
        /**
         * we check if the point is too close to 0 for centered transforms as well as
         * explainable by the default distribution of differences this acts as a filter
         * and an upper bound for the grade
         */
        if (workingGrade > 0) {
            workingGrade *= centeredTransformPass(result, correctedPoint);
            if (workingGrade == 0) {
                result.setCorrectionMode(CorrectionMode.NOISE);
            }
        }
        if (workingGrade > 0) {
            inHighScoreRegion = true;
            // if the corrector changed the point, rescore the corrected version
            if (!Arrays.equals(correctedPoint, point)) {
                attribution = getNewAttribution(choice, correctedPoint, forest);
                correctedScore = attribution.getHighLowSum();
                if (correctedScore > workingThreshold) {
                    int tempIndex = maxContribution(attribution, point.length / shingleSize, relative) + 1;
                    // use the additional new data for explanation
                    int tempStartPosition = point.length + (tempIndex - 1) * point.length / shingleSize;
                    float[] tempPoint = getExpectedPoint(attribution, tempStartPosition, point.length / shingleSize,
                            correctedPoint, forest);
                    if (tempPoint != null) {
                        DiVector tempAttribution = getNewAttribution(choice, tempPoint, forest);
                        correctedScore = tempAttribution.getHighLowSum();
                        if (!trigger(attribution, difference, point.length / shingleSize, tempAttribution,
                                lastSignificantDescriptor, workingThreshold)) {
                            workingGrade = 0;
                            result.setCorrectionMode(CorrectionMode.ANOMALY_IN_SHINGLE);
                        }
                    }
                }
            } else {
                attribution = getCachedAttribution(choice, point, attributionVector, forest);
            }
            assert (workingGrade == 0 || attribution != null);
            if (workingGrade > 0 && result.getScale() != null && result.getShift() != null) {
                index = (shingleSize == 1) ? 0 : maxContribution(attribution, point.length / shingleSize, relative) + 1;
                int startPosition = point.length + (index - 1) * point.length / shingleSize;
                DiVector uncertaintyBox = constructUncertaintyBox(point, startPosition, result);
                if (autoAdjust && explainedByConditionalField(uncertaintyBox, point, correctedPoint, startPosition,
                        result, forest)) {
                    workingGrade = 0;
                    result.setCorrectionMode(CONDITIONAL_FORECAST);
                } else {
                    expectedPoint = getExpectedPoint(attribution, startPosition, point.length / shingleSize,
                            correctedPoint, forest);
                    if (expectedPoint != null) {
                        if (difference < point.length) {
                            DiVector newAttribution = getNewAttribution(choice, expectedPoint, forest);
                            correctedScore = newAttribution.getHighLowSum();
                            if (!trigger(attribution, difference, point.length / shingleSize, newAttribution,
                                    lastSignificantDescriptor, workingThreshold)) {
                                workingGrade = 0;
                                result.setCorrectionMode(CorrectionMode.ANOMALY_IN_SHINGLE);
                            }
                        } else {
                            // attribution will not be used
                            correctedScore = getNewScore(choice, point, forest);
                        }
                        if (workingGrade > 0 && withinGap(uncertaintyBox, startPosition, result.getScale(), point,
                                expectedPoint, point.length / shingleSize)) {
                            workingGrade = 0;
                            result.setCorrectionMode(CorrectionMode.FORECAST);
                        }
                    }
                }
            }
            if (workingGrade == 0) {
                // note score is the original score
                correctedScore = score;
            }
        }
        // drift handling: a sustained run of high scores may suppress the grade
        if (candidate) {
            if (ignoreDrift && workingGrade > 0) {
                if (runLength > 0) {
                    result.setCorrectionMode(DATA_DRIFT);
                    workingGrade = 0;
                }
            }
            if (autoAdjust) {
                for (int y = 0; y < baseDimension; y++) {
                    deviationsActual[y].update(point[point.length - baseDimension + y]);
                    if (expectedPoint != null) {
                        deviationsExpected[y].update(expectedPoint[point.length - baseDimension + y]);
                    }
                }
                if (runLength > DEFAULT_RUN_ALLOWED && workingGrade > 0) {
                    boolean within = true;
                    for (int y = 0; y < baseDimension && within; y++) {
                        within = Math
                                .abs(deviationsActual[y].getMean() - point[point.length - baseDimension + y]) < max(
                                        2 * deviationsActual[y].getDeviation(),
                                        noiseFactor * result.getDeviations()[baseDimension + y]);
                        // estimation of noise from within the run as well as a long term estimation
                        if (expectedPoint != null) {
                            within = within && Math.abs(deviationsExpected[y].getMean()
                                    - expectedPoint[point.length - baseDimension + y]) < 2
                                            * max(deviationsExpected[y].getDeviation(),
                                                    deviationsActual[y].getDeviation())
                                            + 0.1 * Math.abs(
                                                    deviationsActual[y].getMean() - deviationsExpected[y].getMean());
                            // forecasts cannot be more accurate than actuals; and forecasting would
                            // not be exact
                        }
                    }
                    if (within) {
                        result.setCorrectionMode(DATA_DRIFT);
                        workingGrade = 0;
                    }
                }
            }
        }
        // record the outcome and update run-length bookkeeping
        result.setAnomalyGrade(workingGrade);
        result.setInHighScoreRegion(inHighScoreRegion);
        if (workingGrade > 0) {
            if (expectedPoint != null) {
                result.setExpectedRCFPoint(toDoubleArray(expectedPoint));
            }
            attribution.renormalize(result.getRCFScore());
            result.setStartOfAnomaly(true);
            result.setAttribution(attribution);
            result.setRelativeIndex(index);
            ++runLength;
        } else if (result.getCorrectionMode() == NONE) {
            runLength = 0;
            if (autoAdjust) {
                for (int y = 0; y < baseDimension; y++) {
                    deviationsActual[y].reset();
                    deviationsExpected[y].reset();
                }
            }
        } else if (runLength > 0) {
            // cannot start a run; but the run can be sustained
            ++runLength;
        }
        lastDescriptor = result.copyOf();
        saveScores(strategy, choice, scoreVector, correctedScore, result.transformMethod, shingleSize);
        return result;
    }
public void setZfactor(double factor) {
for (int i = 0; i < thresholders.length; i++) {
thresholders[i].setZfactor(factor);
}
}
    /**
     * Sets the absolute lower threshold; only applies to the inverse-depth
     * thresholder (index 0).
     *
     * @param lower the new absolute threshold
     */
    public void setAbsoluteThreshold(double lower) {
        // only applies to thresholder 0
        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setAbsoluteThreshold(lower);
    }
    /**
     * Sets the score-differencing (persistence) parameter; only applies to the
     * inverse-depth thresholder (index 0).
     *
     * @param persistence the new persistence value
     */
    public void setScoreDifferencing(double persistence) {
        // only applies to thresholder 0
        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setScoreDifferencing(persistence);
    }
    /**
     * Sets the initial threshold; only applies to the inverse-depth thresholder
     * (index 0).
     *
     * @param initial the initial threshold
     */
    public void setInitialThreshold(double initial) {
        // only applies to thresholder 0
        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setInitialThreshold(initial);
    }
public void setNumberOfAttributors(int numberOfAttributors) {
checkArgument(numberOfAttributors > 0, "cannot be negative");
this.numberOfAttributors = numberOfAttributors;
}
    /**
     * @return the number of attributors used in explanations
     */
    public int getNumberOfAttributors() {
        return numberOfAttributors;
    }
    /**
     * @return the most recent score vector; NOTE(review): this is the live
     *         internal array, not a copy — callers should not mutate it
     */
    public double[] getLastScore() {
        return lastScore;
    }
public void setLastScore(double[] score) {
if (score != null) {
System.arraycopy(score, 0, lastScore, 0, min(NUMBER_OF_MODES, score.length));
}
}
void validateIgnore(double[] shift, int length) {
checkArgument(shift.length == length, () -> "has to be of length " + 4 * baseDimension);
for (double element : shift) {
checkArgument(element >= 0, "has to be non-negative");
}
}
public void setIgnoreNearExpectedFromAbove(double[] ignoreSimilarShift) {
if (ignoreSimilarShift != null) {
validateIgnore(ignoreSimilarShift, baseDimension);
System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAbove, 0, baseDimension);
}
}
public void setIgnoreNearExpectedFromBelow(double[] ignoreSimilarShift) {
if (ignoreSimilarShift != null) {
validateIgnore(ignoreSimilarShift, baseDimension);
System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromBelow, 0, baseDimension);
}
}
public void setIgnoreNearExpectedFromAboveByRatio(double[] ignoreSimilarShift) {
if (ignoreSimilarShift != null) {
validateIgnore(ignoreSimilarShift, baseDimension);
System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAboveByRatio, 0, baseDimension);
}
}
public void setIgnoreNearExpectedFromBelowByRatio(double[] ignoreSimilarShift) {
if (ignoreSimilarShift != null) {
validateIgnore(ignoreSimilarShift, baseDimension);
System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromBelowByRatio, 0, baseDimension);
}
}
// to be used for the state classes only
public void setIgnoreNearExpected(double[] ignoreSimilarShift) {
if (ignoreSimilarShift != null) {
validateIgnore(ignoreSimilarShift, 4 * baseDimension);
System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAbove, 0, baseDimension);
System.arraycopy(ignoreSimilarShift, baseDimension, ignoreNearExpectedFromBelow, 0, baseDimension);
System.arraycopy(ignoreSimilarShift, 2 * baseDimension, ignoreNearExpectedFromAboveByRatio, 0,
baseDimension);
System.arraycopy(ignoreSimilarShift, 3 * baseDimension, ignoreNearExpectedFromBelowByRatio, 0,
baseDimension);
}
}
public double[] getIgnoreNearExpected() {
double[] answer = new double[4 * baseDimension];
System.arraycopy(ignoreNearExpectedFromAbove, 0, answer, 0, baseDimension);
System.arraycopy(ignoreNearExpectedFromBelow, 0, answer, baseDimension, baseDimension);
System.arraycopy(ignoreNearExpectedFromAboveByRatio, 0, answer, 2 * baseDimension, baseDimension);
System.arraycopy(ignoreNearExpectedFromBelowByRatio, 0, answer, 3 * baseDimension, baseDimension);
return answer;
}
    /**
     * @return the stored random seed
     */
    public long getRandomSeed() {
        return randomSeed;
    }
    /**
     * @return the thresholders; NOTE(review): this is the live internal array,
     *         not a copy
     */
    public BasicThresholder[] getThresholders() {
        return thresholders;
    }
    /**
     * @return the number of base (input) dimensions
     */
    public int getBaseDimension() {
        return baseDimension;
    }
    /**
     * @return the scoring strategy used on the previous invocation
     */
    public ScoringStrategy getLastStrategy() {
        return lastStrategy;
    }
    /**
     * Restores the last-used scoring strategy (used in state restoration).
     *
     * @param strategy the strategy to record
     */
    public void setLastStrategy(ScoringStrategy strategy) {
        this.lastStrategy = strategy;
    }
public Deviation[] getDeviations() {
if (!autoAdjust) {
return null;
}
checkArgument(deviationsActual.length == deviationsExpected.length, "incorrect state");
checkArgument(deviationsActual.length == baseDimension, "length should be base dimension");
Deviation[] answer = new Deviation[2 * deviationsActual.length];
for (int i = 0; i < deviationsActual.length; i++) {
answer[i] = deviationsActual[i];
}
for (int i = 0; i < deviationsExpected.length; i++) {
answer[i + deviationsActual.length] = deviationsExpected[i];
}
return answer;
}
    /**
     * @return the sampling rate
     */
    public double getSamplingRate() {
        return samplingRate;
    }
public void setSamplingRate(double samplingRate) {
checkArgument(samplingRate > 0 && samplingRate < 1.0, " hast to be in [0,1)");
this.samplingRate = samplingRate;
}
    /**
     * @return the stored mode information (reserved for future use)
     */
    public double[] getModeInformation() {
        return modeInformation;
    }
    // to be used in future
    public void setModeInformation(double[] modeInformation) {
        // intentionally a no-op placeholder; mode information is not yet consumed
    }
    /**
     * @return true when auto adjustment (drift tracking) is enabled
     */
    public boolean isAutoAdjust() {
        return autoAdjust;
    }
    /**
     * Enables or disables auto adjustment (drift tracking).
     *
     * @param autoAdjust the new setting
     */
    public void setAutoAdjust(boolean autoAdjust) {
        this.autoAdjust = autoAdjust;
    }
    /**
     * @return the noise factor used in filtering near-expected values
     */
    public double getNoiseFactor() {
        return noiseFactor;
    }
    /**
     * Sets the noise factor used in filtering near-expected values.
     *
     * @param noiseFactor the new factor
     */
    public void setNoiseFactor(double noiseFactor) {
        this.noiseFactor = noiseFactor;
    }
    /**
     * Enables or disables suppression of anomalies during detected drift runs.
     *
     * @param ignoreDrift the new setting
     */
    public void setIgnoreDrift(boolean ignoreDrift) {
        this.ignoreDrift = ignoreDrift;
    }
    /**
     * @return true when drift suppression is enabled
     */
    public boolean isIgnoreDrift() {
        return ignoreDrift;
    }
    /**
     * Restores the last descriptor; stores a defensive copy.
     *
     * @param lastDescriptor the descriptor to copy and retain
     */
    public void setLastDescriptor(RCFComputeDescriptor lastDescriptor) {
        this.lastDescriptor = lastDescriptor.copyOf();
    }
    /**
     * @return the last descriptor; NOTE(review): returns the live reference, not
     *         a copy
     */
    public RCFComputeDescriptor getLastDescriptor() {
        return lastDescriptor;
    }
    /**
     * @return the current run length (consecutive high-score observations)
     */
    public int getRunLength() {
        return runLength;
    }
    /**
     * Restores the run length (used in state restoration).
     *
     * @param runLength the run length to record
     */
    public void setRunLength(int runLength) {
        this.runLength = runLength;
    }
    /**
     * @return the sampling support used in the conditional-field explanation
     */
    public double getSamplingSupport() {
        return samplingSupport;
    }
    /**
     * Sets the fraction of sampled tree mass required to explain a point via the
     * conditional field check (see explainedByConditionalField).
     *
     * @param sampling the new support; must be in [0, 0.2) — note that exactly
     *                 0.2 is also rejected by the second check
     */
    public void setSamplingSupport(double sampling) {
        checkArgument(sampling >= 0, " cannot be negative ");
        checkArgument(sampling < 0.2, " cannot be more than 0.2");
        samplingSupport = sampling;
    }
}
| 577 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/Point.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import java.util.Arrays;
/**
 * A basic proto-point: an input reading paired with the timestamp at which it
 * was observed. Instances defensively copy the input array on the way in and
 * on the way out.
 */
public class Point {

    // current values (private copy of the constructor argument; may be null)
    double[] currentInput;

    // input timestamp supplied by the caller
    long inputTimestamp;

    public Point(double[] input, long inputTimestamp) {
        this.currentInput = copyIfNotnull(input);
        this.inputTimestamp = inputTimestamp;
    }

    public double[] getCurrentInput() {
        // return a fresh copy so callers cannot mutate internal state
        return copyIfNotnull(currentInput);
    }

    public long getInputTimestamp() {
        return inputTimestamp;
    }

    protected double[] copyIfNotnull(double[] array) {
        if (array == null) {
            return null;
        }
        return Arrays.copyOf(array, array.length);
    }
}
| 578 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/SequentialAnalysis.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_NUMBER_OF_TREES;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import java.util.ArrayList;
import java.util.List;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.parkservices.returntypes.AnalysisDescriptor;
public class SequentialAnalysis {

    /**
     * Provides a list of anomalies given a block of data. While this is a fairly
     * simple function, it is provided as a reference so that users do not have to
     * depend on interpretations of sequential analysis.
     *
     * @param data            the array containing the values
     * @param shingleSize     shingle size of RCF
     * @param sampleSize      sample size of RCF
     * @param numberOfTrees   the number of trees used by RCF
     * @param timeDecay       the time decay parameter of RCF; think of half life
     *                        of data
     * @param outputAfter     the number of observations after which results are
     *                        produced
     * @param transformMethod the transformation used in preprocessing
     * @param transformDecay  the half life of data in preprocessing (if in doubt,
     *                        use the same as timeDecay)
     * @param seed            a random seed
     * @return a list of anomalies
     */
    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, int shingleSize, int sampleSize,
            int numberOfTrees, double timeDecay, int outputAfter, TransformMethod transformMethod,
            double transformDecay, long seed) {
        checkArgument(data != null, "cannot be a null array");
        int inputDimension = data[0].length;
        int dimensions = inputDimension * shingleSize;
        double fraction = 1.0 * outputAfter / sampleSize;
        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)
                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)
                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD).timeDecay(timeDecay)
                .transformMethod(transformMethod).outputAfter(outputAfter).transformDecay(transformDecay)
                .initialAcceptFraction(fraction).build();
        return forest.processSequentially(data);
    }

    /**
     * Convenience overload with default number of trees and outputAfter set to a
     * quarter of the sample size.
     */
    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, int shingleSize, int sampleSize,
            double timeDecay, TransformMethod transformMethod, long seed) {
        return detectAnomalies(data, shingleSize, sampleSize, DEFAULT_NUMBER_OF_TREES, timeDecay, sampleSize / 4,
                transformMethod, timeDecay, seed);
    }

    /**
     * Convenience overload with default sample size and number of trees.
     */
    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, int shingleSize, double timeDecay,
            TransformMethod transformMethod, double transformDecay, long seed) {
        return detectAnomalies(data, shingleSize, DEFAULT_SAMPLE_SIZE, DEFAULT_NUMBER_OF_TREES, timeDecay,
                DEFAULT_SAMPLE_SIZE / 4, transformMethod, transformDecay, seed);
    }

    /**
     * Same as the anomaly detector, but also returns a calibrated (with testing)
     * interval and forecasts alongside the list of anomalies.
     *
     * @param inputArray      the input
     * @param shingleSize     shingle size of RCF
     * @param sampleSize      sample size of RCF
     * @param timeDecay       time decay of RCF
     * @param outputAfter     the input after which we perform score evaluation
     * @param transformMethod transformation method of preprocessing
     * @param transformDecay  the time decay of preprocessing
     * @param forecastHorizon the number of steps to forecast (during and at the
     *                        end)
     * @param errorHorizon    the number of steps to perform calibration (during
     *                        the sequence)
     * @param percentile      the percentile of error one is interested in
     *                        calibrating (we recommend 0.1)
     * @param calibration     the calibration mode used by the caster
     * @param seed            random seed
     * @return a list of anomalies and the final forecast with calibration
     */
    public static AnalysisDescriptor forecastWithAnomalies(double[][] inputArray, int shingleSize, int sampleSize,
            double timeDecay, int outputAfter, TransformMethod transformMethod, double transformDecay,
            int forecastHorizon, int errorHorizon, double percentile, Calibration calibration, long seed) {
        checkArgument(inputArray != null, " input cannot be null");
        int inputDimension = inputArray[0].length;
        int dimensions = shingleSize * inputDimension;
        int numberOfTrees = 50;
        double fraction = 1.0 * outputAfter / sampleSize;
        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed).numberOfTrees(numberOfTrees)
                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).anomalyRate(0.01)
                .forestMode(ForestMode.STANDARD).timeDecay(timeDecay).transformMethod(transformMethod)
                .outputAfter(outputAfter).calibration(calibration).initialAcceptFraction(fraction)
                .forecastHorizon(forecastHorizon).transformDecay(transformDecay).errorHorizon(errorHorizon)
                .percentile(percentile).build();
        ArrayList<AnomalyDescriptor> descriptors = new ArrayList<>();
        ForecastDescriptor last = null;
        // keep only the points flagged as anomalies; the final descriptor carries
        // the forecast
        for (double[] input : inputArray) {
            ForecastDescriptor descriptor = caster.process(input, 0L);
            if (descriptor.getAnomalyGrade() > 0) {
                descriptors.add(descriptor);
            }
            last = descriptor;
        }
        return new AnalysisDescriptor(descriptors, last);
    }

    /**
     * Convenience overload with outputAfter a quarter of the sample size, a 0.1
     * percentile, and SIMPLE calibration.
     */
    public static AnalysisDescriptor forecastWithAnomalies(double[][] inputArray, int shingleSize, int sampleSize,
            double timeDecay, TransformMethod transformMethod, int forecastHorizon, int errorHorizon, long seed) {
        return forecastWithAnomalies(inputArray, shingleSize, sampleSize, timeDecay, sampleSize / 4, transformMethod,
                timeDecay, forecastHorizon, errorHorizon, 0.1, Calibration.SIMPLE, seed);
    }
}
| 579 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/AnomalyDescriptor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class AnomalyDescriptor extends RCFComputeDescriptor {

    // maximum number of expected (counterfactual) values stored per anomaly
    public static int NUMBER_OF_EXPECTED_VALUES = 1;

    // confidence, for both anomalies/non-anomalies
    double dataConfidence;

    // flag indicating if the anomaly is the start of an anomaly or part of a run of
    // anomalies
    boolean startOfAnomaly;

    // flag indicating if the time stamp is in elevated score region to be
    // considered as anomaly
    boolean inHighScoreRegion;

    // a flattened version denoting the basic contribution of each input variable
    // (not shingled) for the time slice indicated by relativeIndex
    double[] relevantAttribution;

    // when time is appended for the anomalous time slice
    double timeAttribution;

    // the values being replaced; may correspond to past
    double[] pastValues;

    // older timestamp if that is replaced
    long pastTimeStamp;

    // expected values, currently set to maximum NUMBER_OF_EXPECTED_VALUES entries
    double[][] expectedValuesList;

    // likelihood values for the list
    double[] likelihoodOfValues;

    public AnomalyDescriptor(double[] input, long inputTimeStamp) {
        super(input, inputTimeStamp);
    }

    /**
     * stores a defensive copy of the values being replaced (null is allowed and
     * clears the field).
     *
     * @param values the past values, or null
     */
    public void setPastValues(double[] values) {
        pastValues = copyIfNotnull(values);
    }

    /**
     * @return true if at least one expected (counterfactual) value was recorded
     */
    public boolean isExpectedValuesPresent() {
        return expectedValuesList != null;
    }

    /**
     * stores a defensive copy of the per-variable attribution (null is allowed
     * and clears the field).
     *
     * @param values flattened attribution values, or null
     */
    public void setRelevantAttribution(double[] values) {
        this.relevantAttribution = copyIfNotnull(values);
    }

    /**
     * records an expected (counterfactual) value and its likelihood; lazily
     * allocates the backing arrays on first use.
     *
     * @param position   index in [0, NUMBER_OF_EXPECTED_VALUES)
     * @param values     the expected values (copied defensively)
     * @param likelihood the likelihood associated with the expected values
     * @throws IllegalArgumentException if position is out of range
     */
    public void setExpectedValues(int position, double[] values, double likelihood) {
        // reject negative positions explicitly instead of surfacing a raw
        // ArrayIndexOutOfBoundsException from the assignment below
        checkArgument(position >= 0, "position cannot be negative");
        checkArgument(position < NUMBER_OF_EXPECTED_VALUES, "Increase size of expected array");
        if (expectedValuesList == null) {
            expectedValuesList = new double[NUMBER_OF_EXPECTED_VALUES][];
        }
        if (likelihoodOfValues == null) {
            likelihoodOfValues = new double[NUMBER_OF_EXPECTED_VALUES];
        }
        expectedValuesList[position] = Arrays.copyOf(values, values.length);
        likelihoodOfValues[position] = likelihood;
    }

    /**
     * computes a confidence score from the (exponentially decayed) amount of
     * data observed; the score approaches dataQuality as the decayed mass grows.
     *
     * @param timeDecay   the time decay of the forest
     * @param valuesSeen  total number of values observed so far
     * @param outputAfter number of values before output begins
     * @param dataQuality a [0,1] measure of data quality, used as a cap
     */
    public void setDataConfidence(double timeDecay, long valuesSeen, long outputAfter, double dataQuality) {
        long total = valuesSeen;
        double lambda = timeDecay;
        double totalExponent = total * lambda;
        if (totalExponent == 0) {
            // no decayed mass yet; no basis for confidence
            dataConfidence = 0.0;
        } else if (totalExponent >= 20) {
            // exp(20) is large enough that the correction term below is negligible
            dataConfidence = Math.min(1.0, dataQuality);
        } else {
            double eTotal = Math.exp(totalExponent);
            double confidence = dataQuality * (eTotal - Math.exp(lambda * Math.min(total, outputAfter))) / (eTotal - 1);
            dataConfidence = Math.max(0, confidence);
        }
    }
}
| 580 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/RCFComputeDescriptor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.config.CorrectionMode;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.returntypes.DiVector;
/**
 * a basic class that is used to store the internal state of the streaming
 * processing in ThresholdedRandomCutForest and others.
 */
@Getter
@Setter
public class RCFComputeDescriptor extends Point {

    ForestMode forestMode = ForestMode.STANDARD;

    TransformMethod transformMethod = TransformMethod.NONE;

    ImputationMethod imputationMethod = ImputationMethod.PREVIOUS;

    ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;

    CorrectionMode correctionMode = CorrectionMode.NONE;

    // the most important parameter of the forest
    int shingleSize;

    // the actual dimensions
    int dimension;

    // the inputlength; useful for standalone analysis
    int inputLength;

    // sequence index (the number of updates to RCF) -- it is possible in imputation
    // that the number of updates is more than the input tuples seen by the overall
    // program
    long totalUpdates;

    // determines if values can be imputed and or expected point calculated
    boolean reasonableForecast;

    // internal timestamp (basically a sequence index, but can be scaled and
    // jittered as in the example); kept as long for potential future use
    long internalTimeStamp;

    // number of trees in the forest
    int numberOfTrees;

    // current missing values, if any
    int[] missingValues;

    // potential number of imputes before processing current point
    int numberOfNewImputes;

    // actual, potentially transformed point on which compute occurs
    double[] RCFPoint;

    // score for various postprocessing
    double RCFScore;

    // the following describes the grade of the anomaly in the range [0:1] where
    // 0 is not an anomaly
    double anomalyGrade;

    // the threshold used in inference
    double threshold;

    // same for attribution; this is basic RCF attribution which has high/low
    // information
    DiVector attribution;

    /**
     * position of the anomaly vis a vis the current time (can be -ve) if anomaly is
     * detected late, which can and should happen sometime; for shingle size 1; this
     * is always 0
     */
    int relativeIndex;

    // useful for detecting noise
    double[] deviations;

    // useful for calibration in RCFCaster
    double[] postDeviations;

    // the multiplication factors to convert RCF representation to actuals/input
    double[] scale;

    // the addition performed (after multiplications) to convert RCF representation
    // to actuals/input
    double[] shift;

    // effects of a specific anomaly
    double[] postShift;

    // how long the effects last
    double transformDecay;

    // expected RCFPoint for the current point
    double[] expectedRCFPoint;

    // internal timestamp of last anomaly
    long lastAnomalyInternalTimestamp;

    // expected point of last anomaly
    double[] lastExpectedRCFPoint;

    // if the anomaly is due to timestamp when it is augmented only for current time
    long expectedTimeStamp;

    // used for streaming imputation
    double[][] imputedPoints;

    public RCFComputeDescriptor(double[] input, long inputTimeStamp) {
        super(input, inputTimeStamp);
    }

    public RCFComputeDescriptor(double[] input, long inputTimeStamp, ForestMode forestMode,
            TransformMethod transformMethod, ImputationMethod imputationMethod) {
        super(input, inputTimeStamp);
        this.forestMode = forestMode;
        this.transformMethod = transformMethod;
        this.imputationMethod = imputationMethod;
    }

    // array-valued fields are defensively copied on both read and write; null is
    // allowed everywhere and clears the field

    public void setShift(double[] shift) {
        this.shift = copyIfNotnull(shift);
    }

    public void setPostShift(double[] shift) {
        this.postShift = copyIfNotnull(shift);
    }

    public double[] getShift() {
        return copyIfNotnull(shift);
    }

    public void setScale(double[] scale) {
        this.scale = copyIfNotnull(scale);
    }

    public double[] getScale() {
        return copyIfNotnull(scale);
    }

    /**
     * @return the componentwise difference (postShift - shift), or null if either
     *         operand is absent
     */
    public double[] getDeltaShift() {
        if (shift == null || postShift == null) {
            return null;
        }
        double[] answer = new double[shift.length];
        for (int i = 0; i < shift.length; i++) {
            answer[i] = postShift[i] - shift[i];
        }
        return answer;
    }

    public void setExpectedRCFPoint(double[] point) {
        expectedRCFPoint = copyIfNotnull(point);
    }

    public double[] getExpectedRCFPoint() {
        return copyIfNotnull(expectedRCFPoint);
    }

    public void setRCFPoint(double[] point) {
        RCFPoint = copyIfNotnull(point);
    }

    public double[] getRCFPoint() {
        return copyIfNotnull(RCFPoint);
    }

    /**
     * stores a defensive copy of the expected point of the last anomaly; defining
     * this method suppresses the Lombok-generated setter, which would have stored
     * the caller's array directly (inconsistent with the copying getter below).
     *
     * @param point the expected RCF point of the last anomaly, or null
     */
    public void setLastExpectedRCFPoint(double[] point) {
        lastExpectedRCFPoint = copyIfNotnull(point);
    }

    // misspelled variant retained for backward compatibility; prefer
    // setLastExpectedRCFPoint
    public void setLastExpecteRCFdPoint(double[] point) {
        setLastExpectedRCFPoint(point);
    }

    public double[] getLastExpectedRCFPoint() {
        return copyIfNotnull(lastExpectedRCFPoint);
    }

    public void setAttribution(DiVector attribution) {
        this.attribution = (attribution == null) ? null : new DiVector(attribution);
    }

    public DiVector getAttribution() {
        return (attribution == null) ? null : new DiVector(attribution);
    }

    public int[] getMissingValues() {
        return (missingValues == null) ? null : Arrays.copyOf(missingValues, missingValues.length);
    }

    public void setMissingValues(int[] values) {
        missingValues = (values == null) ? null : Arrays.copyOf(values, values.length);
    }

    /**
     * records one imputed point at the given slot; the backing array is lazily
     * allocated based on the number of pending imputations.
     *
     * @param index  the slot, in [0, min(numberOfNewImputes, shingleSize - 1))
     * @param impute the imputed values (length must equal inputLength)
     * @throws IllegalArgumentException if no imputation is pending, the length is
     *                                  wrong, or the slot is already filled
     */
    public void setImputedPoint(int index, double[] impute) {
        checkArgument(numberOfNewImputes > 0, " no imputation is indicated");
        checkArgument(impute != null && impute.length == inputLength, "incorrect length");
        if (imputedPoints == null) {
            imputedPoints = new double[Math.min(numberOfNewImputes, shingleSize - 1)][];
        }
        checkArgument(imputedPoints.length > index && index >= 0 && imputedPoints[index] == null, "already set!");
        imputedPoints[index] = Arrays.copyOf(impute, inputLength);
    }

    /**
     * an explicit copy operation to control the stored state; only the fields
     * listed below are carried over to the copy.
     *
     * @return a new descriptor with the selected state copied
     */
    public RCFComputeDescriptor copyOf() {
        RCFComputeDescriptor answer = new RCFComputeDescriptor(currentInput, inputTimestamp, forestMode,
                transformMethod, imputationMethod);
        answer.setShingleSize(shingleSize);
        answer.setDimension(dimension);
        answer.setInputLength(inputLength);
        answer.setReasonableForecast(reasonableForecast);
        answer.setAttribution(attribution);
        answer.setRCFPoint(RCFPoint);
        answer.setRCFScore(RCFScore);
        answer.setInternalTimeStamp(internalTimeStamp);
        answer.setExpectedRCFPoint(expectedRCFPoint);
        answer.setNumberOfTrees(numberOfTrees);
        answer.setTotalUpdates(totalUpdates);
        answer.setNumberOfNewImputes(numberOfNewImputes);
        answer.setLastAnomalyInternalTimestamp(lastAnomalyInternalTimestamp);
        answer.setLastExpectedRCFPoint(lastExpectedRCFPoint);
        answer.setScoringStrategy(scoringStrategy);
        answer.setShift(shift);
        answer.setScale(scale);
        answer.setPostShift(postShift);
        answer.setTransformDecay(transformDecay);
        answer.setAnomalyGrade(anomalyGrade);
        answer.setThreshold(threshold);
        answer.setCorrectionMode(correctionMode);
        return answer;
    }
}
| 581 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/ErrorHandler.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.Arrays;
import java.util.function.BiFunction;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
// we recommend the article "Regret in the On-Line Decision Problem", by Foster and Vohra,
// Games and Economic Behavior, Vol=29 (1-2), 1999
// the discussion is applicable to non-regret scenarios as well; but essentially boils down to
// fixed point/minimax computation. One could use multiplicative update type methods which would be
// uniform over all quantiles, provided sufficient data and a large enough calibration horizon.
// Multiplicative updates are scale free -- but providing scale free forecasting over a stream raises the
// issue "what is the current scale of the stream". While such questions can be answered, that discussion
// can be involved and out of current scope of this library. We simplify the issue to calibrating two
// fixed quantiles and hence additive updates are reasonable.
@Getter
@Setter
public class ErrorHandler {

    /**
     * this to constrain the state
     */
    public static int MAX_ERROR_HORIZON = 1024;

    // number of updateActuals() invocations so far; all ring-buffer indices below
    // are derived from this counter
    int sequenceIndex;
    // the (one-sided) quantile used for the calibrated interval, in (0, 0.5)
    double percentile;
    // number of future steps forecast at each time step
    int forecastHorizon;
    // number of past time steps over which errors are aggregated
    int errorHorizon;

    // the following arrays store the state of the sequential computation
    // these can be optimized -- for example one could store the errors; which
    // would see much fewer
    // increments. However for a small enough errorHorizon, the generality of
    // changing the error function
    // outweighs the benefit of recomputation. The search in the ensemble tree is
    // still a larger bottleneck than
    // these computations at the moment; not to mention issues of saving and
    // restoring state.

    // ring buffer of past forecasts; slot (sequenceIndex - 1) mod length holds the
    // most recent forecast
    protected RangeVector[] pastForecasts;
    // ring buffer of observed inputs, aligned with pastForecasts
    protected float[][] actuals;

    // the following are derived quantities and present for efficiency reasons

    // interpolated median/upper/lower of the observed errors, per (horizon step,
    // input variable) position
    RangeVector errorDistribution;
    // RMSE split into under-prediction (high) and over-prediction (low) components
    DiVector errorRMSE;
    float[] errorMean;
    // fraction of observed values that fell inside the forecast interval
    float[] intervalPrecision;
    // deviations from the most recent updateActuals() call; used as a fallback
    // interval width when too few errors are available (see calibrate())
    float[] lastDeviations;

    // We keep the multipliers defined for potential
    // future use.
    RangeVector multipliers;
    RangeVector adders;

    /**
     * standard constructor, invoked from the RCFCaster builder.
     *
     * @param builder carries the horizons, dimensions, shingle size and percentile
     */
    public ErrorHandler(RCFCaster.Builder builder) {
        checkArgument(builder.forecastHorizon > 0, "has to be positive");
        checkArgument(builder.errorHorizon >= builder.forecastHorizon,
                "intervalPrecision horizon should be at least as large as forecast horizon");
        checkArgument(builder.errorHorizon <= MAX_ERROR_HORIZON, "reduce error horizon of change MAX");

        forecastHorizon = builder.forecastHorizon;
        errorHorizon = builder.errorHorizon;
        int inputLength = (builder.dimensions / builder.shingleSize);
        // each stored forecast covers forecastHorizon steps of inputLength values
        int length = inputLength * forecastHorizon;
        percentile = builder.percentile;
        // the ring buffers need errorHorizon + forecastHorizon slots so a forecast
        // made errorHorizon steps ago can still be compared against actuals
        // arriving up to forecastHorizon steps later
        pastForecasts = new RangeVector[errorHorizon + forecastHorizon];
        for (int i = 0; i < errorHorizon + forecastHorizon; i++) {
            pastForecasts[i] = new RangeVector(length);
        }
        actuals = new float[errorHorizon + forecastHorizon][inputLength];
        sequenceIndex = 0;
        errorMean = new float[length];
        errorRMSE = new DiVector(length);
        lastDeviations = new float[inputLength];
        multipliers = new RangeVector(length);
        // multipliers start at the identity
        Arrays.fill(multipliers.upper, 1);
        Arrays.fill(multipliers.values, 1);
        Arrays.fill(multipliers.lower, 1);
        adders = new RangeVector(length);
        intervalPrecision = new float[length];
        errorDistribution = new RangeVector(length);
    }

    // for mappers
    /**
     * reconstruction constructor used by state mappers; rebuilds the ring buffers
     * from flattened arrays and recomputes the derived error statistics.
     *
     * @param errorHorizon          number of steps used for calibration
     * @param forecastHorizon       number of steps forecast at each time step
     * @param sequenceIndex         number of updateActuals() calls performed
     * @param percentile            the calibrated quantile, in (0, 0.5)
     * @param inputLength           number of input variables per time step
     * @param actualsFlattened      stored actuals, row-major, may be null
     * @param pastForecastsFlattened stored forecasts (values/upper/lower per
     *                              slot), may be null iff actualsFlattened is null
     * @param lastDeviations        deviations from the most recent update
     * @param auxilliary            reserved for future use
     */
    public ErrorHandler(int errorHorizon, int forecastHorizon, int sequenceIndex, double percentile, int inputLength,
            float[] actualsFlattened, float[] pastForecastsFlattened, float[] lastDeviations, float[] auxilliary) {
        checkArgument(forecastHorizon > 0, " incorrect forecast horizon");
        checkArgument(errorHorizon >= forecastHorizon, "incorrect error horizon");
        checkArgument(actualsFlattened != null || pastForecastsFlattened == null,
                " actuals and forecasts are a mismatch");
        checkArgument(inputLength > 0, "incorrect parameters");
        checkArgument(sequenceIndex >= 0, "cannot be negative");
        checkArgument(Math.abs(percentile - 0.25) < 0.24, "has to be between (0,0.5) ");
        // calibration would have been performed at previous value
        this.sequenceIndex = sequenceIndex;
        this.errorHorizon = errorHorizon;
        this.percentile = percentile;
        this.forecastHorizon = forecastHorizon;
        int currentLength = (actualsFlattened == null) ? 0 : actualsFlattened.length;
        checkArgument(currentLength % inputLength == 0, "actuals array is incorrect");
        int forecastLength = (pastForecastsFlattened == null) ? 0 : pastForecastsFlattened.length;
        int arrayLength = max(forecastHorizon + errorHorizon, currentLength / inputLength);
        this.pastForecasts = new RangeVector[arrayLength];
        this.actuals = new float[arrayLength][inputLength];

        int length = forecastHorizon * inputLength;
        // currentLength = (number of actual time steps stored) x inputLength and for
        // each of the stored time steps we get a forecast whose length is
        // forecastHorizon x inputLength (and then upper and lower for each, hence x 3)
        // so forecastLength = number of actual time steps stored x forecastHorizon x
        // inputLength x 3
        // = currentLength x forecastHorizon x 3
        checkArgument(forecastLength == currentLength * 3 * forecastHorizon, "misaligned forecasts");
        checkArgument(lastDeviations.length >= inputLength, "incorrect length");
        this.lastDeviations = Arrays.copyOf(lastDeviations, lastDeviations.length);
        this.errorMean = new float[length];
        this.errorRMSE = new DiVector(length);
        this.intervalPrecision = new float[length];
        this.adders = new RangeVector(length);
        this.multipliers = new RangeVector(length);
        this.errorDistribution = new RangeVector(length);
        if (pastForecastsFlattened != null) {
            // each slot was serialized as [values | upper | lower], each of size length
            for (int i = 0; i < arrayLength; i++) {
                float[] values = Arrays.copyOfRange(pastForecastsFlattened, i * 3 * length, (i * 3 + 1) * length);
                float[] upper = Arrays.copyOfRange(pastForecastsFlattened, (i * 3 + 1) * length, (i * 3 + 2) * length);
                float[] lower = Arrays.copyOfRange(pastForecastsFlattened, (i * 3 + 2) * length, (i * 3 + 3) * length);
                pastForecasts[i] = new RangeVector(values, upper, lower);
                System.arraycopy(actualsFlattened, i * inputLength, actuals[i], 0, inputLength);
            }
            // derived statistics are not serialized; rebuild them from the buffers
            recomputeErrors();
        }
    }

    /**
     * updates the stored information (actuals) and recomputes the calibrations
     *
     * @param input      the actual input
     * @param deviations the deviations (post the current input)
     */
    public void updateActuals(double[] input, double[] deviations) {
        int arrayLength = pastForecasts.length;
        int inputLength = input.length;

        if (sequenceIndex > 0) {
            // sequenceIndex indicates the first empty place for input
            // note the predictions have already been stored
            int inputIndex = (sequenceIndex + arrayLength - 1) % arrayLength;
            for (int i = 0; i < inputLength; i++) {
                actuals[inputIndex][i] = (float) input[i];
            }
        }
        // sequence index is increased first so that recomputeErrors is idempotent; that
        // is they are only state dependent and not event dependent
        ++sequenceIndex;
        recomputeErrors();
        lastDeviations = toFloatArray(deviations);
    }

    // copies the current derived statistics into the descriptor for reporting
    public void augmentDescriptor(ForecastDescriptor descriptor) {
        descriptor.setErrorMean(errorMean);
        descriptor.setErrorRMSE(errorRMSE);
        descriptor.setObservedErrorDistribution(errorDistribution);
        descriptor.setIntervalPrecision(intervalPrecision);
    }

    /**
     * saves the forecast -- note that this section assumes that updateActuals() has
     * been invoked prior (to recompute the deviations)
     *
     * @param vector the forecast
     */
    public void updateForecasts(RangeVector vector) {
        int arrayLength = pastForecasts.length;
        // sequenceIndex was already incremented in updateActuals(), so the slot
        // for the current step is (sequenceIndex - 1) mod arrayLength
        int storedForecastIndex = (sequenceIndex + arrayLength - 1) % (arrayLength);
        int length = pastForecasts[0].values.length;

        System.arraycopy(vector.values, 0, pastForecasts[storedForecastIndex].values, 0, length);
        System.arraycopy(vector.upper, 0, pastForecasts[storedForecastIndex].upper, 0, length);
        System.arraycopy(vector.lower, 0, pastForecasts[storedForecastIndex].lower, 0, length);
    }

    // the getters below return defensive copies of the derived statistics

    public RangeVector getErrorDistribution() {
        return new RangeVector(errorDistribution);
    }

    public float[] getErrorMean() {
        return Arrays.copyOf(errorMean, errorMean.length);
    }

    public DiVector getErrorRMSE() {
        return new DiVector(errorRMSE);
    }

    public float[] getIntervalPrecision() {
        return Arrays.copyOf(intervalPrecision, intervalPrecision.length);
    }

    public RangeVector getMultipliers() {
        return new RangeVector(multipliers);
    }

    public RangeVector getAdders() {
        return new RangeVector(adders);
    }

    /**
     * extracts, for a fixed horizon step and input coordinate, the sequence of
     * errors between stored actuals and stored forecasts (most recent first).
     *
     * @param len             number of errors to extract
     * @param leadtime        the horizon step of interest
     * @param inputCoordinate the input variable of interest
     * @param position        flattened position (leadtime * inputLength + coordinate)
     * @param error           the error function applied to (actual, forecast)
     * @return the errors, most recent first
     */
    protected double[] getErrorVector(int len, int leadtime, int inputCoordinate, int position,
            BiFunction<Float, Float, Float> error) {
        int arrayLength = pastForecasts.length;
        int errorIndex = (sequenceIndex - 1 + arrayLength) % arrayLength;
        double[] copy = new double[len];
        for (int k = 0; k < len; k++) {
            // forecast made (leadtime + k) steps ago ...
            int pastIndex = (errorIndex - leadtime - k + arrayLength) % arrayLength;
            // ... compared against the actual stored (k + 1) steps ago
            int index = (errorIndex - k - 1 + arrayLength) % arrayLength;
            copy[k] = error.apply(actuals[index][inputCoordinate], pastForecasts[pastIndex].values[position]);
        }
        return copy;
    }

    // number of error samples available for horizon step `index`, capped at
    // errorHorizon; early in the sequence fewer samples exist
    int length(int sequenceIndex, int errorHorizon, int index) {
        return (sequenceIndex > errorHorizon + index + 1) ? errorHorizon : sequenceIndex - index - 1;
    }

    /**
     * recomputes the derived error statistics (mean, RMSE split by sign,
     * interpolated distribution and interval precision) from the stored actuals
     * and forecasts; idempotent given the stored state.
     */
    protected void recomputeErrors() {
        int inputLength = actuals[0].length;
        int arrayLength = pastForecasts.length;
        // shows the last filled entry with -2
        int inputIndex = (sequenceIndex - 2 + arrayLength) % arrayLength;
        // scratch space; len below never exceeds errorHorizon
        double[] medianError = new double[errorHorizon];
        Arrays.fill(intervalPrecision, 0);

        for (int i = 0; i < forecastHorizon; i++) {
            int len = length(sequenceIndex, errorHorizon, i);
            for (int j = 0; j < inputLength; j++) {
                int pos = i * inputLength + j;
                if (len > 0) {
                    // accumulate positive (under-forecast) and negative
                    // (over-forecast) errors separately for the signed RMSE
                    double positiveSum = 0;
                    int positiveCount = 0;
                    double negativeSum = 0;
                    double positiveSqSum = 0;
                    double negativeSqSum = 0;
                    for (int k = 0; k < len; k++) {
                        int pastIndex = (inputIndex - i - k + arrayLength) % arrayLength;
                        // one more forecast
                        int index = (inputIndex - k + arrayLength) % arrayLength;
                        double error = actuals[index][j] - pastForecasts[pastIndex].values[pos];
                        medianError[k] = error;
                        // count a hit whenever the actual landed inside the interval
                        intervalPrecision[pos] += (pastForecasts[pastIndex].upper[pos] >= actuals[index][j]
                                && actuals[index][j] >= pastForecasts[pastIndex].lower[pos]) ? 1 : 0;
                        if (error >= 0) {
                            positiveSum += error;
                            positiveSqSum += error * error;
                            ++positiveCount;
                        } else {
                            negativeSum += error;
                            negativeSqSum += error * error;
                        }
                    }
                    errorMean[pos] = (float) (positiveSum + negativeSum) / len;
                    errorRMSE.high[pos] = (positiveCount > 0) ? Math.sqrt(positiveSqSum / positiveCount) : 0;
                    errorRMSE.low[pos] = (positiveCount < len) ? -Math.sqrt(negativeSqSum / (len - positiveCount)) : 0;
                    // only compute the interpolated distribution once the requested
                    // percentile corresponds to at least one whole sample
                    if (len * percentile >= 1.0) {
                        Arrays.sort(medianError, 0, len);
                        // medianError array is now sorted
                        errorDistribution.values[pos] = interpolatedMedian(medianError, len);
                        errorDistribution.upper[pos] = interpolatedUpperRank(medianError, len, len * percentile);
                        errorDistribution.lower[pos] = interpolatedLowerRank(medianError, len * percentile);
                    }
                    intervalPrecision[pos] = intervalPrecision[pos] / len;
                } else {
                    errorMean[pos] = 0;
                    errorRMSE.high[pos] = errorRMSE.low[pos] = 0;
                    errorDistribution.values[pos] = errorDistribution.upper[pos] = errorDistribution.lower[pos] = 0;
                    adders.upper[pos] = adders.lower[pos] = adders.values[pos] = 0;
                    intervalPrecision[pos] = 0;
                }
            }
        }
    }

    /**
     * adjusts the forecast ranges in place using the observed error statistics;
     * when too few errors are available the last deviations are used to widen
     * the interval as a fallback.
     *
     * @param calibration the calibration mode (NONE, SIMPLE, MINIMAL)
     * @param ranges      the forecast ranges, modified in place
     */
    protected void calibrate(Calibration calibration, RangeVector ranges) {
        int inputLength = actuals[0].length;
        checkArgument(inputLength * forecastHorizon == ranges.values.length, "mismatched lengths");
        for (int i = 0; i < forecastHorizon; i++) {
            // this is the only place where the newer (possibly shorter) horizon matters
            int len = length(sequenceIndex, errorHorizon, i);
            for (int j = 0; j < inputLength; j++) {
                int pos = i * inputLength + j;
                if (len > 0) {
                    if (calibration != Calibration.NONE) {
                        if (len * percentile < 1.0) {
                            // not enough samples for the percentile; widen the
                            // interval by a multiple of the last deviation
                            double deviation = lastDeviations[j];
                            ranges.upper[pos] = max(ranges.upper[pos], ranges.values[pos] + (float) (1.3 * deviation));
                            ranges.lower[pos] = min(ranges.lower[pos], ranges.values[pos] - (float) (1.3 * deviation));
                        } else {
                            if (calibration == Calibration.SIMPLE) {
                                adjust(pos, ranges, errorDistribution);
                            }
                            if (calibration == Calibration.MINIMAL) {
                                adjustMinimal(pos, ranges, errorDistribution);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * interpolated median of the first len entries of a sorted array; snaps to 0
     * when 0 lies between the two central candidates.
     *
     * @param ascendingArray a sorted (ascending) array
     * @param len            number of valid entries
     * @return the interpolated median
     */
    protected float interpolatedMedian(double[] ascendingArray, int len) {
        checkArgument(ascendingArray != null, " cannot be null");
        checkArgument(ascendingArray.length >= len, "incorrect length parameter");
        float lower = (float) ((len % 2 == 0) ? ascendingArray[len / 2 - 1]
                : (ascendingArray[len / 2] + ascendingArray[len / 2 - 1]) / 2);
        float upper = (float) ((len % 2 == 0) ? ascendingArray[len / 2]
                : (ascendingArray[len / 2] + ascendingArray[len / 2 + 1]) / 2);
        if (lower <= 0 && 0 <= upper) {
            // 0 is plausible, and introduces minimal externality
            return 0;
        } else {
            return (upper + lower) / 2;
        }
    }

    // interpolated value at fractional rank fracRank from the low end of the
    // sorted array; callers ensure fracRank >= 1
    float interpolatedLowerRank(double[] ascendingArray, double fracRank) {
        int rank = (int) Math.floor(fracRank);
        return (float) (ascendingArray[rank - 1]
                + (fracRank - rank) * (ascendingArray[rank] - ascendingArray[rank - 1]));
    }

    // interpolated value at fractional rank fracRank from the high end of the
    // sorted array; callers ensure fracRank >= 1
    float interpolatedUpperRank(double[] ascendingArray, int len, double fracRank) {
        int rank = (int) Math.floor(fracRank);
        return (float) (ascendingArray[len - rank]
                + (fracRank - rank) * (ascendingArray[len - rank - 1] - ascendingArray[len - rank]));
    }

    // shifts the central value and widens bounds by the corresponding entries of
    // `other`, keeping upper/lower consistent with the shifted central value
    void adjust(int pos, RangeVector rangeVector, RangeVector other) {
        checkArgument(other.values.length == rangeVector.values.length, " mismatch in lengths");
        checkArgument(pos >= 0, " cannot be negative");
        checkArgument(pos < other.values.length, " cannot be this large");
        rangeVector.values[pos] += other.values[pos];
        rangeVector.upper[pos] = max(rangeVector.values[pos], rangeVector.upper[pos] + other.upper[pos]);
        rangeVector.lower[pos] = min(rangeVector.values[pos], rangeVector.lower[pos] + other.lower[pos]);
    }

    // like adjust(), but the bounds are recomputed from the pre-shift central
    // value, yielding a narrower (minimal) interval
    void adjustMinimal(int pos, RangeVector rangeVector, RangeVector other) {
        checkArgument(other.values.length == rangeVector.values.length, " mismatch in lengths");
        checkArgument(pos >= 0, " cannot be negative");
        checkArgument(pos < other.values.length, " cannot be this large");
        float oldVal = rangeVector.values[pos];
        rangeVector.values[pos] += other.values[pos];
        rangeVector.upper[pos] = max(rangeVector.values[pos], oldVal + other.upper[pos]);
        rangeVector.lower[pos] = min(rangeVector.values[pos], oldVal + other.lower[pos]);
    }
}
| 582 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/ForecastDescriptor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
@Getter
@Setter
public class ForecastDescriptor extends AnomalyDescriptor {
// all the following objects will be of length (forecast horizon x the number of
// input variables)
/**
* basic forecast field, with the time information to be used for TIME_AUGMENTED
* mode in the future
*/
TimedRangeVector timedForecast;
/**
* the distribution of errors -- for an algorithm that self-calibrates, this
* information has to be computed exposing the error can be of use for the user
* to audit the results. The distributions will use interpolations and will not
* adhere to specific quantile values -- thereby allowing for better
* generalization.
*/
RangeVector observedErrorDistribution;
/**
* typically RMSE is a single vector -- however unlike standard literature, we
* would not be limited to zero mean time series; in fact converting a time
* series to a zero mean series in an online manner is already challenging.
* Moreover, it is often the case that errors have a typical distribution skew;
* in the current library we have partitioned many of the explainabilty aspects
* (e.g., attribution in anomaly detection, directionality in density
* estimation, etc.) based on high/low; when the actual value being observed is
* correspondingly higher/lower than some (possibly implicit) baseline. We split
* the same for error.
*/
DiVector errorRMSE;
/**
* mean error corresponding to the forecast horizon x the number of input
* variables This is not used in the current intervalPrecision -- we use the
* median value from the error distribution.
*/
float[] errorMean;
/**
* in the forecast horizon x the number of input variables this corresponds to
* the fraction of variables \predicted correctly over the error horizon. A
* value of 1.0 is terrific.
*/
float[] intervalPrecision;
public ForecastDescriptor(double[] input, long inputTimeStamp, int horizon) {
super(input, inputTimeStamp);
int forecastLength = input.length * horizon;
this.timedForecast = new TimedRangeVector(forecastLength, horizon);
this.observedErrorDistribution = new RangeVector(forecastLength);
Arrays.fill(this.observedErrorDistribution.lower, -Float.MAX_VALUE);
Arrays.fill(this.observedErrorDistribution.upper, Float.MAX_VALUE);
this.errorMean = new float[forecastLength];
this.errorRMSE = new DiVector(forecastLength);
this.intervalPrecision = new float[forecastLength];
}
void setObservedErrorDistribution(RangeVector base) {
checkArgument(base.values.length == this.observedErrorDistribution.values.length, " incorrect length");
System.arraycopy(base.values, 0, this.observedErrorDistribution.values, 0, base.values.length);
System.arraycopy(base.upper, 0, this.observedErrorDistribution.upper, 0, base.upper.length);
System.arraycopy(base.lower, 0, this.observedErrorDistribution.lower, 0, base.lower.length);
}
void setIntervalPrecision(float[] calibration) {
System.arraycopy(calibration, 0, this.intervalPrecision, 0, calibration.length);
}
public float[] getIntervalPrecision() {
return Arrays.copyOf(intervalPrecision, intervalPrecision.length);
}
void setErrorMean(float[] errorMean) {
System.arraycopy(errorMean, 0, this.errorMean, 0, errorMean.length);
}
    // stores a copy of the directional RMSE estimates (split into high/low, see the
    // field comment on errorRMSE); the dimensions must equal the forecast length
    // fixed at construction
    void setErrorRMSE(DiVector errorRMSE) {
        checkArgument(this.errorRMSE.getDimensions() == errorRMSE.getDimensions(), " incorrect input");
        System.arraycopy(errorRMSE.high, 0, this.errorRMSE.high, 0, errorRMSE.high.length);
        System.arraycopy(errorRMSE.low, 0, this.errorRMSE.low, 0, errorRMSE.low.length);
    }
}
| 583 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/RCFCaster.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.returntypes.RangeVector;
@Getter
@Setter
public class RCFCaster extends ThresholdedRandomCutForest {

    // default fraction used when estimating error percentiles
    public static double DEFAULT_ERROR_PERCENTILE = 0.1;

    // default calibration strategy applied to forecasts
    public static Calibration DEFAULT_CALIBRATION = Calibration.SIMPLE;

    // plain (signed) difference between observed and forecast values
    public static BiFunction<Float, Float, Float> defaultError = (x, y) -> x - y;

    // symmetric relative error; scale free, bounded in [-2, 2]
    public static BiFunction<Float, Float, Float> alternateError = (x, y) -> 2 * (x - y) / (abs(x) + abs(y));

    // number of steps ahead to forecast at each point
    protected int forecastHorizon;

    // maintains observed errors and produces calibrated error distributions
    protected ErrorHandler errorHandler;

    // number of past errors considered when calibrating
    protected int errorHorizon;

    // calibration strategy applied in extrapolate()
    protected Calibration calibrationMethod;

    /**
     * Builder for RCFCaster; note the default transform method is changed to
     * NORMALIZE (unlike the parent ThresholdedRandomCutForest builder).
     */
    public static class Builder extends ThresholdedRandomCutForest.Builder<Builder> {
        int forecastHorizon;
        int errorHorizon;
        double percentile = DEFAULT_ERROR_PERCENTILE;
        protected Calibration calibrationMethod = DEFAULT_CALIBRATION;

        Builder() {
            super();
            // changing the default;
            transformMethod = TransformMethod.NORMALIZE;
        }

        public Builder forecastHorizon(int horizon) {
            this.forecastHorizon = horizon;
            return this;
        }

        public Builder errorHorizon(int errorHorizon) {
            this.errorHorizon = errorHorizon;
            return this;
        }

        public Builder percentile(double percentile) {
            this.percentile = percentile;
            return this;
        }

        public Builder calibration(Calibration calibrationMethod) {
            this.calibrationMethod = calibrationMethod;
            return this;
        }

        /**
         * Validates the configuration and builds an RCFCaster. Streaming imputation
         * and time augmentation are rejected because error estimation would then be
         * performed on values not actually observed; only internal shingling is
         * supported.
         *
         * @return a configured RCFCaster
         */
        @Override
        public RCFCaster build() {
            // message previously said "non-negative" although the check requires a
            // strictly positive horizon
            checkArgument(forecastHorizon > 0, "forecast horizon must be positive");
            // message previously said "> 1" although the check only requires > 0
            checkArgument(shingleSize > 0, "need shingle size > 0");
            checkArgument(forestMode != ForestMode.STREAMING_IMPUTE,
                    "error estimation with on the fly imputation should not be abstracted, "
                            + "either estimate errors outside of this object "
                            + "or perform on the fly imputation outside this code");
            checkArgument(forestMode != ForestMode.TIME_AUGMENTED,
                    "error estimation when time is used as a field in the forest should not be abstracted"
                            + "perform estimation outside this code");
            checkArgument(!internalShinglingEnabled.isPresent() || internalShinglingEnabled.get(),
                    "internal shingling only");
            if (errorHorizon == 0) {
                // default: enough history for stable estimates, at least two horizons
                errorHorizon = max(sampleSize, 2 * forecastHorizon);
            }
            validate();
            return new RCFCaster(this);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    public RCFCaster(Builder builder) {
        super(builder);
        forecastHorizon = builder.forecastHorizon;
        errorHorizon = builder.errorHorizon;
        errorHandler = new ErrorHandler(builder);
        calibrationMethod = builder.calibrationMethod;
    }

    // for mappers
    public RCFCaster(ForestMode forestMode, TransformMethod transformMethod, ScoringStrategy scoringStrategy,
            RandomCutForest forest, PredictorCorrector predictorCorrector, Preprocessor preprocessor,
            RCFComputeDescriptor descriptor, int forecastHorizon, ErrorHandler errorHandler, int errorHorizon,
            Calibration calibrationMethod) {
        super(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector, preprocessor, descriptor);
        this.forecastHorizon = forecastHorizon;
        this.errorHandler = errorHandler;
        this.errorHorizon = errorHorizon;
        this.calibrationMethod = calibrationMethod;
    }

    /**
     * a single call that preprocesses data, compute score/grade, generates forecast
     * and updates state
     *
     * @param inputPoint current input point
     * @param timestamp  time stamp of input
     * @return forecast descriptor for the current input point
     */
    @Override
    public ForecastDescriptor process(double[] inputPoint, long timestamp) {
        return process(inputPoint, timestamp, null);
    }

    /**
     * Runs the anomaly-detection pipeline on the descriptor, updates the error
     * handler with the actual observation, and attaches a (calibrated) forecast.
     * The ordering matters: actuals are recorded before the new forecast is
     * produced and stored.
     *
     * @param answer descriptor being filled in (mutated in place)
     */
    void augmentForecast(ForecastDescriptor answer) {
        answer.setScoringStrategy(scoringStrategy);
        answer = preprocessor.postProcess(predictorCorrector
                .detect(preprocessor.preProcess(answer, lastAnomalyDescriptor, forest), lastAnomalyDescriptor, forest),
                lastAnomalyDescriptor, forest);
        if (saveDescriptor(answer)) {
            lastAnomalyDescriptor = answer.copyOf();
        }
        TimedRangeVector timedForecast = new TimedRangeVector(
                forest.getDimensions() * forecastHorizon / preprocessor.getShingleSize(), forecastHorizon);
        // note that internal timestamp of answer is 1 step in the past
        // outputReady corresponds to first (and subsequent) forecast
        if (forest.isOutputReady()) {
            errorHandler.updateActuals(answer.getCurrentInput(), answer.getPostDeviations());
            errorHandler.augmentDescriptor(answer);
            // if the last point was an anomaly then it would be corrected
            // note that forecast would show up for a point even when the anomaly score is 0
            // because anomaly designation needs X previous points and forecast needs X
            // points; note that calibration would have been performed already
            timedForecast = extrapolate(forecastHorizon);
            // note that internal timestamp of answer is 1 step in the past
            // outputReady corresponds to first (and subsequent) forecast
            errorHandler.updateForecasts(timedForecast.rangeVector);
        }
        answer.setTimedForecast(timedForecast);
    }

    /**
     * a single call that preprocesses data, compute score/grade and updates state
     * when the current input has potentially missing values
     *
     * @param inputPoint    current input point
     * @param timestamp     time stamp of input
     * @param missingValues this is not meaningful for forecast; but kept as a
     *                      parameter since it conforms to (sometimes used)
     *                      ThresholdedRCF
     * @return forecast descriptor for the current input point
     */
    @Override
    public ForecastDescriptor process(double[] inputPoint, long timestamp, int[] missingValues) {
        checkArgument(missingValues == null, "on the fly imputation and error estimation should not mix");
        ForecastDescriptor answer = new ForecastDescriptor(inputPoint, timestamp, forecastHorizon);
        answer.setScoringStrategy(scoringStrategy);
        boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);
        try {
            if (cacheDisabled) {
                // turn caching on temporarily; forecasting is much faster with the
                // bounding box cache enabled
                forest.setBoundingBoxCacheFraction(1.0);
            }
            augmentForecast(answer);
        } finally {
            if (cacheDisabled) {
                // turn caching off
                forest.setBoundingBoxCacheFraction(0);
            }
        }
        return answer;
    }

    // applies the chosen calibration to the ranges in place
    public void calibrate(Calibration calibration, RangeVector ranges) {
        errorHandler.calibrate(calibration, ranges);
    }

    @Override
    public TimedRangeVector extrapolate(int horizon, boolean correct, double centrality) {
        return this.extrapolate(calibrationMethod, horizon, correct, centrality);
    }

    // extrapolates via the parent class and then calibrates the resulting ranges
    public TimedRangeVector extrapolate(Calibration calibration, int horizon, boolean correct, double centrality) {
        TimedRangeVector answer = super.extrapolate(horizon, correct, centrality);
        calibrate(calibration, answer.rangeVector);
        return answer;
    }

    /**
     * Processes a batch of points sequentially, returning the descriptors accepted
     * by the filter. The bounding box cache is temporarily enabled for the whole
     * batch (same rationale as in process()).
     */
    @Override
    public List<AnomalyDescriptor> processSequentially(double[][] data, Function<AnomalyDescriptor, Boolean> filter) {
        ArrayList<AnomalyDescriptor> answer = new ArrayList<>();
        if (data != null) {
            if (data.length > 0) {
                boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);
                try {
                    if (cacheDisabled) { // turn caching on temporarily
                        forest.setBoundingBoxCacheFraction(1.0);
                    }
                    long timestamp = preprocessor.getInternalTimeStamp();
                    int length = preprocessor.getInputLength();
                    for (double[] point : data) {
                        checkArgument(point.length == length, " nonuniform lengths ");
                        ForecastDescriptor description = new ForecastDescriptor(point, timestamp++, forecastHorizon);
                        augmentForecast(description);
                        if (filter.apply(description)) {
                            answer.add(description);
                        }
                    }
                } finally {
                    if (cacheDisabled) { // turn caching off
                        forest.setBoundingBoxCacheFraction(0);
                    }
                }
            }
        }
        return answer;
    }
}
| 584 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/GlobalLocalAnomalyDetector.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_NUMBER_OF_REPRESENTATIVES;
import static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_SHRINKAGE;
import static java.lang.Math.abs;
import static java.lang.Math.exp;
import static java.lang.Math.min;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.summarization.GenericMultiCenter;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.util.Weighted;
public class GlobalLocalAnomalyDetector<P> extends StreamSampler<P> {

    // default maximum number of clusters to consider
    public static int DEFAULT_MAX = 10;

    // an upper bound on the score
    public static float FLOAT_MAX = 10;

    // the relative weight of small clusters which should not be used in anomaly
    // detection; this controls masking effects
    public static double DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE = 0.005;

    // the number of steps we have to wait before reclustering; in principle this
    // can be 1, but that would be neither meaningful nor efficient; it defaults
    // to capacity/2
    protected int doNotreclusterWithin;

    // a thresholder for flagging anomalies (same thresholder as in TRCF)
    protected final BasicThresholder thresholder;

    // sequence number at which the last clustering was performed
    protected long lastCluster = 0L;

    // mean of the scores at the last reclustering; acts as a calibration
    // mechanism for deciding when to recluster again
    protected double lastMean = 1;

    // actual list of clusters
    List<ICluster<P>> clusters;

    // the maximum number of clusters to be considered; this is configurable and
    // can be changed dynamically
    protected int maxAllowed;

    // the shrinkage parameter in multi-centroid clustering such as CURE.
    // Shrinkage of 0 provides non-spherical shapes, whereas shrinkage of 1
    // corresponds to choosing a single centroid (not recommended)
    protected double shrinkage;

    // number of representatives used in multi-centroidal clustering
    protected int numberOfRepresentatives;

    // threshold of weight for small clusters so that masking can be averted; can
    // be changed dynamically
    protected double ignoreBelow;

    // the global function used in clustering, can be changed dynamically (but
    // clustering would be controlled automatically due to efficiency reasons)
    protected BiFunction<P, P, Double> globalDistance;

    public static Builder<?> builder() {
        return new Builder<>();
    }

    protected GlobalLocalAnomalyDetector(Builder<?> builder) {
        super(builder);
        // NOTE(review): builder.anomalyRate is not used here, unlike the merge
        // constructor below -- confirm whether that asymmetry is intentional
        thresholder = new BasicThresholder(builder.timeDecay);
        thresholder.setAbsoluteThreshold(1.2);
        doNotreclusterWithin = builder.doNotReclusterWithin.orElse(builder.capacity / 2);
        shrinkage = builder.shrinkage;
        maxAllowed = builder.maxAllowed;
        numberOfRepresentatives = builder.numberOfRepresentatives;
        ignoreBelow = builder.ignoreBelow;
    }

    protected GlobalLocalAnomalyDetector(Builder<?> builder, BiFunction<P, P, Double> distance) {
        this(builder);
        globalDistance = distance;
    }

    public void setGlobalDistance(BiFunction<P, P, Double> dist) {
        globalDistance = dist;
    }

    // sets the zFactor; increasing this number should increase precision (and will
    // likely lower recall); this is the same as in the BasicThresholder class
    public void setZfactor(double factor) {
        thresholder.setZfactor(factor);
    }

    public double getZfactor() {
        return thresholder.getZFactor();
    }

    // as in BasicThresholder class, useful in tuning
    public void setLowerThreshold(double lowerThreshold) {
        thresholder.setAbsoluteThreshold(lowerThreshold);
    }

    public double getLowerThreshold() {
        return thresholder.getAbsoluteThreshold();
    }

    public int getDoNotreclusterWithin() {
        return doNotreclusterWithin;
    }

    public void setDoNotreclusterWithin(int value) {
        checkArgument(value > 0, " has to be positive, recommended as 1/2 the capacity");
        doNotreclusterWithin = value;
    }

    public int getNumberOfRepresentatives() {
        return numberOfRepresentatives;
    }

    public void setNumberOfRepresentatives(int reps) {
        checkArgument(reps > 0, " has to be positive");
        // BUG FIX: the setter previously validated the argument but never stored
        // it, leaving numberOfRepresentatives unchanged
        numberOfRepresentatives = reps;
    }

    public double getShrinkage() {
        return shrinkage;
    }

    public void setShrinkage(double value) {
        checkArgument(value >= 0 && value <= 1, " has to be in [0,1]");
        shrinkage = value;
    }

    public double getIgnoreBelow() {
        return ignoreBelow;
    }

    public void setIgnoreBelow(double value) {
        checkArgument(value >= 0 && value < 0.1, " relative weight has to be in range [0,0.1] ");
        ignoreBelow = value;
    }

    public int getMaxAllowed() {
        return maxAllowed;
    }

    public void setMaxAllowed(int value) {
        checkArgument(value >= 5 && value < 100,
                " too few or too many clusters are not " + "meaningful to this algorithm");
        maxAllowed = value;
    }

    /**
     * The following provides a single invocation for scoring and updating.
     * Semantics of the recency biased sampling (sequentiality in decision making)
     * and efficient automatic reclustering demand that scoring and updating be
     * simultaneous. While scoring is provided as a separate function to let future
     * predictor-corrector methods reuse this code, it is strongly recommended that
     * only the process() function be invoked.
     *
     * @param object            current object being considered
     * @param weight            weight of the object (for clustering purposes as
     *                          well as recency biased sampling)
     * @param localDistance     a local distance metric that determines the order in
     *                          which different clusters are considered; can be
     *                          null, in which case the global distance would be
     *                          used
     * @param considerOcclusion consider occlusion by smaller dense clusters, when
     *                          adjacent to larger and more spread out clusters
     * @return a generic descriptor with score, threshold, anomaly grade (anomaly
     *         grade greater than zero is likely anomalous; anomaly grade can be -ve
     *         to allow down stream correction using semi-supervision or other
     *         means) and a list of cluster representatives (sorted by distance)
     *         with corresponding scores (lowest score may not correspond to lowest
     *         distance) which can be used to investigate anomalous points further
     */
    public GenericAnomalyDescriptor<P> process(P object, float weight, BiFunction<P, P, Double> localDistance,
            boolean considerOcclusion) {
        checkArgument(weight >= 0, "weight cannot be negative");
        // recompute clusters first; this enables easier merges and deserialization
        if (sequenceNumber > lastCluster + doNotreclusterWithin) {
            checkArgument(globalDistance != null, "set global distance function");
            double currentMean = thresholder.getPrimaryDeviation().getMean();
            // recluster when the score distribution has drifted, is high, or a long
            // time has elapsed since the last clustering
            if (abs(currentMean - lastMean) > 0.1 || currentMean > 1.7
                    || sequenceNumber > lastCluster + 20 * doNotreclusterWithin) {
                lastCluster = sequenceNumber;
                lastMean = currentMean;
                clusters = getClusters(maxAllowed, 4 * maxAllowed, 1, numberOfRepresentatives, shrinkage,
                        globalDistance, null);
            }
        }
        List<Weighted<P>> result = score(object, localDistance, considerOcclusion);
        double threshold = thresholder.threshold();
        double grade = 0;
        float score = 0;
        if (result != null) {
            score = result.stream().map(a -> a.weight).reduce(FLOAT_MAX, Float::min);
            if (score < FLOAT_MAX) {
                // an exponential attribution
                double sum = result.stream()
                        .map(a -> (double) ((a.weight == FLOAT_MAX) ? 0 : exp(-a.weight * a.weight)))
                        .reduce(0.0, Double::sum);
                for (Weighted<P> item : result) {
                    item.weight = (item.weight == FLOAT_MAX) ? 0.0f
                            : (float) min(1.0f, (float) exp(-item.weight * item.weight) / sum);
                }
            } else {
                // uniform attribution
                for (Weighted<P> item : result) {
                    item.weight = (float) 1.0 / (result.size());
                }
            }
            grade = thresholder.getAnomalyGrade(score, false);
        }
        // note average score would be 1
        thresholder.update(score, min(score, thresholder.getZFactor()));
        sample(object, weight);
        return new GenericAnomalyDescriptor<>(result, score, threshold, grade);
    }

    /**
     * The following function scores a point -- it considers an ordering of the
     * representatives based on the local distance; and considers occlusion --
     * namely, should an asteroid between moon and the earth be considered to be a
     * part of a cluster around the moon or the earth? The below provides some
     * initial geometric take on the three objects. We deliberately avoid explicit
     * density computation since it would be difficult to define uniform definition
     * of density.
     *
     * @param current           the object being scored
     * @param localDistance     a distance function that we wish to use for this
     *                          specific score. This can be null, and in that case
     *                          the global distance would be used
     * @param considerOcclusion a boolean that determines if close-by dense clusters
     *                          can occlude membership in further away "less dense
     *                          cluster"
     * @return A list of weighted type where the index is a representative (based on
     *         local distance) and the weight is the score corresponding to that
     *         representative. The scores are sorted from least anomalous to most
     *         anomalous.
     */
    public List<Weighted<P>> score(P current, BiFunction<P, P, Double> localDistance, boolean considerOcclusion) {
        if (clusters == null) {
            return null;
        } else {
            BiFunction<P, P, Double> local = (localDistance != null) ? localDistance : globalDistance;
            double totalWeight = clusters.stream().map(e -> e.getWeight()).reduce(0.0, Double::sum);
            ArrayList<Candidate> candidateList = new ArrayList<>();
            for (ICluster<P> cluster : clusters) {
                double wt = cluster.averageRadius();
                double tempMinimum = Double.MAX_VALUE;
                P closestInCluster = null;
                // find the closest sufficiently-heavy representative in this cluster
                for (Weighted<P> rep : cluster.getRepresentatives()) {
                    if (rep.weight > ignoreBelow * totalWeight) {
                        double tempDist = local.apply(current, rep.index);
                        if (tempDist < 0) {
                            throw new IllegalArgumentException(" distance cannot be negative ");
                        }
                        if (tempMinimum > tempDist) {
                            tempMinimum = tempDist;
                            closestInCluster = rep.index;
                        }
                    }
                }
                if (closestInCluster != null) {
                    candidateList.add(new Candidate(closestInCluster, wt, tempMinimum));
                }
            }
            candidateList.sort((o1, o2) -> Double.compare(o1.distance, o2.distance));
            checkArgument(candidateList.size() > 0, "empty candidate list, should not happen");
            ArrayList<Weighted<P>> answer = new ArrayList<>();
            if (candidateList.get(0).distance == 0.0) {
                // exact match: score 0, no further analysis needed
                answer.add(new Weighted<P>(candidateList.get(0).representative, 0.0f));
                return answer;
            }
            int index = 0;
            while (index < candidateList.size()) {
                Candidate head = candidateList.get(index);
                double dist = (localDistance == null) ? head.distance
                        : globalDistance.apply(current, head.representative);
                // score is distance scaled by the cluster's average radius
                float tempMeasure = (head.averageRadiusOfCluster > 0.0)
                        ? min(FLOAT_MAX, (float) (dist / head.averageRadiusOfCluster))
                        : FLOAT_MAX;
                answer.add(new Weighted<P>(head.representative, tempMeasure));
                if (considerOcclusion) {
                    for (int j = index + 1; j < candidateList.size(); j++) {
                        double occludeDistance = local.apply(head.representative, candidateList.get(j).representative);
                        double candidateDistance = candidateList.get(j).distance;
                        // NOTE(review): sqrt(head.distance^2 + candidateDistance^2) is
                        // always >= head.distance, so the second conjunct can never hold
                        // and no candidate is ever removed; also, removing at j inside a
                        // forward loop would skip the element shifted into position j --
                        // confirm the intended occlusion condition before relying on this
                        if (occludeDistance < candidateDistance && head.distance > Math
                                .sqrt(head.distance * head.distance + candidateDistance * candidateDistance)) {
                            // delete element
                            candidateList.remove(j);
                        }
                    }
                }
                ++index;
            }
            // we will not resort answer; the scores will be in order of distance
            // we note that score() should be invoked with care and likely postprocessing
            return answer;
        }
    }

    /**
     * a merging routine for the models which would be used in the future for
     * distributed analysis. Note that there is no point of storing sequence indices
     * explicitly in case of a merge.
     *
     * @param first     the first model
     * @param second    the second model
     * @param builder   the parameters of the new clustering
     * @param recluster a boolean that determines immediate reclustering
     * @param distance  the distance function of the new clustering
     */
    public GlobalLocalAnomalyDetector(GlobalLocalAnomalyDetector first, GlobalLocalAnomalyDetector second,
            Builder<?> builder, boolean recluster, BiFunction<P, P, Double> distance) {
        super(first, second, builder.capacity, builder.timeDecay, builder.randomSeed);
        thresholder = new BasicThresholder(builder.timeDecay, builder.anomalyRate, false);
        thresholder.setAbsoluteThreshold(1.2);
        doNotreclusterWithin = builder.doNotReclusterWithin.orElse(builder.capacity / 2);
        shrinkage = builder.shrinkage;
        maxAllowed = builder.maxAllowed;
        numberOfRepresentatives = builder.numberOfRepresentatives;
        globalDistance = distance;
        if (recluster) {
            lastCluster = sequenceNumber;
            clusters = getClusters(maxAllowed, 4 * maxAllowed, 1, numberOfRepresentatives, shrinkage, globalDistance,
                    null);
        }
    }

    /**
     * an inner class that is useful for the scoring procedure to avoid
     * recomputation of fields.
     */
    class Candidate {
        // the closest representative of the cluster
        P representative;
        // average radius of the cluster the representative belongs to
        double averageRadiusOfCluster;
        // distance of the queried point from the representative
        double distance;

        Candidate(P representative, double averageRadiusOfCluster, double distance) {
            this.representative = representative;
            this.averageRadiusOfCluster = averageRadiusOfCluster;
            this.distance = distance;
        }
    }

    public ArrayList<Weighted<P>> getObjectList() {
        return objectList;
    }

    public List<ICluster<P>> getClusters() {
        return clusters;
    }

    // runs the generic multi-center summarization over the sampled objects
    public List<ICluster<P>> getClusters(int maxAllowed, int initial, int stopAt, int representatives, double shrink,
            BiFunction<P, P, Double> distance, List<ICluster<P>> previousClusters) {
        BiFunction<P, Float, ICluster<P>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrink,
                representatives);
        return Summarizer.summarize(objectList, maxAllowed, initial, stopAt, false, 0.8, distance, clusterInitializer,
                0L, false, previousClusters);
    }

    /**
     * a builder
     */
    public static class Builder<T extends Builder<T>> extends StreamSampler.Builder<T> {
        protected double shrinkage = DEFAULT_SHRINKAGE;
        protected double ignoreBelow = DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE;
        protected int numberOfRepresentatives = DEFAULT_NUMBER_OF_REPRESENTATIVES;
        protected Optional<Integer> doNotReclusterWithin = Optional.empty();
        protected int maxAllowed = DEFAULT_MAX;
        protected double anomalyRate = 0.01;

        // ignores small clusters with population weight below this threshold
        public T ignoreBelow(double ignoreBelow) {
            this.ignoreBelow = ignoreBelow;
            return (T) this;
        }

        // parameters of the multi-representative CURE algorithm
        public T shrinkage(double shrinkage) {
            this.shrinkage = shrinkage;
            return (T) this;
        }

        // a parameter that ensures that clustering is not recomputed too frequently,
        // which can be both inefficient as well as jittery
        public T doNotReclusterWithin(int refresh) {
            this.doNotReclusterWithin = Optional.of(refresh);
            return (T) this;
        }

        // maximum number of clusters to consider
        public T maxAllowed(int maxAllowed) {
            this.maxAllowed = maxAllowed;
            return (T) this;
        }

        // parameters of the multi-representative CURE algorithm
        public T numberOfRepresentatives(int number) {
            this.numberOfRepresentatives = number;
            return (T) this;
        }

        // a flag that can adjust to the burstiness of anomalies
        public T anomalyRate(double anomalyRate) {
            this.anomalyRate = anomalyRate;
            return (T) this;
        }

        @Override
        public GlobalLocalAnomalyDetector build() {
            return new GlobalLocalAnomalyDetector<>(this);
        }
    }
}
| 585 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/StreamSampler.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.ArrayList;
import java.util.Optional;
import java.util.Random;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.sampler.ISampled;
import com.amazon.randomcutforest.store.IndexIntervalManager;
import com.amazon.randomcutforest.util.Weighted;
/**
* The following class is a sampler for generic objects that allow weighted time
* dependent sampling. It is an encapsulation of CompactSampler in
* RandomCutForest core and is meant to be extended in multiple ways. Hence the
* functions are protected and should be overriden/not used arbirarily.
*/
public class StreamSampler<P> {
// basic time dependent sampler
protected final CompactSampler sampler;
// list of objects
protected final ArrayList<Weighted<P>> objectList;
// managing indices
protected final IndexIntervalManager intervalManager;
// accounting for evicted items
protected Optional<P> evicted;
// sequence number used in sequential sampling
protected long sequenceNumber = -1L;
// number of items seen, different from sequenceNumber in case of merge
protected long entriesSeen = 0L;
public static Builder<?> builder() {
return new Builder<>();
}
public StreamSampler(Builder<?> builder) {
sampler = new CompactSampler.Builder<>().capacity(builder.capacity)
.storeSequenceIndexesEnabled(builder.storeSequenceIndexesEnabled).randomSeed(builder.randomSeed)
.initialAcceptFraction(builder.initialAcceptFraction).timeDecay(builder.timeDecay).build();
objectList = new ArrayList<>(builder.capacity);
intervalManager = new IndexIntervalManager(builder.capacity);
evicted = Optional.empty();
}
/**
* a basic sampling operation that accounts for weights of items. This function
* will be overriden in future classes.
*
* @param object to be sampled
* @param weight weight of object (non-negative); although 0 weight implies do
* not sample
* @return true if the object is sampled and false if the object is not sampled;
* if true then there may have been an eviction which is updated
*/
protected boolean sample(P object, float weight) {
++sequenceNumber;
++entriesSeen;
if (sampler.acceptPoint(sequenceNumber, weight)) {
Optional<ISampled<Integer>> samplerEvicted = sampler.getEvictedPoint();
if (samplerEvicted.isPresent()) {
int oldIndex = samplerEvicted.get().getValue();
evicted = Optional.of(objectList.get(oldIndex).index);
intervalManager.releaseIndex(oldIndex);
}
int index = intervalManager.takeIndex();
if (index < objectList.size()) {
objectList.set(index, new Weighted<>(object, weight));
} else {
checkArgument(index == objectList.size(), "incorrect behavior");
objectList.add(new Weighted<>(object, weight));
}
sampler.addPoint(index);
return true;
}
evicted = Optional.empty();
return false;
}
public StreamSampler(StreamSampler<P> first, StreamSampler<P> second, int capacity, double timeDecay, long seed) {
checkArgument(capacity > 0, "capacity has to be positive");
double initialAcceptFraction = max(first.sampler.getInitialAcceptFraction(),
second.sampler.getInitialAcceptFraction());
// merge would remove sequenceIndex information
objectList = new ArrayList<>(capacity);
int[] pointList = new int[capacity];
float[] weightList = new float[capacity];
intervalManager = new IndexIntervalManager(capacity);
evicted = Optional.empty();
double firstUpdate = -(first.sampler.getMaxSequenceIndex() - first.sampler.getMostRecentTimeDecayUpdate())
* first.sampler.getTimeDecay();
ArrayList<Weighted<Integer>> list = new ArrayList<>();
int offset = first.sampler.size();
int[] firstList = first.sampler.getPointIndexArray();
float[] firstWeightList = first.sampler.getWeightArray();
checkArgument(firstList.length == offset, " incorrect length");
checkArgument(firstWeightList.length == offset, " incorrect length");
for (int i = 0; i < firstList.length; i++) {
checkArgument(firstList[i] < offset, "incorrect heap numbering");
list.add(new Weighted<>(firstList[i], (float) (firstWeightList[i] + firstUpdate)));
}
double secondUpdate = -(second.sampler.getMaxSequenceIndex() - second.sampler.getMostRecentTimeDecayUpdate())
* second.sampler.getTimeDecay();
int[] secondList = second.sampler.getPointIndexArray();
float[] secondWeightList = second.sampler.getWeightArray();
checkArgument(secondList.length == secondWeightList.length, " incorrect length");
for (int i = 0; i < secondList.length; i++) {
list.add(new Weighted<>(secondList[i] + offset, (float) (secondWeightList[i] + secondUpdate)));
}
list.sort((o1, o2) -> Float.compare(o1.weight, o2.weight));
int size = min(capacity, list.size());
for (int j = size - 1; j >= 0; j--) {
int index = intervalManager.takeIndex();
checkArgument(index == size - j - 1, "error in behavior");
pointList[index] = index;
weightList[index] = list.get(j).weight;
if (list.get(j).index < offset) {
objectList.add(first.objectList.get(list.get(j).index));
} else {
objectList.add(second.objectList.get(list.get(j).index - offset));
}
}
// sequence number corresponds to linear order of time
this.sequenceNumber = max(first.sequenceNumber, second.sequenceNumber);
// entries seen is the sum
this.entriesSeen = first.entriesSeen + second.entriesSeen;
sampler = new CompactSampler.Builder<>().capacity(capacity).storeSequenceIndexesEnabled(false).randomSeed(seed)
.initialAcceptFraction(initialAcceptFraction).timeDecay(timeDecay).pointIndex(pointList)
.weight(weightList).randomSeed(seed).maxSequenceIndex(this.sequenceNumber)
.mostRecentTimeDecayUpdate(this.sequenceNumber).build();
}
    /** @return the stored objects, positionally aligned with the sampler's point indices */
    public ArrayList<Weighted<P>> getObjectList() {
        return objectList;
    }
    /** @return the maximum number of entries the underlying sampler retains */
    public int getCapacity() {
        return sampler.getCapacity();
    }
    /** @return the largest sequence number (linear order of time) observed so far */
    public long getSequenceNumber() {
        return sequenceNumber;
    }
    /** @return the total number of entries offered to this sampler (sums over merged samplers) */
    public long getEntriesSeen() {
        return entriesSeen;
    }
public static class Builder<T extends Builder<T>> {
private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
protected int capacity = DEFAULT_SAMPLE_SIZE;
protected double timeDecay = 1.0 / (DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY * capacity);
protected long randomSeed = new Random().nextLong();
protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;
public T capacity(int capacity) {
this.capacity = capacity;
return (T) this;
}
public T randomSeed(long seed) {
this.randomSeed = seed;
return (T) this;
}
public T initialAcceptFraction(double initialAcceptFraction) {
this.initialAcceptFraction = initialAcceptFraction;
return (T) this;
}
public T timeDecay(double timeDecay) {
this.timeDecay = timeDecay;
return (T) this;
}
public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
return (T) this;
}
public StreamSampler build() {
return new StreamSampler<>(this);
}
}
}
| 586 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/ThresholdedRandomCutForest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_BOUNDING_BOX_CACHE_FRACTION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_CENTER_OF_MASS_ENABLED;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INTERNAL_SHINGLING_ENABLED;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_NUMBER_OF_TREES;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_OUTPUT_AFTER_FRACTION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_PARALLEL_EXECUTION_ENABLED;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_PRECISION;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SHINGLE_SIZE;
import static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
import static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;
import static com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor.DEFAULT_START_NORMALIZATION;
import static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_ABSOLUTE_THRESHOLD;
import static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_SCORE_DIFFERENCING;
import static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_Z_FACTOR;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.function.Function;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.preprocessor.IPreprocessor;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.returntypes.TimedRangeVector;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.returntypes.RangeVector;
/**
* This class provides a combined RCF and thresholder, both of which operate in
* a streaming manner and respect the arrow of time.
*/
@Getter
@Setter
public class ThresholdedRandomCutForest {
    // saved description of the last seen anomaly; consulted by subsequent detections
    RCFComputeDescriptor lastAnomalyDescriptor;
    // forestMode of operation
    protected ForestMode forestMode = ForestMode.STANDARD;
    // transformation applied to the input before it reaches the forest
    protected TransformMethod transformMethod = TransformMethod.NONE;
    // scoring strategy used by the predictor-corrector
    protected ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;
    // the underlying streaming random cut forest
    protected RandomCutForest forest;
    // thresholding / correction logic applied to raw forest output
    protected PredictorCorrector predictorCorrector;
    // input preprocessing (shingling, normalization, imputation, time augmentation)
    protected IPreprocessor preprocessor;
    /**
     * Builds a thresholded forest from builder state. Note that the builder is
     * mutated along the way (dimensions may grow for TIME_AUGMENTED, internal
     * shingling may be forced) before the forest is constructed via
     * {@code builder.buildForest()}; the statement order below is significant.
     *
     * @param builder a (validated) builder
     */
    public ThresholdedRandomCutForest(Builder<?> builder) {
        forestMode = builder.forestMode;
        transformMethod = builder.transformMethod;
        scoringStrategy = builder.scoringStrategy;
        Preprocessor.Builder<?> preprocessorBuilder = Preprocessor.builder().shingleSize(builder.shingleSize)
                .transformMethod(builder.transformMethod).forestMode(builder.forestMode);
        int inputLength;
        if (builder.forestMode == ForestMode.TIME_AUGMENTED) {
            // input length is computed before the time coordinate is appended
            inputLength = builder.dimensions / builder.shingleSize;
            preprocessorBuilder.inputLength(inputLength);
            // one extra (time) slot per shingle entry
            builder.dimensions += builder.shingleSize;
            preprocessorBuilder.normalizeTime(builder.normalizeTime);
            // force internal shingling for this option
            builder.internalShinglingEnabled = Optional.of(true);
        } else if (builder.forestMode == ForestMode.STREAMING_IMPUTE) {
            checkArgument(builder.shingleSize > 1, " shingle size 1 is not useful in impute");
            inputLength = builder.dimensions / builder.shingleSize;
            preprocessorBuilder.inputLength(inputLength);
            preprocessorBuilder.imputationMethod(builder.imputationMethod);
            preprocessorBuilder.normalizeTime(true);
            if (builder.fillValues != null) {
                preprocessorBuilder.fillValues(builder.fillValues);
            }
            // forcing external for the forest to control admittance
            builder.internalShinglingEnabled = Optional.of(false);
            preprocessorBuilder.useImputedFraction(builder.useImputedFraction.orElse(0.5));
        } else {
            // STANDARD
            boolean smallInput = builder.internalShinglingEnabled.orElse(DEFAULT_INTERNAL_SHINGLING_ENABLED);
            inputLength = (smallInput) ? builder.dimensions / builder.shingleSize : builder.dimensions;
            preprocessorBuilder.inputLength(inputLength);
        }
        // must run after the mode-specific builder mutations above
        forest = builder.buildForest();
        validateNonNegativeArray(builder.weights);
        preprocessorBuilder.weights(builder.weights);
        preprocessorBuilder.weightTime(builder.weightTime.orElse(1.0));
        preprocessorBuilder.timeDecay(builder.transformDecay.orElse(1.0 / builder.sampleSize));
        // to be used later
        preprocessorBuilder.randomSeed(builder.randomSeed.orElse(0L) + 1);
        preprocessorBuilder.dimensions(builder.dimensions);
        preprocessorBuilder
                .stopNormalization(builder.stopNormalization.orElse(Preprocessor.DEFAULT_STOP_NORMALIZATION));
        preprocessorBuilder.startNormalization(builder.startNormalization.orElse(DEFAULT_START_NORMALIZATION));
        preprocessor = preprocessorBuilder.build();
        predictorCorrector = new PredictorCorrector(forest.getTimeDecay(), builder.anomalyRate, builder.autoAdjust,
                builder.dimensions / builder.shingleSize, builder.randomSeed.orElse(0L));
        lastAnomalyDescriptor = new RCFComputeDescriptor(null, 0, builder.forestMode, builder.transformMethod,
                builder.imputationMethod);
        predictorCorrector.setAbsoluteThreshold(builder.lowerThreshold.orElse(DEFAULT_ABSOLUTE_THRESHOLD));
        predictorCorrector.setZfactor(builder.zFactor);
        predictorCorrector.setScoreDifferencing(builder.scoreDifferencing.orElse(DEFAULT_SCORE_DIFFERENCING));
        builder.ignoreNearExpectedFromAbove.ifPresent(predictorCorrector::setIgnoreNearExpectedFromAbove);
        builder.ignoreNearExpectedFromBelow.ifPresent(predictorCorrector::setIgnoreNearExpectedFromBelow);
        builder.ignoreNearExpectedFromAboveByRatio.ifPresent(predictorCorrector::setIgnoreNearExpectedFromAboveByRatio);
        builder.ignoreNearExpectedFromBelowByRatio.ifPresent(predictorCorrector::setIgnoreNearExpectedFromBelowByRatio);
        predictorCorrector.setLastStrategy(builder.scoringStrategy);
        predictorCorrector.setIgnoreDrift(builder.alertOnceInDrift);
    }
void validateNonNegativeArray(double[] array) {
if (array != null) {
for (double element : array) {
checkArgument(element >= 0, " has to be non-negative");
}
}
}
    // for mappers: reconstructs a model from already-built components, no validation
    public ThresholdedRandomCutForest(ForestMode forestMode, TransformMethod transformMethod,
            ScoringStrategy scoringStrategy, RandomCutForest forest, PredictorCorrector predictorCorrector,
            Preprocessor preprocessor, RCFComputeDescriptor descriptor) {
        this.forestMode = forestMode;
        this.transformMethod = transformMethod;
        this.scoringStrategy = scoringStrategy;
        this.forest = forest;
        this.predictorCorrector = predictorCorrector;
        this.preprocessor = preprocessor;
        this.lastAnomalyDescriptor = descriptor;
    }
    // for conversion from other thresholding models: wraps an existing forest with
    // a fresh thresholder initialized from raw scores; no input transformation
    public ThresholdedRandomCutForest(RandomCutForest forest, double futureAnomalyRate, List<Double> values) {
        this.forest = forest;
        int dimensions = forest.getDimensions();
        int inputLength = (forest.isInternalShinglingEnabled()) ? dimensions / forest.getShingleSize()
                : forest.getDimensions();
        Preprocessor preprocessor = new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE)
                .dimensions(dimensions).shingleSize(forest.getShingleSize()).inputLength(inputLength).build();
        this.predictorCorrector = new PredictorCorrector(new BasicThresholder(values, futureAnomalyRate), inputLength);
        // sync the preprocessor's notion of progress with the forest's update count
        preprocessor.setValuesSeen((int) forest.getTotalUpdates());
        preprocessor.getDataQuality()[0].update(1.0);
        this.preprocessor = preprocessor;
        this.lastAnomalyDescriptor = new RCFComputeDescriptor(null, forest.getTotalUpdates());
    }
    // a descriptor is remembered as the "last anomaly" only if its grade is positive
    protected <T extends AnomalyDescriptor> boolean saveDescriptor(T lastDescriptor) {
        return (lastDescriptor.getAnomalyGrade() > 0);
    }
    /**
     * an extensible function call that applies a preprocess, a core function and
     * the postprocessing corresponding to the preprocess step. It manages the
     * caching strategy of the forest since there are multiple calls to the forest
     *
     * @param input        an abstract input (which may be mutated)
     * @param preprocessor the preprocessor applied to the input
     * @param core         the core function applied after preprocessing
     * @param <T>          the type of the input
     * @return the final result (switching caching off if needed)
     */
    public <T extends AnomalyDescriptor> T singleStepProcess(T input, IPreprocessor preprocessor, Function<T, T> core) {
        return preprocessor.postProcess(core.apply(preprocessor.preProcess(input, lastAnomalyDescriptor, forest)),
                lastAnomalyDescriptor, forest);
    }
    /**
     * a single call that preprocesses data, computes score/grade and updates
     * state; delegates to the three-argument variant with no missing values
     *
     * @param inputPoint current input point
     * @param timestamp  time stamp of input
     * @return anomaly descriptor for the current input point
     */
    public AnomalyDescriptor process(double[] inputPoint, long timestamp) {
        return process(inputPoint, timestamp, null);
    }
    /**
     * a single call that preprocesses data, computes score/grade and updates
     * state when the current input has potentially missing values
     *
     * @param inputPoint    current input point
     * @param timestamp     time stamp of input
     * @param missingValues indices of the input which are missing/questionable
     *                      values
     * @return anomaly descriptor for the current input point
     */
    public AnomalyDescriptor process(double[] inputPoint, long timestamp, int[] missingValues) {
        Function<AnomalyDescriptor, AnomalyDescriptor> function = (x) -> predictorCorrector.detect(x,
                lastAnomalyDescriptor, forest);
        AnomalyDescriptor description = new AnomalyDescriptor(inputPoint, timestamp);
        description.setScoringStrategy(scoringStrategy);
        // the forest is queried several times per point; temporarily enabling the
        // bounding box cache makes those queries cheaper, restored in finally
        boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);
        try {
            if (cacheDisabled) { // turn caching on temporarily
                forest.setBoundingBoxCacheFraction(1.0);
            }
            if (missingValues != null) {
                // NOTE(review): duplicate indices are not rejected here -- presumably
                // harmless downstream, but confirm against the preprocessor
                checkArgument(missingValues.length <= inputPoint.length, " incorrect data");
                for (int i = 0; i < missingValues.length; i++) {
                    checkArgument(missingValues[i] >= 0 && missingValues[i] < inputPoint.length,
                            " incorrect positions ");
                }
                description.setMissingValues(missingValues);
            }
            description = singleStepProcess(description, preprocessor, function);
        } finally {
            if (cacheDisabled) { // turn caching off
                forest.setBoundingBoxCacheFraction(0);
            }
        }
        if (saveDescriptor(description)) {
            lastAnomalyDescriptor = description.copyOf();
        }
        return description;
    }
/**
* the following function processes a list of vectors sequentially; the main
* benefit of this invocation is the caching is persisted from one data point to
* another and thus the execution is efficient. Moreover in many scenarios where
* serialization deserialization is expensive then it may be of benefit of
* invoking sequential process on a contiguous chunk of input (we avoid the use
* of the word batch -- the entire goal of this procedure is to provide
* sequential processing and not standard batch processing). The procedure
* avoids transfer of ephemeral transient objects for non-anomalies and thereby
* can have additional benefits. At the moment the operation does not support
* external timestamps.
*
* @param data a vectors of vectors (each of which has to have the same
* inputLength)
* @param filter a condition to drop desriptor (recommended filter: anomalyGrade
* positive)
* @return collection of descriptors of the anomalies filtered by the condition
*/
public List<AnomalyDescriptor> processSequentially(double[][] data, Function<AnomalyDescriptor, Boolean> filter) {
ArrayList<AnomalyDescriptor> answer = new ArrayList<>();
Function<AnomalyDescriptor, AnomalyDescriptor> function = (x) -> predictorCorrector.detect(x,
lastAnomalyDescriptor, forest);
if (data != null && data.length > 0) {
boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);
try {
if (cacheDisabled) { // turn caching on temporarily
forest.setBoundingBoxCacheFraction(1.0);
}
long timestamp = preprocessor.getInternalTimeStamp();
int length = preprocessor.getInputLength();
for (double[] point : data) {
checkArgument(point.length == length, " nonuniform lengths ");
AnomalyDescriptor description = new AnomalyDescriptor(point, timestamp++);
description.setScoringStrategy(scoringStrategy);
description = singleStepProcess(description, preprocessor, function);
if (saveDescriptor(description)) {
lastAnomalyDescriptor = description.copyOf();
}
if (filter.apply(description)) {
answer.add(description);
}
}
} finally {
if (cacheDisabled) { // turn caching off
forest.setBoundingBoxCacheFraction(0);
}
}
}
return answer;
}
// recommended filter
public List<AnomalyDescriptor> processSequentially(double[][] data) {
return processSequentially(data, x -> x.anomalyGrade > 0);
}
    /**
     * a function that extrapolates the data seen by the ThresholdedRCF model, and
     * uses the transformations allowed (as opposed to just using RCFs). The
     * forecasting also allows for predictor-corrector pattern which implies that
     * some noise can be eliminated -- this can be important for various
     * transformations. While the algorithm can function for STREAMING_IMPUTE mode
     * where missing data is imputed on the fly, it may require effort to validate
     * that the internal imputation is reasonably consistent with extrapolation. In
     * general, since the STREAMING_IMPUTE can use non-RCF options to fill in
     * missing data, the internal imputation and extrapolation need not be
     * consistent.
     *
     * @param horizon    the length of time in the future which is being forecast
     * @param correct    a boolean indicating if predictor-corrector subroutine
     *                   should be turned on; this is specially helpful if there has
     *                   been an anomaly in the recent past
     * @param centrality in general RCF predicts the p50 value of conditional
     *                   samples (centrality = 1). This parameter relaxes the
     *                   conditional sampling. Using assumptions about input data
     *                   (hence external to this code) it may be possible to use
     *                   this parameter and the range information for confidence
     *                   bounds.
     * @return a timed range vector where the values[i] correspond to the forecast
     *         for horizon (i+1). The upper and lower arrays indicate the
     *         corresponding bounds based on the conditional sampling (and
     *         transformation). Note that TRCF manages time in process() and thus
     *         the forecasts always have timestamps associated which makes it easier
     *         to execute the same code for various forest modes such as
     *         STREAMING_IMPUTE, STANDARD and TIME_AUGMENTED. For STREAMING_IMPUTE
     *         the time components of the prediction will be 0 because the time
     *         information is already being used to fill in missing entries. For
     *         STANDARD mode the time components would correspond to average arrival
     *         difference. For TIME_AUGMENTED mode the time components would be the
     *         result of the joint prediction. Finally note that setting weight of
     *         time or any of the input columns will also 0 out the corresponding
     *         forecast.
     */
    public TimedRangeVector extrapolate(int horizon, boolean correct, double centrality) {
        int shingleSize = preprocessor.getShingleSize();
        checkArgument(shingleSize > 1, "extrapolation is not meaningful for shingle size = 1");
        // note the forest may have external shingling ...
        int dimensions = forest.getDimensions();
        int blockSize = dimensions / shingleSize;
        double[] lastPoint = preprocessor.getLastShingledPoint();
        if (forest.isOutputReady()) {
            int gap = (int) (preprocessor.getInternalTimeStamp() - lastAnomalyDescriptor.getInternalTimeStamp());
            float[] newPoint = toFloatArray(lastPoint);
            // gap will be at least 1
            if (gap <= shingleSize && correct && lastAnomalyDescriptor.getExpectedRCFPoint() != null) {
                if (gap == 1) {
                    // the anomaly is the most recent point: substitute its expected value
                    newPoint = toFloatArray(lastAnomalyDescriptor.getExpectedRCFPoint());
                } else {
                    // the anomaly is still inside the shingle: patch the affected block
                    newPoint = predictorCorrector.applyPastCorrector(newPoint, gap, shingleSize, blockSize,
                            preprocessor.getScale(), transformMethod, lastAnomalyDescriptor);
                }
            }
            RangeVector answer = forest.extrapolateFromShingle(newPoint, horizon, blockSize, centrality);
            return preprocessor.invertForecastRange(answer, lastAnomalyDescriptor);
        } else {
            // forest not ready: return an empty (zero) forecast
            // NOTE(review): the nested constructor presumably invokes a copy
            // constructor of TimedRangeVector; verify this is intentional and not an
            // accidental double-wrap of new TimedRangeVector(horizon * blockSize, horizon)
            return new TimedRangeVector(new TimedRangeVector(horizon * blockSize, horizon));
        }
    }
    // convenience overload: correction enabled, default centrality (p50)
    public TimedRangeVector extrapolate(int horizon) {
        return extrapolate(horizon, true, 1.0);
    }
    public RandomCutForest getForest() {
        return forest;
    }
    // ---- thin delegates forwarding tuning knobs to the predictor-corrector ----
    public void setZfactor(double factor) {
        predictorCorrector.setZfactor(factor);
    }
    public void setLowerThreshold(double lower) {
        predictorCorrector.setAbsoluteThreshold(lower);
    }
    // deprecated: "horizon" here is the complement of score differencing; use
    // setScoreDifferencing instead
    @Deprecated
    public void setHorizon(double horizon) {
        predictorCorrector.setScoreDifferencing(1 - horizon);
    }
    public void setScoreDifferencing(double scoreDifferencing) {
        predictorCorrector.setScoreDifferencing(scoreDifferencing);
    }
    public void setIgnoreNearExpectedFromAbove(double[] ignoreSimilarFromAbove) {
        predictorCorrector.setIgnoreNearExpectedFromAbove(ignoreSimilarFromAbove);
    }
    public void setIgnoreNearExpectedFromAboveByRatio(double[] ignoreSimilarFromAbove) {
        predictorCorrector.setIgnoreNearExpectedFromAboveByRatio(ignoreSimilarFromAbove);
    }
    public void setIgnoreNearExpectedFromBelow(double[] ignoreSimilarFromBelow) {
        predictorCorrector.setIgnoreNearExpectedFromBelow(ignoreSimilarFromBelow);
    }
    public void setIgnoreNearExpectedFromBelowByRatio(double[] ignoreSimilarFromBelow) {
        predictorCorrector.setIgnoreNearExpectedFromBelowByRatio(ignoreSimilarFromBelow);
    }
    public void setScoringStrategy(ScoringStrategy strategy) {
        this.scoringStrategy = strategy;
    }
    @Deprecated
    public void setInitialThreshold(double initial) {
        predictorCorrector.setInitialThreshold(initial);
    }
    /**
     * @return a new builder.
     */
    public static Builder<?> builder() {
        return new Builder<>();
    }
    /**
     * Fluent builder for {@link ThresholdedRandomCutForest}; validates cross-field
     * constraints in {@code validate()} before construction.
     */
    public static class Builder<T extends Builder<T>> {
        // We use Optional types for optional primitive fields when it doesn't make
        // sense to use a constant default.
        protected int dimensions;
        protected int sampleSize = DEFAULT_SAMPLE_SIZE;
        protected Optional<Integer> outputAfter = Optional.empty();
        protected Optional<Integer> startNormalization = Optional.empty();
        protected Optional<Integer> stopNormalization = Optional.empty();
        protected int numberOfTrees = DEFAULT_NUMBER_OF_TREES;
        protected Optional<Double> timeDecay = Optional.empty();
        protected Optional<Double> scoreDifferencing = Optional.empty();
        protected Optional<Double> lowerThreshold = Optional.empty();
        protected Optional<Double> weightTime = Optional.empty();
        protected Optional<Long> randomSeed = Optional.empty();
        protected boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
        protected boolean centerOfMassEnabled = DEFAULT_CENTER_OF_MASS_ENABLED;
        protected boolean parallelExecutionEnabled = DEFAULT_PARALLEL_EXECUTION_ENABLED;
        protected Optional<Integer> threadPoolSize = Optional.empty();
        protected Precision precision = DEFAULT_PRECISION;
        protected double boundingBoxCacheFraction = DEFAULT_BOUNDING_BOX_CACHE_FRACTION;
        protected int shingleSize = DEFAULT_SHINGLE_SIZE;
        protected Optional<Boolean> internalShinglingEnabled = Optional.empty();
        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;
        // target rate of anomalies used by the thresholder
        protected double anomalyRate = 0.01;
        protected TransformMethod transformMethod = TransformMethod.NONE;
        protected ImputationMethod imputationMethod = PREVIOUS;
        protected ForestMode forestMode = ForestMode.STANDARD;
        protected ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;
        protected boolean normalizeTime = false;
        // defensive copies are taken in the corresponding setters
        protected double[] fillValues = null;
        protected double[] weights = null;
        protected Optional<Double> useImputedFraction = Optional.empty();
        protected boolean autoAdjust = false;
        protected double zFactor = DEFAULT_Z_FACTOR;
        protected boolean alertOnceInDrift = false;
        protected Optional<Double> transformDecay = Optional.empty();
        protected Optional<double[]> ignoreNearExpectedFromAbove = Optional.empty();
        protected Optional<double[]> ignoreNearExpectedFromBelow = Optional.empty();
        protected Optional<double[]> ignoreNearExpectedFromAboveByRatio = Optional.empty();
        protected Optional<double[]> ignoreNearExpectedFromBelowByRatio = Optional.empty();
        /**
         * Reconciles mode-dependent settings: forces/forbids internal shingling per
         * forest mode, rejects infeasible imputation settings, and makes
         * startNormalization and outputAfter mutually consistent. Mutates builder
         * fields; called from build().
         */
        void validate() {
            if (forestMode == ForestMode.TIME_AUGMENTED) {
                if (internalShinglingEnabled.isPresent()) {
                    checkArgument(shingleSize == 1 || internalShinglingEnabled.get(),
                            " shingle size has to be 1 or " + "internal shingling must turned on");
                    checkArgument(transformMethod == TransformMethod.NONE || internalShinglingEnabled.get(),
                            " internal shingling must turned on for transforms");
                } else {
                    internalShinglingEnabled = Optional.of(true);
                }
                if (useImputedFraction.isPresent()) {
                    throw new IllegalArgumentException(" imputation infeasible");
                }
            } else if (forestMode == ForestMode.STREAMING_IMPUTE) {
                checkArgument(shingleSize > 1, "imputation with shingle size 1 is not meaningful");
                internalShinglingEnabled.ifPresent(x -> checkArgument(x,
                        " input cannot be shingled (even if internal representation is different) "));
            } else {
                // STANDARD: internal shingling on by default
                if (!internalShinglingEnabled.isPresent()) {
                    internalShinglingEnabled = Optional.of(true);
                }
                if (useImputedFraction.isPresent()) {
                    throw new IllegalArgumentException(" imputation infeasible");
                }
            }
            if (startNormalization.isPresent()) {
                // we should not be setting normalizations unless we are careful
                if (outputAfter.isPresent()) {
                    // can be overspecified
                    checkArgument(outputAfter.get() + shingleSize - 1 > startNormalization.get(),
                            "output after has to wait till normalization, reduce normalization");
                } else {
                    int n = startNormalization.get();
                    checkArgument(n > 0, " startNormalization has to be positive");
                    // if start normalization is low then first few output can be 0
                    outputAfter = Optional
                            .of(max(max(1, (int) (sampleSize * DEFAULT_OUTPUT_AFTER_FRACTION)), n - shingleSize + 1));
                }
            } else {
                if (outputAfter.isPresent()) {
                    startNormalization = Optional.of(min(DEFAULT_START_NORMALIZATION, outputAfter.get()));
                }
            }
        }
        /** validates cross-field constraints, then constructs the model */
        public ThresholdedRandomCutForest build() {
            validate();
            return new ThresholdedRandomCutForest(this);
        }
        /**
         * Builds the underlying RandomCutForest from the builder state; called by the
         * ThresholdedRandomCutForest constructor after mode-specific mutations.
         */
        protected RandomCutForest buildForest() {
            RandomCutForest.Builder builder = new RandomCutForest.Builder().dimensions(dimensions)
                    .sampleSize(sampleSize).numberOfTrees(numberOfTrees).compact(true)
                    .storeSequenceIndexesEnabled(storeSequenceIndexesEnabled).centerOfMassEnabled(centerOfMassEnabled)
                    .parallelExecutionEnabled(parallelExecutionEnabled).precision(precision)
                    .boundingBoxCacheFraction(boundingBoxCacheFraction).shingleSize(shingleSize)
                    .internalShinglingEnabled(internalShinglingEnabled.get())
                    .initialAcceptFraction(initialAcceptFraction);
            if (forestMode != ForestMode.STREAMING_IMPUTE) {
                outputAfter.ifPresent(builder::outputAfter);
            } else {
                // forcing the change between internal and external shingling
                outputAfter.ifPresent(n -> {
                    int num = max(startNormalization.orElse(DEFAULT_START_NORMALIZATION), n) - shingleSize + 1;
                    checkArgument(num > 0, " max(start normalization, output after) should be at least " + shingleSize);
                    builder.outputAfter(num);
                });
            }
            timeDecay.ifPresent(builder::timeDecay);
            randomSeed.ifPresent(builder::randomSeed);
            threadPoolSize.ifPresent(builder::threadPoolSize);
            return builder.build();
        }
        // ---- fluent setters: each stores one field and returns this, using the
        // standard simulated self-type cast (T) this ----
        public T dimensions(int dimensions) {
            this.dimensions = dimensions;
            return (T) this;
        }
        public T sampleSize(int sampleSize) {
            this.sampleSize = sampleSize;
            return (T) this;
        }
        public T startNormalization(int startNormalization) {
            this.startNormalization = Optional.of(startNormalization);
            return (T) this;
        }
        public T stopNormalization(int stopNormalization) {
            this.stopNormalization = Optional.of(stopNormalization);
            return (T) this;
        }
        public T outputAfter(int outputAfter) {
            this.outputAfter = Optional.of(outputAfter);
            return (T) this;
        }
        public T numberOfTrees(int numberOfTrees) {
            this.numberOfTrees = numberOfTrees;
            return (T) this;
        }
        public T shingleSize(int shingleSize) {
            this.shingleSize = shingleSize;
            return (T) this;
        }
        public T timeDecay(double timeDecay) {
            this.timeDecay = Optional.of(timeDecay);
            return (T) this;
        }
        public T transformDecay(double transformDecay) {
            this.transformDecay = Optional.of(transformDecay);
            return (T) this;
        }
        public T zFactor(double zFactor) {
            this.zFactor = zFactor;
            return (T) this;
        }
        public T useImputedFraction(double fraction) {
            this.useImputedFraction = Optional.of(fraction);
            return (T) this;
        }
        public T randomSeed(long randomSeed) {
            this.randomSeed = Optional.of(randomSeed);
            return (T) this;
        }
        public T centerOfMassEnabled(boolean centerOfMassEnabled) {
            this.centerOfMassEnabled = centerOfMassEnabled;
            return (T) this;
        }
        public T parallelExecutionEnabled(boolean parallelExecutionEnabled) {
            this.parallelExecutionEnabled = parallelExecutionEnabled;
            return (T) this;
        }
        public T threadPoolSize(int threadPoolSize) {
            this.threadPoolSize = Optional.of(threadPoolSize);
            return (T) this;
        }
        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
            return (T) this;
        }
        // deprecated no-op: forests are always compact now
        @Deprecated
        public T compact(boolean compact) {
            return (T) this;
        }
        public T internalShinglingEnabled(boolean internalShinglingEnabled) {
            this.internalShinglingEnabled = Optional.of(internalShinglingEnabled);
            return (T) this;
        }
        public T precision(Precision precision) {
            this.precision = precision;
            return (T) this;
        }
        public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {
            this.boundingBoxCacheFraction = boundingBoxCacheFraction;
            return (T) this;
        }
        public T initialAcceptFraction(double initialAcceptFraction) {
            this.initialAcceptFraction = initialAcceptFraction;
            return (T) this;
        }
        public Random getRandom() {
            // If a random seed was given, use it to create a new Random. Otherwise, call
            // the 0-argument constructor
            return randomSeed.map(Random::new).orElseGet(Random::new);
        }
        public T anomalyRate(double anomalyRate) {
            this.anomalyRate = anomalyRate;
            return (T) this;
        }
        public T imputationMethod(ImputationMethod imputationMethod) {
            this.imputationMethod = imputationMethod;
            return (T) this;
        }
        public T fillValues(double[] values) {
            // values cannot be a null
            this.fillValues = Arrays.copyOf(values, values.length);
            return (T) this;
        }
        public T weights(double[] values) {
            // values cannot be a null
            this.weights = Arrays.copyOf(values, values.length);
            return (T) this;
        }
        public T normalizeTime(boolean normalizeTime) {
            this.normalizeTime = normalizeTime;
            return (T) this;
        }
        public T transformMethod(TransformMethod method) {
            this.transformMethod = method;
            return (T) this;
        }
        public T forestMode(ForestMode forestMode) {
            this.forestMode = forestMode;
            return (T) this;
        }
        public T scoreDifferencing(double persistence) {
            this.scoreDifferencing = Optional.of(persistence);
            return (T) this;
        }
        public T autoAdjust(boolean autoAdjust) {
            this.autoAdjust = autoAdjust;
            return (T) this;
        }
        public T weightTime(double value) {
            this.weightTime = Optional.of(value);
            return (T) this;
        }
        // the ignoreNearExpected* setters accept null (treated as "unset")
        public T ignoreNearExpectedFromAbove(double[] ignoreSimilarFromAbove) {
            this.ignoreNearExpectedFromAbove = Optional.ofNullable(ignoreSimilarFromAbove);
            return (T) this;
        }
        public T ignoreNearExpectedFromBelow(double[] ignoreSimilarFromBelow) {
            this.ignoreNearExpectedFromBelow = Optional.ofNullable(ignoreSimilarFromBelow);
            return (T) this;
        }
        public T ignoreNearExpectedFromAboveByRatio(double[] ignoreSimilarFromAboveByRatio) {
            this.ignoreNearExpectedFromAboveByRatio = Optional.ofNullable(ignoreSimilarFromAboveByRatio);
            return (T) this;
        }
        public T ignoreNearExpectedFromBelowByRatio(double[] ignoreSimilarFromBelowByRatio) {
            this.ignoreNearExpectedFromBelowByRatio = Optional.ofNullable(ignoreSimilarFromBelowByRatio);
            return (T) this;
        }
        public T scoringStrategy(ScoringStrategy scoringStrategy) {
            this.scoringStrategy = scoringStrategy;
            return (T) this;
        }
        public T alertOnce(boolean alertOnceInDrift) {
            this.alertOnceInDrift = alertOnceInDrift;
            return (T) this;
        }
    }
}
| 587 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/statistics/Deviation.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.statistics;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
/**
 * This class maintains a simple discounted statistics. Setters are avoided
 * except for discount rate which is useful as initialization from raw scores
 */
public class Deviation {
    // exponential discount factor in [0, 1); 0 means plain (undiscounted) statistics
    protected double discount;
    // total discounted weight accumulated so far
    protected double weight = 0;
    // discounted sum of squared observations
    protected double sumSquared = 0;
    // discounted sum of observations
    protected double sum = 0;
    // number of update() calls
    protected int count = 0;
    public Deviation() {
        discount = 0;
    }
    public Deviation(double discount) {
        checkArgument(0 <= discount && discount < 1, "incorrect discount parameter");
        this.discount = discount;
    }
    // full-state constructor, used by copy() and state restoration; no validation
    public Deviation(double discount, double weight, double sumSquared, double sum, int count) {
        this.discount = discount;
        this.weight = weight;
        this.sumSquared = sumSquared;
        this.sum = sum;
        this.count = count;
    }
    /** @return an independent copy of this object */
    public Deviation copy() {
        return new Deviation(this.discount, this.weight, this.sumSquared, this.sum, this.count);
    }
    /** @return the discounted mean, or 0 if no weight has accumulated */
    public double getMean() {
        return (weight <= 0) ? 0 : sum / weight;
    }
    /**
     * folds a new observation into the discounted statistics; during warm-up the
     * factor 1 - 1/(count + 2) is used instead of 1 - discount so the first few
     * observations are not discounted too aggressively
     */
    public void update(double score) {
        double factor = (discount == 0) ? 1 : Math.min(1 - discount, 1 - 1.0 / (count + 2));
        sum = sum * factor + score;
        sumSquared = sumSquared * factor + score * score;
        weight = weight * factor + 1.0;
        ++count;
    }
    /** @return the discounted standard deviation; 0 when empty or when rounding yields a non-positive variance */
    public double getDeviation() {
        if (weight <= 0) {
            return 0;
        }
        double temp = sum / weight;
        double answer = sumSquared / weight - temp * temp;
        return (answer > 0) ? Math.sqrt(answer) : 0;
    }
    /** @return true if no weight has accumulated (no effective observations) */
    public boolean isEmpty() {
        return weight == 0;
    }
    public double getDiscount() {
        return discount;
    }
    public void setDiscount(double discount) {
        this.discount = discount;
    }
    public double getSum() {
        return sum;
    }
    public double getSumSquared() {
        return sumSquared;
    }
    public double getWeight() {
        return weight;
    }
    public int getCount() {
        return count;
    }
    public void setCount(int count) {
        this.count = count;
    }
    /** clears all accumulated state (the discount rate is retained) */
    public void reset() {
        weight = 0;
        sum = 0;
        count = 0;
        sumSquared = 0;
    }
}
| 588 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/calibration/Calibration.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.calibration;
/**
 * Options for calibrating forecast intervals based on observed past errors.
 */
public enum Calibration {

    /**
     * no calibration; intervals are used as produced
     */
    NONE,

    /**
     * a basic starting point where the intervals are adjusted to be the minimal
     * necessary based on past error; the intervals are smaller -- but the interval
     * precision will likely be close to 1 - 2 * percentile
     */
    MINIMAL,

    /**
     * a Markov inequality based interval, where the past error and model errors are
     * additive. The interval precision is likely higher than MINIMAL but so are the
     * intervals.
     */
    SIMPLE;
}
| 589 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/RCFCasterState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import static com.amazon.randomcutforest.state.Version.V3_8;
import lombok.Data;
import com.amazon.randomcutforest.parkservices.state.errorhandler.ErrorHandlerState;
/**
 * Serializable state corresponding to an RCFCaster; extends the thresholded
 * forest state with the forecasting-specific fields.
 */
@Data
public class RCFCasterState extends ThresholdedRandomCutForestState {
    private static final long serialVersionUID = 1L;
    // version tag of the serialization format
    private String version = V3_8;
    // forecast horizon of the caster (see RCFCasterMapper)
    private int forecastHorizon;
    // serialized ErrorHandler (flattened past forecasts, actuals, deviations)
    private ErrorHandlerState errorHandler;
    // error horizon of the caster (see RCFCasterMapper)
    private int errorHorizon;
    // name of the Calibration enum value in use
    private String calibrationMethod;
}
| 590 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import static com.amazon.randomcutforest.state.Version.V3_8;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorState;
import com.amazon.randomcutforest.parkservices.state.preprocessor.PreprocessorState;
import com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorState;
import com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;
import com.amazon.randomcutforest.state.RandomCutForestState;
import com.amazon.randomcutforest.state.returntypes.DiVectorState;
/**
 * Serializable state for a ThresholdedRandomCutForest. A number of fields are
 * retained only so that 2.1-era serialized models continue to deserialize; the
 * mapper reconstructs current objects from them when the newer fields are null.
 */
@Data
public class ThresholdedRandomCutForestState implements Serializable {
    private static final long serialVersionUID = 1L;
    // version tag of the serialization format
    private String version = V3_8;
    // serialized state of the underlying RandomCutForest (intentionally package-private)
    RandomCutForestState forestState;
    // deprecated but not marked due to 2.1 models
    private BasicThresholderState thresholderState;
    private PreprocessorState[] preprocessorStates;
    // following fields are deprecated, but not removed for compatibility with 2.1
    // models
    private double ignoreSimilarFactor;
    private double triggerFactor;
    private long lastAnomalyTimeStamp;
    private double lastAnomalyScore;
    private DiVectorState lastAnomalyAttribution;
    private double lastScore;
    private double[] lastAnomalyPoint;
    private double[] lastExpectedPoint;
    private boolean previousIsPotentialAnomaly;
    private boolean inHighScoreRegion;
    private boolean ignoreSimilar;
    private int numberOfAttributors;
    // end deprecated segment
    private long randomSeed;
    // names of the ForestMode / TransformMethod / ScoringStrategy enum values
    private String forestMode;
    private String transformMethod;
    private String scoringStrategy;
    private int lastRelativeIndex;
    private int lastReset;
    // null in old models; the mapper falls back to the deprecated fields above
    private PredictorCorrectorState predictorCorrectorState;
    // null in old models; the mapper falls back to the deprecated fields above
    private ComputeDescriptorState lastDescriptorState;
}
| 591 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.PredictorCorrector;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;
import com.amazon.randomcutforest.parkservices.state.preprocessor.PreprocessorMapper;
import com.amazon.randomcutforest.parkservices.state.preprocessor.PreprocessorState;
import com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;
import com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderMapper;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
import com.amazon.randomcutforest.state.returntypes.DiVectorMapper;
/**
 * Maps a {@link ThresholdedRandomCutForest} to and from its serializable
 * {@link ThresholdedRandomCutForestState} representation, including backward
 * compatibility with 2.1-era states.
 */
@Getter
@Setter
public class ThresholdedRandomCutForestMapper
        implements IStateMapper<ThresholdedRandomCutForest, ThresholdedRandomCutForestState> {

    /**
     * Reconstructs a model from state.
     *
     * @param state the serialized state
     * @param seed  unused by this mapper
     * @return the reconstructed ThresholdedRandomCutForest
     */
    @Override
    public ThresholdedRandomCutForest toModel(ThresholdedRandomCutForestState state, long seed) {
        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();
        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();

        RandomCutForest forest = randomCutForestMapper.toModel(state.getForestState());
        // only the first preprocessor state is used; assumes a single preprocessor -- TODO confirm
        Preprocessor preprocessor = preprocessorMapper.toModel(state.getPreprocessorStates()[0]);

        ForestMode forestMode = ForestMode.valueOf(state.getForestMode());
        TransformMethod transformMethod = TransformMethod.valueOf(state.getTransformMethod());

        // older states may not carry a scoring strategy; default to EXPECTED_INVERSE_DEPTH
        ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;
        if (state.getScoringStrategy() != null && !state.getScoringStrategy().isEmpty()) {
            scoringStrategy = ScoringStrategy.valueOf(state.getScoringStrategy());
        }

        RCFComputeDescriptor descriptor;
        if (state.getLastDescriptorState() == null) {
            // backward compatibility: rebuild the descriptor from the individual
            // deprecated fields stored by older (2.1-era) state versions
            descriptor = new RCFComputeDescriptor(null, 0L);
            descriptor.setRCFScore(state.getLastAnomalyScore());
            descriptor.setInternalTimeStamp(state.getLastAnomalyTimeStamp());
            descriptor.setAttribution(new DiVectorMapper().toModel(state.getLastAnomalyAttribution()));
            descriptor.setRCFPoint(state.getLastAnomalyPoint());
            descriptor.setExpectedRCFPoint(state.getLastExpectedPoint());
            descriptor.setRelativeIndex(state.getLastRelativeIndex());
            descriptor.setScoringStrategy(scoringStrategy);
        } else {
            descriptor = new ComputeDescriptorMapper().toModel(state.getLastDescriptorState());
        }
        descriptor.setForestMode(forestMode);
        descriptor.setTransformMethod(transformMethod);
        descriptor.setScoringStrategy(scoringStrategy);
        descriptor
                .setImputationMethod(ImputationMethod.valueOf(state.getPreprocessorStates()[0].getImputationMethod()));

        PredictorCorrector predictorCorrector;
        if (state.getPredictorCorrectorState() == null) {
            // backward compatibility: older states stored only a thresholder plus
            // a handful of scalar fields
            BasicThresholderMapper thresholderMapper = new BasicThresholderMapper();
            BasicThresholder thresholder = thresholderMapper.toModel(state.getThresholderState());
            predictorCorrector = new PredictorCorrector(thresholder, preprocessor.getInputLength());
            predictorCorrector.setNumberOfAttributors(state.getNumberOfAttributors());
            predictorCorrector.setLastScore(new double[] { state.getLastScore() });
        } else {
            PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();
            predictorCorrector = mapper.toModel(state.getPredictorCorrectorState());
        }

        return new ThresholdedRandomCutForest(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector,
                preprocessor, descriptor);
    }

    /**
     * Serializes a model to state. Full tree/executor state capture is enabled on
     * the forest mapper so that toModel can rebuild an identical forest.
     *
     * @param model the model to serialize
     * @return the serializable state
     */
    @Override
    public ThresholdedRandomCutForestState toState(ThresholdedRandomCutForest model) {
        ThresholdedRandomCutForestState state = new ThresholdedRandomCutForestState();

        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();
        // capture complete forest state (trees, coordinator, executor context)
        randomCutForestMapper.setPartialTreeStateEnabled(true);
        randomCutForestMapper.setSaveTreeStateEnabled(true);
        randomCutForestMapper.setCompressionEnabled(true);
        randomCutForestMapper.setSaveCoordinatorStateEnabled(true);
        randomCutForestMapper.setSaveExecutorContextEnabled(true);
        state.setForestState(randomCutForestMapper.toState(model.getForest()));

        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();
        state.setPreprocessorStates(
                new PreprocessorState[] { preprocessorMapper.toState((Preprocessor) model.getPreprocessor()) });
        state.setPredictorCorrectorState(new PredictorCorrectorMapper().toState(model.getPredictorCorrector()));
        state.setForestMode(model.getForestMode().name());
        state.setTransformMethod(model.getTransformMethod().name());
        state.setScoringStrategy(model.getScoringStrategy().name());
        state.setLastDescriptorState(
                new ComputeDescriptorMapper().toState((RCFComputeDescriptor) model.getLastAnomalyDescriptor()));
        return state;
    }
}
| 592 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/RCFCasterMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.ForestMode;
import com.amazon.randomcutforest.config.ImputationMethod;
import com.amazon.randomcutforest.config.ScoringStrategy;
import com.amazon.randomcutforest.config.TransformMethod;
import com.amazon.randomcutforest.parkservices.ErrorHandler;
import com.amazon.randomcutforest.parkservices.PredictorCorrector;
import com.amazon.randomcutforest.parkservices.RCFCaster;
import com.amazon.randomcutforest.parkservices.RCFComputeDescriptor;
import com.amazon.randomcutforest.parkservices.calibration.Calibration;
import com.amazon.randomcutforest.parkservices.preprocessor.Preprocessor;
import com.amazon.randomcutforest.parkservices.state.errorhandler.ErrorHandlerMapper;
import com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;
import com.amazon.randomcutforest.parkservices.state.preprocessor.PreprocessorMapper;
import com.amazon.randomcutforest.parkservices.state.preprocessor.PreprocessorState;
import com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.state.RandomCutForestMapper;
/**
 * Maps an {@link RCFCaster} to and from its serializable {@link RCFCasterState}.
 */
@Getter
@Setter
public class RCFCasterMapper implements IStateMapper<RCFCaster, RCFCasterState> {

    /**
     * Serializes a caster. Full tree/executor state capture is enabled on the
     * forest mapper so that toModel can rebuild an identical forest.
     *
     * @param model the caster to serialize
     * @return the serializable state
     */
    @Override
    public RCFCasterState toState(RCFCaster model) {
        RCFCasterState state = new RCFCasterState();

        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();
        // capture complete forest state (trees, coordinator, executor context)
        randomCutForestMapper.setPartialTreeStateEnabled(true);
        randomCutForestMapper.setSaveTreeStateEnabled(true);
        randomCutForestMapper.setCompressionEnabled(true);
        randomCutForestMapper.setSaveCoordinatorStateEnabled(true);
        randomCutForestMapper.setSaveExecutorContextEnabled(true);
        state.setForestState(randomCutForestMapper.toState(model.getForest()));

        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();
        state.setPreprocessorStates(
                new PreprocessorState[] { preprocessorMapper.toState((Preprocessor) model.getPreprocessor()) });
        state.setPredictorCorrectorState(new PredictorCorrectorMapper().toState(model.getPredictorCorrector()));
        state.setLastDescriptorState(
                new ComputeDescriptorMapper().toState((RCFComputeDescriptor) model.getLastAnomalyDescriptor()));
        state.setForestMode(model.getForestMode().name());
        state.setTransformMethod(model.getTransformMethod().name());
        state.setForecastHorizon(model.getForecastHorizon());

        ErrorHandlerMapper errorHandlerMapper = new ErrorHandlerMapper();
        state.setErrorHandler(errorHandlerMapper.toState(model.getErrorHandler()));
        state.setErrorHorizon(model.getErrorHorizon());
        state.setCalibrationMethod(model.getCalibrationMethod().name());
        state.setScoringStrategy(model.getScoringStrategy().name());
        return state;
    }

    /**
     * Reconstructs a caster from state.
     *
     * @param state the serialized state
     * @param seed  unused by this mapper
     * @return the reconstructed RCFCaster
     */
    @Override
    public RCFCaster toModel(RCFCasterState state, long seed) {
        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();
        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();

        RandomCutForest forest = randomCutForestMapper.toModel(state.getForestState());
        // only the first preprocessor state is used; assumes a single preprocessor -- TODO confirm
        Preprocessor preprocessor = preprocessorMapper.toModel(state.getPreprocessorStates()[0]);

        ForestMode forestMode = ForestMode.valueOf(state.getForestMode());
        TransformMethod transformMethod = TransformMethod.valueOf(state.getTransformMethod());
        RCFComputeDescriptor descriptor = new ComputeDescriptorMapper().toModel(state.getLastDescriptorState());
        descriptor.setForestMode(forestMode);
        descriptor.setTransformMethod(transformMethod);
        descriptor
                .setImputationMethod(ImputationMethod.valueOf(state.getPreprocessorStates()[0].getImputationMethod()));

        PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();
        PredictorCorrector predictorCorrector = mapper.toModel(state.getPredictorCorrectorState());

        ErrorHandlerMapper errorHandlerMapper = new ErrorHandlerMapper();
        ErrorHandler errorHandler = errorHandlerMapper.toModel(state.getErrorHandler());
        Calibration calibrationMethod = Calibration.valueOf(state.getCalibrationMethod());
        ScoringStrategy scoringStrategy = ScoringStrategy.valueOf(state.getScoringStrategy());
        return new RCFCaster(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector, preprocessor,
                descriptor, state.getForecastHorizon(), errorHandler, state.getErrorHorizon(), calibrationMethod);
    }
}
| 593 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/statistics/DeviationMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.statistics;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.state.IStateMapper;
/**
 * Maps a {@link Deviation} to and from its serializable {@link DeviationState}.
 */
@Getter
@Setter
public class DeviationMapper implements IStateMapper<Deviation, DeviationState> {

    /**
     * Rebuilds a Deviation from its serialized form.
     *
     * @param state the serialized deviation statistics
     * @param seed  unused by this mapper
     * @return a Deviation carrying the stored statistics
     */
    @Override
    public Deviation toModel(DeviationState state, long seed) {
        double discount = state.getDiscount();
        double weight = state.getWeight();
        double sumSquared = state.getSumSquared();
        double sum = state.getSum();
        int count = state.getCount();
        return new Deviation(discount, weight, sumSquared, sum, count);
    }

    /**
     * Captures a Deviation into a serializable state object.
     *
     * @param model the deviation to serialize
     * @return the serialized state
     */
    @Override
    public DeviationState toState(Deviation model) {
        DeviationState state = new DeviationState();
        state.setCount(model.getCount());
        state.setWeight(model.getWeight());
        state.setSum(model.getSum());
        state.setSumSquared(model.getSumSquared());
        state.setDiscount(model.getDiscount());
        return state;
    }

    /**
     * Maps an array of deviations to an array of states, element-wise.
     *
     * @param list   the deviations (may be null)
     * @param mapper the mapper to apply
     * @return the mapped states, or null when list is null
     */
    public static DeviationState[] getStates(Deviation[] list, DeviationMapper mapper) {
        if (list == null) {
            return null;
        }
        DeviationState[] result = new DeviationState[list.length];
        for (int index = 0; index < list.length; index++) {
            result[index] = mapper.toState(list[index]);
        }
        return result;
    }

    /**
     * Maps an array of states back to deviations, element-wise.
     *
     * @param states the serialized states (may be null)
     * @param mapper the mapper to apply
     * @return the mapped deviations, or null when states is null
     */
    public static Deviation[] getDeviations(DeviationState[] states, DeviationMapper mapper) {
        if (states == null) {
            return null;
        }
        Deviation[] result = new Deviation[states.length];
        for (int index = 0; index < states.length; index++) {
            result[index] = mapper.toModel(states[index]);
        }
        return result;
    }
}
| 594 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/statistics/DeviationState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.statistics;
import java.io.Serializable;
import lombok.Data;
/**
 * Serializable snapshot of a Deviation's discounted statistics.
 */
@Data
public class DeviationState implements Serializable {
    private static final long serialVersionUID = 1L;
    // discount factor in [0, 1)
    private double discount;
    // discounted number of observations
    private double weight;
    // discounted sum of squared observations
    private double sumSquared;
    // discounted sum of observations
    private double sum;
    // raw (undiscounted) number of updates seen
    private int count;
}
| 595 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/errorhandler/ErrorHandlerMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.errorhandler;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Locale;
import com.amazon.randomcutforest.parkservices.ErrorHandler;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.state.IStateMapper;
/**
 * Maps an {@link ErrorHandler} to and from a flattened, serializable state.
 */
public class ErrorHandlerMapper implements IStateMapper<ErrorHandler, ErrorHandlerState> {

    /**
     * Serializes an ErrorHandler. The per-timestamp forecasts (values, upper and
     * lower bounds) and actuals are flattened into single float arrays.
     *
     * @param model the error handler to serialize
     * @return the serializable state
     * @throws IllegalArgumentException if pastForecasts or actuals is null, or
     *                                  their lengths disagree
     */
    @Override
    public ErrorHandlerState toState(ErrorHandler model) {
        ErrorHandlerState errorHandlerState = new ErrorHandlerState();
        errorHandlerState.setSequenceIndex(model.getSequenceIndex());
        errorHandlerState.setPercentile(model.getPercentile());
        errorHandlerState.setForecastHorizon(model.getForecastHorizon());
        errorHandlerState.setErrorHorizon(model.getErrorHorizon());
        errorHandlerState.setLastDeviations(model.getLastDeviations());

        // pastForecasts[i] contains forecasts at timestamp i. We have three float
        // arrays: upper, lower, values. Each array is of length
        // forecastHorizon*dimensions since we have forecastHorizon forecasts per
        // dimension.
        RangeVector[] pastForecasts = model.getPastForecasts();
        float[][] actuals = model.getActuals();
        // BUGFIX: validate before dereferencing. The previous version computed
        // pastForecasts.length ahead of these checks, so a null argument threw a
        // NullPointerException before the checkArgument could ever fire.
        checkArgument(pastForecasts != null, "pastForecasts cannot be null");
        checkArgument(actuals != null, "actuals cannot be null");
        int arrayLength = pastForecasts.length;
        checkArgument(arrayLength == actuals.length, String.format(Locale.ROOT,
                "actuals array length %d and pastForecasts array length %d is not equal", actuals.length, arrayLength));

        int forecastHorizon = model.getForecastHorizon();
        float[] pastForecastsFlattened;
        int inputLength = 0;
        if (pastForecasts.length == 0 || pastForecasts[0].values == null || pastForecasts[0].values.length == 0) {
            pastForecastsFlattened = new float[0];
        } else {
            int pastForecastsLength = pastForecasts[0].values.length;
            inputLength = pastForecastsLength / forecastHorizon;
            // layout per timestamp i: [values | upper | lower], each segment of
            // length pastForecastsLength
            pastForecastsFlattened = new float[arrayLength * 3 * forecastHorizon * inputLength];
            for (int i = 0; i < arrayLength; i++) {
                System.arraycopy(pastForecasts[i].values, 0, pastForecastsFlattened, 3 * i * pastForecastsLength,
                        pastForecastsLength);
                System.arraycopy(pastForecasts[i].upper, 0, pastForecastsFlattened, (3 * i + 1) * pastForecastsLength,
                        pastForecastsLength);
                System.arraycopy(pastForecasts[i].lower, 0, pastForecastsFlattened, (3 * i + 2) * pastForecastsLength,
                        pastForecastsLength);
            }
        }
        errorHandlerState.setInputLength(inputLength);
        errorHandlerState.setPastForecastsFlattened(pastForecastsFlattened);

        float[] actualsFlattened;
        if (actuals.length == 0 || actuals[0].length == 0) {
            actualsFlattened = new float[0];
        } else {
            // NOTE(review): inputLength is derived from pastForecasts above; this
            // assumes each actuals row has at least inputLength entries -- TODO confirm
            actualsFlattened = new float[arrayLength * inputLength];
            for (int i = 0; i < arrayLength; i++) {
                System.arraycopy(actuals[i], 0, actualsFlattened, i * inputLength, inputLength);
            }
        }
        errorHandlerState.setActualsFlattened(actualsFlattened);
        return errorHandlerState;
    }

    /**
     * Reconstructs an ErrorHandler from its flattened state.
     *
     * @param state the serialized state
     * @param seed  unused by this mapper
     * @return the reconstructed error handler
     */
    @Override
    public ErrorHandler toModel(ErrorHandlerState state, long seed) {
        return new ErrorHandler(state.getErrorHorizon(), state.getForecastHorizon(), state.getSequenceIndex(),
                state.getPercentile(), state.getInputLength(), state.getActualsFlattened(),
                state.getPastForecastsFlattened(), state.getLastDeviations(), null);
    }
}
| 596 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/errorhandler/ErrorHandlerState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.errorhandler;
import java.io.Serializable;
import lombok.Data;
/**
 * Serializable state of an ErrorHandler; past forecasts and actuals are stored
 * as flattened float arrays (see ErrorHandlerMapper for the layout).
 */
@Data
public class ErrorHandlerState implements Serializable {
    private static final long serialVersionUID = 1L;
    private int sequenceIndex;
    private double percentile;
    private int forecastHorizon;
    private int errorHorizon;
    // per timestamp: [values | upper | lower], each of length forecastHorizon * inputLength
    private float[] pastForecastsFlattened;
    // per timestamp: one row of inputLength actual values
    private float[] actualsFlattened;
    // number of input dimensions per forecast step
    private int inputLength;
    private float[] lastDeviations;
    // items below are not used now. Kept for regret computation later.
    // Regret is what we feel when we realize that we should have been better off
    // had we done something else. A basic requirement of regret computation is that
    // it should avoid or at least reduce the regret that will be felt.
    private float[] addersFlattened;
    private float[] multipliersFlattened;
}
| 597 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/threshold/BasicThresholderMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.threshold;
import static com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper.getDeviations;
import static com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper.getStates;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationMapper;
import com.amazon.randomcutforest.parkservices.statistics.Deviation;
import com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;
import com.amazon.randomcutforest.state.IStateMapper;
/**
 * Maps a {@link BasicThresholder} to and from its serializable state.
 */
@Getter
@Setter
public class BasicThresholderMapper implements IStateMapper<BasicThresholder, BasicThresholderState> {

    /**
     * Reconstructs a thresholder from state. Newer states carry an array of
     * deviation states; older states carry three individual deviation fields.
     *
     * @param state the serialized state
     * @param seed  unused by this mapper
     * @return the reconstructed BasicThresholder
     */
    @Override
    public BasicThresholder toModel(BasicThresholderState state, long seed) {
        DeviationMapper deviationMapper = new DeviationMapper();
        Deviation[] deviations = null;
        if (state.getDeviationStates() != null) {
            deviations = getDeviations(state.getDeviationStates(), deviationMapper);
        } else if (state.getPrimaryDeviationState() != null) {
            // backward compatibility; will be deprecated in 4.0
            // NOTE(review): assumes secondary and threshold deviation states are
            // non-null whenever the primary one is -- confirm for old models
            deviations = new Deviation[3];
            deviations[0] = deviationMapper.toModel(state.getPrimaryDeviationState());
            deviations[1] = deviationMapper.toModel(state.getSecondaryDeviationState());
            deviations[2] = deviationMapper.toModel(state.getThresholdDeviationState());
        }
        BasicThresholder thresholder = new BasicThresholder(deviations);
        thresholder.setAbsoluteThreshold(state.getAbsoluteThreshold());
        thresholder.setLowerThreshold(state.getLowerThreshold());
        thresholder.setInitialThreshold(state.getInitialThreshold());
        // the state field is named "horizon" but stores the score differencing value
        thresholder.setScoreDifferencing(state.getHorizon());
        thresholder.setCount(state.getCount());
        thresholder.setAutoThreshold(state.isAutoThreshold());
        thresholder.setMinimumScores(state.getMinimumScores());
        thresholder.setZfactor(state.getZFactor());
        return thresholder;
    }

    /**
     * Serializes a thresholder; only the deviation-state array form is written
     * (the legacy individual deviation fields are left null).
     *
     * @param model the thresholder to serialize
     * @return the serializable state
     */
    @Override
    public BasicThresholderState toState(BasicThresholder model) {
        BasicThresholderState state = new BasicThresholderState();
        DeviationMapper deviationMapper = new DeviationMapper();
        state.setZFactor(model.getZFactor());
        state.setLowerThreshold(model.getLowerThreshold());
        state.setAbsoluteThreshold(model.getAbsoluteThreshold());
        state.setInitialThreshold(model.getInitialThreshold());
        state.setCount(model.getCount());
        state.setAutoThreshold(model.isAutoThreshold());
        state.setMinimumScores(model.getMinimumScores());
        state.setDeviationStates(getStates(model.getDeviations(), deviationMapper));
        state.setHorizon(model.getScoreDifferencing());
        return state;
    }
}
| 598 |
0 | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state | Create_ds/random-cut-forest-by-aws/Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/threshold/BasicThresholderState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.parkservices.state.threshold;
import java.io.Serializable;
import lombok.Data;
import com.amazon.randomcutforest.parkservices.state.statistics.DeviationState;
/**
 * Serializable state of a BasicThresholder. Deprecated fields are retained so
 * that older serialized models continue to deserialize.
 */
@Data
public class BasicThresholderState implements Serializable {
    private static final long serialVersionUID = 1L;
    private long randomseed;
    @Deprecated
    private boolean inAnomaly;
    @Deprecated
    private double elasticity;
    @Deprecated
    private boolean attributionEnabled;
    private int count;
    private int minimumScores;
    // do not use; superseded by deviationStates (read only for old models)
    private DeviationState primaryDeviationState;
    // do not use; superseded by deviationStates (read only for old models)
    private DeviationState secondaryDeviationState;
    // do not use; superseded by deviationStates (read only for old models)
    private DeviationState thresholdDeviationState;
    @Deprecated
    private double upperThreshold;
    private double lowerThreshold;
    private double absoluteThreshold;
    private boolean autoThreshold;
    private double initialThreshold;
    private double zFactor;
    @Deprecated
    private double upperZfactor;
    @Deprecated
    private double absoluteScoreFraction;
    // stores the thresholder's score differencing value (see BasicThresholderMapper)
    private double horizon;
    private DeviationState[] deviationStates;
}
| 599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.