index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/tree/CutTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link Cut}: constructor field storage and the static
 * {@code isLeftOf} predicate.
 */
public class CutTest {

    // Dimension and value used to build the Cut under test.
    private int dimension;
    private double value;
    private Cut cut;

    @BeforeEach
    public void setUp() {
        dimension = 2;
        value = 3.4;
        cut = new Cut(dimension, value);
    }

    @Test
    public void testNew() {
        // The constructor should store both arguments unchanged.
        assertThat(cut.getDimension(), is(dimension));
        assertThat(cut.getValue(), is(value));
    }

    @Test
    public void testIsLeftOf() {
        double[] point = { 1.0, 2.0, 3.0, 4.0 };
        // point[2] == 3.0 is below the cut value 3.4, so the point is on the left.
        assertTrue(Cut.isLeftOf(point, cut));
        // Raising the coordinate in the cut dimension moves the point to the right.
        point[2] = 99.9;
        assertFalse(Cut.isLeftOf(point, cut));
    }
}
| 400 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/tree/RandomCutTreeTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import com.amazon.randomcutforest.config.Config;
import com.amazon.randomcutforest.sampler.Weighted;
import com.amazon.randomcutforest.store.PointStore;
/**
 * Unit tests for {@link RandomCutTree}. The fixture built in {@link #setUp()}
 * uses a mocked {@link Random} so the tree shape is fully determined, letting
 * each test assert exact cut dimensions, cut values, masses and bounding boxes.
 * Several tests drive a mocked {@link PointStore} through order-sensitive
 * {@code thenReturn} chains; the statement order in those tests is load-bearing.
 */
public class RandomCutTreeTest {

    private static final double EPSILON = 1e-8;

    private Random rng;
    private RandomCutTree tree;

    @BeforeEach
    public void setUp() {
        rng = mock(Random.class);
        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)
                .dimensions(2).build();
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)
                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();

        // Create the following tree structure (in the second diagram, backticks denote
        // cuts)
        // The leaf point 0,1 has mass 2, all other nodes have mass 1.
        //
        //          /\
        //         /  \
        //  -1,-1 /    \
        //       /      \
        //      /\      1,1
        //     /  \
        //  -1,0  0,1
        //
        //
        //                   0,1       1,1
        //          ----------*---------*
        //          |    `    |    `    |
        //          |    `    |    `    |
        //          |    `    |    `    |
        //    -1,0  *-------------------|
        //          |                   |
        //          |```````````````````|
        //          |                   |
        //   -1,-1  *--------------------
        //
        // We choose the insertion order and random draws carefully so that each split
        // divides its parent in half.
        // The random values are used to set the cut dimensions and values.

        assertEquals(pointStoreFloat.add(new float[] { -1, -1 }, 1), 0);
        assertEquals(pointStoreFloat.add(new float[] { 1, 1 }, 2), 1);
        assertEquals(pointStoreFloat.add(new float[] { -1, 0 }, 3), 2);
        assertEquals(pointStoreFloat.add(new float[] { 0, 1 }, 4), 3);
        assertEquals(pointStoreFloat.add(new float[] { 0, 1 }, 5), 4);
        assertEquals(pointStoreFloat.add(new float[] { 0, 0 }, 6), 5);

        // deleting from an empty tree is invalid
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));
        // add/delete round trip leaves the tree empty again
        tree.addPoint(0, 1);
        tree.deletePoint(0, 1);
        assertTrue(tree.root == Null);
        tree.addPoint(0, 1);
        when(rng.nextDouble()).thenReturn(0.625);
        tree.addPoint(1, 2);
        when(rng.nextDouble()).thenReturn(0.5);
        tree.addPoint(2, 3);
        when(rng.nextDouble()).thenReturn(0.25);
        tree.addPoint(3, 4);
        // add mass to 0,1
        tree.addPoint(4, 5);
        // with no point transformation, liftFromTree is the identity
        assertArrayEquals(tree.liftFromTree(new float[] { 17, 18 }), new float[] { 17, 18 });
    }

    /** Config setters/getters reject invalid keys, ranges and types. */
    @Test
    public void testConfig() {
        assertThrows(IllegalArgumentException.class, () -> tree.setBoundingBoxCacheFraction(-0.5));
        assertThrows(IllegalArgumentException.class, () -> tree.setBoundingBoxCacheFraction(2.0));
        assertThrows(IllegalArgumentException.class, () -> tree.setConfig("foo", 0));
        assertThrows(IllegalArgumentException.class, () -> tree.getConfig("bar"));
        assertEquals(tree.getConfig(Config.BOUNDING_BOX_CACHE_FRACTION), 1.0);
        assertThrows(IllegalArgumentException.class, () -> tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, true));
        assertThrows(IllegalArgumentException.class,
                () -> tree.getConfig(Config.BOUNDING_BOX_CACHE_FRACTION, boolean.class));
        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.2);
    }

    /** Tree-level leaf/internal checks delegate to the node store. */
    @Test
    public void testConfigStore() {
        assertEquals(tree.nodeStore.isLeaf(-1), tree.isLeaf(-1));
        assertEquals(tree.nodeStore.isLeaf(256), tree.isLeaf(256));
        assertEquals(tree.nodeStore.isInternal(-1), tree.isInternal(-1));
        assertEquals(tree.nodeStore.isInternal(0), tree.isInternal(0));
        assertEquals(tree.nodeStore.isInternal(255), tree.isInternal(255));
        assertEquals(tree.nodeStore.isInternal(256), tree.isInternal(256));
    }

    /** Parent lookup is unavailable when storeParent is disabled. */
    @Test
    public void testParent() {
        PointStore pointStore = mock(PointStore.class);
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)
                .storeSequenceIndexesEnabled(true).storeParent(false).dimension(3).build();
        assertThrows(IllegalArgumentException.class, () -> tree.nodeStore.getParentIndex(tree.root));
    }

    /** Deletion validation against a spoofed point store. */
    @Test
    public void testConfigDelete() {
        PointStore pointStore = mock(PointStore.class);
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)
                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(3).build();
        // order-sensitive spoofed returns: a 1-dim vector, then two valid 3-dim vectors
        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[] { 0 }).thenReturn(new float[3])
                .thenReturn(new float[3]);
        tree.addPoint(0, 1);
        // fails for dimension
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(2, 1));
        // wrong sequence index
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 2));
        // state is corrupted
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));
    }

    /** Addition validation against a spoofed point store; the thenReturn order drives each branch. */
    @Test
    public void testConfigAdd() {
        PointStore pointStore = mock(PointStore.class);
        float[] test = new float[] { 1.119f, 0f, -3.11f, 100f };
        float[] copies = new float[] { 0, 17, 0, 0 };
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)
                .centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).storeParent(true).dimension(4).build();
        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[0]).thenReturn(test)
                .thenReturn(new float[62]).thenReturn(new float[4]).thenReturn(new float[17]).thenReturn(new float[4])
                .thenReturn(new float[4]).thenReturn(new float[5]).thenReturn(copies).thenReturn(test)
                .thenReturn(copies).thenReturn(copies).thenReturn(test);
        // cannot have partial addition to empty tree
        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(0, 1));
        // the following does not consume any points
        tree.addPoint(0, 1);
        // consumes from pointstore but gets 0 length vector
        assertThrows(IllegalArgumentException.class, () -> tree.getPointSum(tree.getRoot()));
        // passes, consumes pointstore
        assertArrayEquals(tree.getPointSum(tree.getRoot()), test);
        // sequel fails because dimension is 62
        assertThrows(IllegalArgumentException.class, () -> tree.getBox(tree.root));
        // in the sequel point is [0,0,0,0] fails because old point appears to have 17
        // dimensions
        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(1, 1));
        // this invocation succeeds, but points are same
        tree.addPoint(1, 1);
        assertTrue(tree.isLeaf(tree.getRoot()));
        // dimension = 5
        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(2, 1));
        // switch the vector
        assertArrayEquals(tree.getPointSum(tree.getRoot()), new float[] { 0, 34, 0, 0 });
        // adding test, consumes the copy
        tree.addPoint(2, 1);
        assertEquals(tree.getMass(), 3);
        assertArrayEquals(tree.getPointSum(tree.getRoot()), new float[] { 1.119f, 34, -3.11f, 100 }, 1e-3f);
        // bounding boxes are incorrect they are minvalues = test, maxvalues = test
        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct(tree.root));
        assertTrue(tree.getCutDimension(tree.root) == 3);
        // cut cannot be the same as right minvalue
        tree.nodeStore.cutValue[tree.root] = 100;
        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct(tree.root));
    }

    /** Partial-tree addition fails at each validation stage in sequence. */
    @Test
    public void testConfigPartialAdd() {
        PointStore pointStore = mock(PointStore.class);
        float[] test = new float[] { 1.119f, 0f, -3.11f, 100f };
        float[] copies = new float[] { 0, 17, 0, 0 };
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)
                .centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).storeParent(true).dimension(4).build();
        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[0]).thenReturn(test)
                .thenReturn(new float[0]).thenReturn(test).thenReturn(new float[4]).thenReturn(new float[5])
                .thenReturn(copies).thenReturn(test).thenReturn(copies).thenReturn(copies).thenReturn(test);
        // the following does not consume any points
        tree.addPoint(0, 1);
        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));
        // fails at check of dimension of retrieved point
        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));
        // fails at equality check
        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));
    }

    /** Exercises cut placement (values pinned with Math.nextAfter) in one dimension. */
    @Test
    public void testCut() {
        PointStore pointStore = mock(PointStore.class);
        Random random = mock(Random.class);
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore).random(random)
                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(1).build();
        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[1]).thenReturn(new float[] { 1 })
                .thenReturn(new float[] { 0 }).thenReturn(new float[] { 0 }).thenReturn(new float[] { 2 })
                .thenReturn(new float[] { 1 }).thenReturn(new float[0]).thenReturn(new float[] { 2 })
                .thenReturn(new float[] { 1 }).thenReturn(new float[1]);
        // testing the cut assumptions -- the values should not be 1 or larger, but is
        // useful for testing
        when(random.nextDouble()).thenReturn(1.2).thenReturn(1.5).thenReturn(1.5).thenReturn(0.0);
        // following does not query pointstore
        tree.addPoint(0, 1);
        // following tries to add [0.0], and discovers point index 0 is [1.0]
        tree.addPoint(1, 1);
        assertTrue(tree.getCutValue(tree.getRoot()) == (double) Math.nextAfter(1.0f, 0.0));
        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(1, 2)); // copy
        tree.addPoint(1, 2); // passes
        assertTrue(tree.getRoot() == 0);
        assertTrue(tree.getCutValue(0) == (double) Math.nextAfter(1.0f, 0.0));
        assertTrue(tree.getCutValue(1) == (double) Math.nextAfter(2.0f, 1.0));
        assertFalse(tree.checkStrictlyContains(1, new float[] { 2 }));
        assertTrue(tree.checkStrictlyContains(1, new float[] { 1.001f }));
    }

    /**
     * Verify that the tree has the form described in the setUp method.
     */
    @Test
    public void testInitialTreeState() {
        int node = tree.getRoot();
        // the second double[] is intentional
        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(1));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(), is(5));
        assertArrayEquals(new double[] { -1, 2 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, -1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);
        // testing inappropriate node indices
        assertThrows(IllegalArgumentException.class, () -> tree.getLeftChild(Integer.MAX_VALUE));
        assertThrows(IllegalArgumentException.class, () -> tree.getRightChild(500));
        assertThrows(IllegalArgumentException.class, () -> tree.getCutValue(-1));
        assertThrows(IllegalArgumentException.class, () -> tree.getCutDimension(-1));
        // pointIndex should have a value at least as large as number of leaves
        assertThrows(IllegalArgumentException.class, () -> tree.getPointIndex(0));
        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;
        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);
        node = tree.getRightChild(node);
        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new BoundingBox(new float[] { 1, 1 }));
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));
        assertThat(tree.getMass(node), is(4));
        assertArrayEquals(new double[] { 0.0, 3.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 1, 1 }));
        assertThat(tree.getMass(tree.getRightChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);
        assert (nodeStoreSmall.getParentIndex(tree.getLeftChild(node)) == node);
        node = tree.getLeftChild(node);
        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(node), is(3));
        assertArrayEquals(new double[] { -1.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, 0 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 0, 1 }));
        // leaf 0,1 carries mass 2 with two sequence indexes
        assertThat(tree.getMass(tree.getRightChild(node)), is(2));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(4L), 1);
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(5, 6));
    }

    @Test
    public void testDeletePointWithLeafSibling() {
        tree.deletePoint(2, 3);
        // root node bounding box and cut remains unchanged, mass and centerOfMass are
        // updated
        int node = tree.getRoot();
        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(1));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(), is(4));
        assertArrayEquals(new double[] { 0.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, -1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);
        // sibling node moves up and bounding box recomputed
        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;
        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);
        node = tree.getRightChild(node);
        expectedBox = new BoundingBox(new float[] { 0, 1 }).getMergedBox(new float[] { 1, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));
        assertThat(tree.getMass(node), is(3));
        assertArrayEquals(new double[] { 1.0, 3.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { 0, 1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(2));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(4L), 1);
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(5L), 1);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 1, 1 }));
        assertThat(tree.getMass(tree.getRightChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);
    }

    @Test
    public void testDeletePointWithNonLeafSibling() {
        tree.deletePoint(1, 2);
        // root node bounding box recomputed
        int node = tree.getRoot();
        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 0, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(1));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(), is(4));
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, -1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);
        // sibling node moves up and bounding box stays the same
        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;
        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);
        node = tree.getRightChild(node);
        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, 0 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 0, 1 }));
        assertThat(tree.getMass(tree.getRightChild(node)), is(2));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(4L), 1);
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);
    }

    /** Deletion of one copy of a mass-2 leaf, with the bounding box cache toggled on/off. */
    @Test
    public void testDeletePointWithMassGreaterThan1() {
        assertTrue(tree.boundingBoxCacheFraction == 1.0);
        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.5);
        assertTrue(tree.boundingBoxData != null);
        assertTrue(tree.boundingBoxData.length == ((tree.numberOfLeaves - 1) / 2) * 4);
        assertTrue(tree.rangeSumData != null);
        assertTrue(tree.rangeSumData.length == (tree.numberOfLeaves - 1) / 2);
        int root = tree.getRoot();
        assertTrue(tree.checkStrictlyContains(root, new float[2]));
        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.0);
        assertTrue(tree.boundingBoxData == null);
        assertTrue(tree.rangeSumData == null);
        assertFalse(tree.checkStrictlyContains(root, new float[2]));
        tree.deletePoint(3, 4);
        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.5);
        assertTrue(tree.boundingBoxData != null);
        assertTrue(tree.boundingBoxData.length == ((tree.numberOfLeaves - 1) / 2) * 4);
        assertTrue(tree.rangeSumData != null);
        assertTrue(tree.rangeSumData.length == (tree.numberOfLeaves - 1) / 2);
        // same as initial state except mass at 0,1 is 1
        int node = tree.getRoot();
        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(1));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(), is(4));
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, -1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);
        assertArrayEquals(new double[] { -1.0, 1.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, -1 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);
        node = tree.getRightChild(node);
        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 1, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));
        assertThat(tree.getMass(node), is(3));
        NodeView nodeView = new NodeView(tree, tree.pointStoreView, node);
        assertTrue(nodeView.getCutDimension() == 0);
        assertTrue(nodeView.getCutValue() == 0.5);
        assertArrayEquals(new double[] { 0.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 1, 1 }));
        assertThat(tree.getMass(tree.getRightChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);
        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;
        assert (nodeStoreSmall.getParentIndex(tree.getLeftChild(node)) == node);
        node = tree.getLeftChild(node);
        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });
        assertThat(tree.getBox(node), is(expectedBox));
        assertEquals(expectedBox.toString(), tree.getBox(node).toString());
        assertThat(tree.getCutDimension(node), is(0));
        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));
        assertThat(tree.getMass(), is(4));
        assertArrayEquals(new double[] { -1.0, 1.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);
        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),
                is(new float[] { -1, 0 }));
        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);
        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));
        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),
                is(new float[] { 0, 1 }));
        assertThat(tree.getMass(tree.getRightChild(node)), is(1));
        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);
    }

    @Test
    public void testDeletePointInvalid() {
        // specified sequence index does not exist
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(2, 99));
        // point does not exist in tree
        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(7, 3));
    }

    @Test
    public void testUpdatesOnSmallBoundingBox() {
        // verifies on small bounding boxes random cuts and tree updates are functional
        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(10).capacity(10).currentStoreCapacity(10)
                .dimensions(1).build();
        RandomCutTree tree = RandomCutTree.builder().random(rng).pointStoreView(pointStoreFloat).build();
        List<Weighted<double[]>> points = new ArrayList<>();
        points.add(new Weighted<>(new double[] { 48.08 }, 0, 1L));
        points.add(new Weighted<>(new double[] { 48.08001 }, 0, 2L));
        pointStoreFloat.add(toFloatArray(points.get(0).getValue()), 0);
        pointStoreFloat.add(toFloatArray(points.get(1).getValue()), 1);
        tree.addPoint(0, points.get(0).getSequenceIndex());
        tree.addPoint(1, points.get(1).getSequenceIndex());
        // the two points must remain distinct after the float conversion
        assertNotEquals(pointStoreFloat.getNumericVector(0)[0], pointStoreFloat.getNumericVector(1)[0]);
        for (int i = 0; i < 10000; i++) {
            Weighted<double[]> point = points.get(i % points.size());
            tree.deletePoint(i % points.size(), point.getSequenceIndex());
            tree.addPoint(i % points.size(), point.getSequenceIndex());
        }
    }

    /** Explores float summation error and exercises both randomCut paths near factor 1. */
    @Test
    public void testfloat() {
        float x = 110.13f;
        double sum = 0;
        int trials = 230000;
        for (int i = 0; i < trials; i++) {
            float z = (x * (trials - i + 1) - x);
            sum += z;
        }
        System.out.println(sum);
        for (int i = 0; i < trials - 1; i++) {
            float z = (x * (trials - i + 1) - x);
            sum -= z;
        }
        System.out.println(sum + " " + (double) x + " " + (sum <= (double) x));
        float[] possible = new float[trials];
        float[] alsoPossible = new float[trials];
        for (int i = 0; i < trials; i++) {
            possible[i] = x;
            alsoPossible[i] = (trials - i + 1) * x;
        }
        BoundingBox box = new BoundingBox(possible, alsoPossible);
        System.out.println("rangesum " + box.getRangeSum());
        double factor = 1.0 - 1e-16;
        System.out.println(factor);
        RandomCutTree tree = RandomCutTree.builder().dimension(trials).build();
        // tries both path
        tree.randomCut(factor, possible, box);
        tree.randomCut(1.0 - 1e-17, possible, box);
    }

    /** Randomized add/delete churn validating parent pointers at several capacities. */
    @ParameterizedTest
    @ValueSource(ints = { 100, 10000, 100000 })
    void testNodeStore(int size) {
        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)
                .dimensions(2).build();
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)
                .capacity(size).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();
        // log the seed so failures are reproducible
        long seed = new Random().nextLong();
        System.out.println("seed :" + seed);
        Random rng = new Random(seed);
        for (int i = 0; i < 100; i++) {
            pointStoreFloat.add(new double[] { rng.nextDouble(), rng.nextDouble() }, 0L);
        }
        ArrayList<Weighted<Integer>> list = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            tree.addPoint(i, 0L);
            list.add(new Weighted<>(i, rng.nextFloat(), 0));
        }
        list.sort((o1, o2) -> Float.compare(o1.getWeight(), o2.getWeight()));
        for (int i = 0; i < 50; i++) {
            tree.deletePoint(list.remove(0).getValue(), 0L);
        }
        AbstractNodeStore nodeStore = tree.getNodeStore();
        for (int i = 0; i < 25; i++) {
            if (!tree.isLeaf(tree.getLeftChild(tree.getRoot()))) {
                assert (nodeStore.getParentIndex(tree.getLeftChild(tree.getRoot())) == tree.root);
            }
            if (!tree.isLeaf(tree.getRightChild(tree.getRoot()))) {
                assert (nodeStore.getParentIndex(tree.getRightChild(tree.getRoot())) == tree.root);
            }
            tree.deletePoint(list.remove(0).getValue(), 0L);
        }
    }

    // spoofs the cut (using a changing box) to hit illegal state
    @Test
    public void cutTest1() {
        BoundingBox box1 = mock(BoundingBox.class);
        when(box1.getMinValue(anyInt())).thenReturn(0.0).thenReturn(0.0).thenReturn(1.0);
        assertThrows(IllegalStateException.class, () -> tree.randomCut(1.2, new float[] { 1.0f }, box1));
    }

    // spoofs the cut (using a changing box) to hit illegal state
    @Test
    public void cutTest2() {
        BoundingBox box1 = mock(BoundingBox.class);
        when(box1.getMinValue(anyInt())).thenReturn(0.0).thenReturn(0.0).thenReturn(1.0);
        assertThrows(IllegalStateException.class, () -> tree.randomCut(1.5, new float[] { 1.0f }, box1));
    }

    /** Cut placement in two dimensions for factors below, at, and above 1. */
    @Test
    public void cutTestMultiD() {
        float[] point = new float[2];
        float[] newPoint = new float[] { 0.1f + new Random().nextFloat(), 0.1f + new Random().nextFloat() };
        float[] testPoint = new float[] { point[0], newPoint[1] };
        float[] testPoint2 = new float[] { newPoint[0], point[1] };
        BoundingBox box1 = new BoundingBox(point, point);
        BoundingBox box2 = new BoundingBox(newPoint, newPoint);
        // a point identical to a degenerate box admits no cut
        assertThrows(IllegalArgumentException.class, () -> tree.randomCut(new Random().nextDouble(), point, box1));
        assertDoesNotThrow(() -> tree.randomCut(new Random().nextDouble(), point, box2));
        assertDoesNotThrow(() -> tree.randomCut(new Random().nextDouble(), newPoint, box1));
        Cut cut1 = tree.randomCut(0, new float[] { 0, 1.0f }, box1);
        // first dimension is identical
        assertTrue(cut1.getDimension() == 1);
        assertTrue(cut1.getValue() == 0f);
        assertEquals(cut1.toString(), "Cut(1, 0.000000)");
        Cut cut2 = tree.randomCut(1.2, point, box2);
        assertTrue(cut2.getDimension() == 0);
        assertTrue(cut2.getValue() == Math.nextAfter(newPoint[0], point[0]));
        Cut largeCut = tree.randomCut(1.2, newPoint, box1);
        assertTrue(largeCut.getDimension() == 0);
        assertTrue(largeCut.getValue() == Math.nextAfter(newPoint[0], point[0]));
        Cut testCut = tree.randomCut(1.2, testPoint, box2);
        assertTrue(testCut.getDimension() == 0);
        assertTrue(testCut.getValue() == Math.nextAfter(newPoint[0], testPoint[0]));
        Cut testCut2 = tree.randomCut(1.2, testPoint2, box2);
        assertTrue(testCut2.getDimension() == 1);
        assertTrue(testCut2.getValue() == Math.nextAfter(newPoint[1], point[1]));
        Cut another = tree.randomCut(1.5, point, box2);
        assertTrue(another.getDimension() == 1);
        assertTrue(another.getValue() == Math.nextAfter(newPoint[1], point[1]));
        Cut anotherLargeCut = tree.randomCut(1.5, newPoint, box1);
        assertTrue(anotherLargeCut.getDimension() == 1);
        assertTrue(anotherLargeCut.getValue() == Math.nextAfter(newPoint[1], point[1]));
        // fixed copy-paste bug: the original re-asserted testCut/testCut2 here instead
        // of the cuts just created; only dimension 1 differs between testPoint and box1
        Cut anotherTestCut = tree.randomCut(1.5, testPoint, box1);
        assertTrue(anotherTestCut.getDimension() == 1);
        assertTrue(anotherTestCut.getValue() == Math.nextAfter(newPoint[1], point[1]));
        // only dimension 0 differs between testPoint2 and box1
        Cut anotherTestCut2 = tree.randomCut(1.5, testPoint2, box1);
        assertTrue(anotherTestCut2.getDimension() == 0);
        assertTrue(anotherTestCut2.getValue() == Math.nextAfter(newPoint[0], point[0]));
    }

    // the following are tested directly since they are unreachable
    @Test
    public void traverseTest() {
        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)
                .dimensions(2).build();
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)
                .capacity(188).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();
        assertDoesNotThrow(() -> tree.validateAndReconstruct());
        assertThrows(IllegalArgumentException.class, () -> tree.traverse(null, null));
        assertThrows(IllegalArgumentException.class, () -> tree.traverseMulti(null, null));
    }

    /** A root index outside the node store must trip validation everywhere. */
    @Test
    public void invalidNodeTest() {
        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)
                .dimensions(2).build();
        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)
                .capacity(188).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();
        tree.root = 187;
        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct());
        assertThrows(IllegalStateException.class,
                () -> tree.traversePathToLeafAndVisitNodes(null, null, null, tree.root, 0));
        assertThrows(IllegalStateException.class, () -> tree.traverseTreeMulti(null, null, null, tree.root, 0));
        assertThrows(IllegalStateException.class, () -> tree.growNodeBox(null, pointStoreFloat, 0, 187));
        assertThrows(IllegalStateException.class, () -> tree.getBox(187));
    }
}
| 401 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/tree/BoxCacheTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Random;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.RandomCutForestTest;
import com.amazon.randomcutforest.config.Precision;
/**
 * Verifies that the bounding-box cache is a pure optimization: forests built from
 * the same seed must produce identical anomaly scores regardless of how much of
 * the cache they keep, even while the cache fraction is changed on the fly.
 */
public class BoxCacheTest {

    @Test
    public void testChangingBoundingBoxFloat32() {
        int dimensions = 4;
        int numberOfTrees = 1;
        int sampleSize = 64;
        int dataSize = 1000 * sampleSize;

        Random random = new Random();
        long seed = random.nextLong();
        double[][] data = RandomCutForestTest.generateShingledData(dataSize, dimensions, 2);

        // identical seeds; only the bounding-box cache fraction differs
        RandomCutForest partialCacheForest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(Precision.FLOAT_32).randomSeed(seed)
                .boundingBoxCacheFraction(0).build();
        RandomCutForest fullCacheForest = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(Precision.FLOAT_32).randomSeed(seed)
                .boundingBoxCacheFraction(1).build();

        int count = 0;
        for (double[] point : data) {
            ++count;
            // periodically move the cache fraction of the first forest to a random value
            if (count % sampleSize == 0) {
                partialCacheForest.setBoundingBoxCacheFraction(random.nextDouble());
            }
            assertEquals(partialCacheForest.getAnomalyScore(point), fullCacheForest.getAnomalyScore(point));
            partialCacheForest.update(point);
            fullCacheForest.update(point);
        }
    }
}
| 402 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/tree/BoundingBoxTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link BoundingBox}: construction from a single point, merging,
 * containment, dimension validation, and probability-of-cut behavior.
 */
public class BoundingBoxTest {

    // two distinct 2-d points; each test starts from the degenerate (zero-range)
    // box built around one of them
    private float[] point1;
    private float[] point2;
    private BoundingBox box1;
    private BoundingBox box2;

    @BeforeEach
    public void setUp() {
        point1 = new float[] { 1.5f, 2.7f };
        point2 = new float[] { 3.0f, 1.2f };
        box1 = new BoundingBox(point1);
        box2 = new BoundingBox(point2);
    }

    /** Operations mixing arrays or boxes of different dimensions must be rejected. */
    @Test
    public void dimensionTest() {
        assertThrows(IllegalArgumentException.class, () -> new BoundingBox(point1, new float[1]));
        assertThrows(IllegalArgumentException.class, () -> box1.getMergedBox(new float[1]));
        assertThrows(IllegalArgumentException.class, () -> box1.contains(new float[1]));
        assertThrows(IllegalArgumentException.class, () -> box1.contains(new BoundingBox(new float[1])));
    }

    @Test
    public void equalsTest() {
        // a box never equals a raw point array, a box around another point, or a
        // box with a different max corner; a copy compares equal to its source
        assertFalse(box1.equals(point1));
        assertFalse(box1.equals(box2));
        assertFalse(box1.equals(new BoundingBox(point1, new float[] { 3.0f, 2.7f })));
        assertTrue(box1.equals(box1.copy()));
    }

    /** A box built from a single point has zero range (and range sum) in every dimension. */
    @Test
    public void testNewFromSinglePoint() {
        assertThat(box1.getDimensions(), is(2));
        assertThat((float) box1.getMinValue(0), is(point1[0]));
        assertThat((float) box1.getMaxValue(0), is(point1[0]));
        assertThat(box1.getRange(0), is(0.0));
        assertThat((float) box1.getMinValue(1), is(point1[1]));
        assertThat((float) box1.getMaxValue(1), is(point1[1]));
        assertThat(box1.getRange(1), is(0.0));
        assertThat(box1.getRangeSum(), is(0.0));
        assertThat(box2.getDimensions(), is(2));
        assertThat((float) box2.getMinValue(0), is(point2[0]));
        assertThat((float) box2.getMaxValue(0), is(point2[0]));
        assertThat(box2.getRange(0), is(0.0));
        assertThat((float) box2.getMinValue(1), is(point2[1]));
        assertThat((float) box2.getMaxValue(1), is(point2[1]));
        assertThat(box2.getRange(1), is(0.0));
        assertThat(box2.getRangeSum(), is(0.0));
        // a zero-volume box is always separated from an outside point, never from its own point
        assertTrue(box1.probabilityOfCut(point2) == 1.0);
        assertTrue(box1.probabilityOfCut(point1) == 0.0);
    }

    /** getMergedBox returns a new spanning box and leaves both inputs untouched. */
    @Test
    public void testGetMergedBoxWithOtherBox() {
        // NOTE(review): addBox on a freshly constructed single-point box throws
        // IllegalStateException while addPoint throws IllegalArgumentException even
        // for a correctly sized point, yet a copy() accepts addPoint -- presumably
        // boxes created directly from points are read-only and copies are mutable;
        // confirm against BoundingBox.
        assertThrows(IllegalStateException.class, () -> box1.addBox(box2));
        assertThrows(IllegalArgumentException.class, () -> box1.addPoint(new float[1]));
        assertThrows(IllegalArgumentException.class, () -> box1.addPoint(new float[2]));
        assertDoesNotThrow(() -> box1.copy().addPoint(new float[2]));
        BoundingBox mergedBox = box1.getMergedBox(box2);
        // merged corners are the componentwise min/max of the two points
        assertThat(mergedBox.getDimensions(), is(2));
        assertThat((float) mergedBox.getMinValue(0), is(1.5f));
        assertThat((float) mergedBox.getMaxValue(0), is(3.0f));
        assertThat(mergedBox.getRange(0), closeTo(3.0 - 1.5, EPSILON));
        assertThat((float) mergedBox.getMinValue(1), is(1.2f));
        assertThat((float) mergedBox.getMaxValue(1), is(2.7f));
        assertThat(mergedBox.getRange(1), closeTo(2.7 - 1.2, EPSILON));
        double rangeSum = (3.0 - 1.5) + (2.7 - 1.2);
        assertThat(mergedBox.getRangeSum(), closeTo(rangeSum, EPSILON));
        // check that box1 and box2 were not changed
        assertThat(box1.getDimensions(), is(2));
        assertThat((float) box1.getMinValue(0), is(point1[0]));
        assertThat((float) box1.getMaxValue(0), is(point1[0]));
        assertThat(box1.getRange(0), is(0.0));
        assertThat((float) box1.getMinValue(1), is(point1[1]));
        assertThat((float) box1.getMaxValue(1), is(point1[1]));
        assertThat(box1.getRange(1), is(0.0));
        assertThat(box1.getRangeSum(), is(0.0));
        assertThat(box2.getDimensions(), is(2));
        assertThat((float) box2.getMinValue(0), is(point2[0]));
        assertThat((float) box2.getMaxValue(0), is(point2[0]));
        assertThat(box2.getRange(0), is(0.0));
        assertThat((float) box2.getMinValue(1), is(point2[1]));
        assertThat((float) box2.getMaxValue(1), is(point2[1]));
        assertThat(box2.getRange(1), is(0.0));
        assertThat(box2.getRangeSum(), is(0.0));
    }

    /** Box containment: strict subset, disjoint, and partially overlapping cases. */
    @Test
    public void testContainsBoundingBox() {
        BoundingBox box1 = new BoundingBox(new float[] { 0.0f, 0.0f })
                .getMergedBox(new BoundingBox(new float[] { 10.0f, 10.0f }));
        BoundingBox box2 = new BoundingBox(new float[] { 2.0f, 2.0f })
                .getMergedBox(new BoundingBox(new float[] { 8.0f, 8.0f }));
        BoundingBox box3 = new BoundingBox(new float[] { -4.0f, -4.0f })
                .getMergedBox(new BoundingBox(new float[] { -1.0f, -1.0f }));
        BoundingBox box4 = new BoundingBox(new float[] { -1.0f, -1.0f })
                .getMergedBox(new BoundingBox(new float[] { 5.0f, 5.0f }));
        // completely contains
        assertTrue(box1.contains(box2));
        assertFalse(box2.contains(box1));
        // completely disjoint
        assertFalse(box1.contains(box3));
        assertFalse(box3.contains(box1));
        // partially intersect
        assertFalse(box1.contains(box4));
        assertFalse(box4.contains(box1));
    }

    /** Point containment is inclusive of the box boundary. */
    @Test
    public void testContainsPoint() {
        BoundingBox box1 = new BoundingBox(new float[] { 0.0f, 0.0f })
                .getMergedBox(new BoundingBox(new float[] { 10.0f, 10.0f }));
        assertTrue(box1.contains(new float[] { 0.0f, 0.1f }));
        assertTrue(box1.contains(new float[] { 5.5f, 6.5f }));
        assertFalse(box1.contains(new float[] { -0.7f, -4.5f }));
        assertFalse(box1.contains(new float[] { 5.0f, 11.0f }));
        assertFalse(box1.contains(new float[] { -5.0f, 10.0f }));
    }
}
| 403 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/DynamicScoreVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.function.BiFunction;
import org.junit.jupiter.api.Test;
/**
 * Verifies that {@link DynamicScoreVisitor} delegates its scoring hooks
 * (seen, unseen, damp) verbatim to the user-supplied functions.
 */
public class DynamicScoreVisitorTest {

    @Test
    public void testScoringMethods() {
        // arbitrary but distinct scoring lambdas so each delegation is distinguishable
        BiFunction<Double, Double, Double> seenFunction = (a, b) -> (a + b) / 2;
        BiFunction<Double, Double, Double> unseenFunction = (a, b) -> 0.75 * a + 0.25 * b;
        BiFunction<Double, Double, Double> dampFunction = (a, b) -> Math.sqrt(a * b);

        DynamicScoreVisitor visitor = new DynamicScoreVisitor(new float[] { 1.1f, -2.2f }, 100, 2, seenFunction,
                unseenFunction, dampFunction);

        int depth = 9;
        int mass = 4;
        assertEquals((depth + mass) / 2.0, visitor.scoreSeen(depth, mass));
        assertEquals(0.75 * depth + 0.25 * mass, visitor.scoreUnseen(depth, mass));
        assertEquals(Math.sqrt(depth * mass), visitor.damp(depth, mass));
    }
}
| 404 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/AnomalyAttributionVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import static com.amazon.randomcutforest.CommonUtils.defaultScalarNormalizerFunction;
import static com.amazon.randomcutforest.CommonUtils.defaultScoreUnseenFunction;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.tree.BoundingBox;
import com.amazon.randomcutforest.tree.INodeView;
import com.amazon.randomcutforest.tree.NodeView;
/**
 * Unit tests for {@link AnomalyAttributionVisitor}, which attributes the anomaly
 * score per dimension and direction as a {@link DiVector}. Expected values are
 * recomputed by hand from the default scoring and normalization functions in
 * {@link CommonUtils}.
 */
public class AnomalyAttributionVisitorTest {

    /** Without a threshold the visitor starts at zero with leaf-equality honored. */
    @Test
    public void testNew() {
        float[] point = new float[] { 1.1f, -2.2f, 3.3f };
        int treeMass = 99;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass);
        assertFalse(visitor.pointInsideBox);
        for (int i = 0; i < point.length; i++) {
            assertFalse(visitor.coordInsideBox[i]);
        }
        assertFalse(visitor.ignoreLeaf);
        assertEquals(0, visitor.ignoreLeafMassThreshold);
        // the attribution starts out as the zero vector in both directions
        DiVector result = visitor.getResult();
        double[] zero = new double[point.length];
        assertArrayEquals(zero, result.high);
        assertArrayEquals(zero, result.low);
    }

    /** A positive threshold switches the visitor into ignore-leaf mode. */
    @Test
    public void testNewWithIgnoreOptions() {
        float[] point = new float[] { 1.1f, -2.2f, 3.3f };
        int treeMass = 99;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 7);
        assertFalse(visitor.pointInsideBox);
        for (int i = 0; i < point.length; i++) {
            assertFalse(visitor.coordInsideBox[i]);
        }
        assertTrue(visitor.ignoreLeaf);
        assertEquals(7, visitor.ignoreLeafMassThreshold);
        DiVector result = visitor.getResult();
        double[] zero = new double[point.length];
        assertArrayEquals(zero, result.high);
        assertArrayEquals(zero, result.low);
    }

    /**
     * A leaf equal to the query point marks duplicates and spreads the damped
     * collision score evenly over all 2 * d directions.
     */
    @Test
    public void testAcceptLeafEquals() {
        float[] point = { 1.1f, -2.2f, 3.3f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafDepth = 100;
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        int treeMass = 21;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 0);
        visitor.acceptLeaf(leafNode, leafDepth);
        assertTrue(visitor.hitDuplicates);
        assertEquals(visitor.sumOfNewRange, 0);
        // duplicate score: damp(leafMass, treeMass) / (depth + log2(leafMass + 1)),
        // split evenly across 2 * dimensions directions
        double expectedScoreSum = CommonUtils.defaultDampFunction(leafMass, treeMass)
                / (leafDepth + Math.log(leafMass + 1) / Math.log(2));
        double expectedScore = expectedScoreSum / (2 * point.length);
        DiVector result = visitor.getResult();
        for (int i = 0; i < point.length; i++) {
            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.low[i], EPSILON);
            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.high[i], EPSILON);
        }
    }

    /**
     * A leaf different from the query point attributes the unseen score in
     * proportion to the per-dimension gap, in the direction of the gap; a
     * threshold at or above the leaf mass makes the leaf count as a duplicate.
     */
    @Test
    public void testAcceptLeafNotEquals() {
        float[] point = new float[] { 1.1f, -2.2f, 3.3f };
        float[] anotherPoint = new float[] { -4.0f, 5.0f, 6.0f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(anotherPoint);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));
        int leafDepth = 100;
        int leafMass = 4;
        when(leafNode.getMass()).thenReturn(leafMass);
        int treeMass = 21;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 0);
        visitor.acceptLeaf(leafNode, leafDepth);
        double expectedScoreSum = defaultScoreUnseenFunction(leafDepth, leafMass);
        // total gap across all dimensions; each dimension contributes its share
        double sumOfNewRange = (1.1 - (-4.0)) + (5.0 - (-2.2)) + (6.0 - 3.3);
        DiVector result = visitor.getResult();
        // dim 0: query above leaf -> high; dims 1, 2: query below leaf -> low
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (1.1 - (-4.0)) / sumOfNewRange, treeMass),
                result.high[0], EPSILON);
        assertEquals(0.0, result.low[0]);
        assertEquals(0.0, result.high[1]);
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (5.0 - (-2.2)) / sumOfNewRange, treeMass),
                result.low[1], EPSILON);
        assertEquals(0.0, result.high[2]);
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (6.0 - 3.3) / sumOfNewRange, treeMass),
                result.low[2], EPSILON);
        // threshold 3 < leafMass 4: attribution unchanged
        visitor = new AnomalyAttributionVisitor(point, treeMass, 3);
        visitor.acceptLeaf(leafNode, leafDepth);
        result = visitor.getResult();
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (1.1 - (-4.0)) / sumOfNewRange, treeMass),
                result.high[0], EPSILON);
        assertEquals(0.0, result.low[0]);
        assertEquals(0.0, result.high[1]);
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (5.0 - (-2.2)) / sumOfNewRange, treeMass),
                result.low[1], EPSILON);
        assertEquals(0.0, result.high[2]);
        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (6.0 - 3.3) / sumOfNewRange, treeMass),
                result.low[2], EPSILON);
        // threshold 4 >= leafMass 4: treated like a duplicate, split evenly
        visitor = new AnomalyAttributionVisitor(point, treeMass, 4);
        visitor.acceptLeaf(leafNode, leafDepth);
        double expectedScore = expectedScoreSum / (2 * point.length);
        result = visitor.getResult();
        for (int i = 0; i < point.length; i++) {
            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.low[i], EPSILON);
            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.high[i], EPSILON);
        }
    }

    /**
     * Full ascent: a non-equal leaf, then a parent whose box does not contain the
     * query point (score blends with probability of cut), then a grandparent whose
     * box does contain it (attribution frozen).
     */
    @Test
    public void testAccept() {
        float[] pointToScore = { 0.0f, 0.0f };
        int treeMass = 50;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 0);
        INodeView leafNode = mock(NodeView.class);
        float[] point = new float[] { 1.0f, -2.0f };
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafMass = 3;
        when(leafNode.getMass()).thenReturn(leafMass);
        int depth = 4;
        visitor.acceptLeaf(leafNode, depth);
        DiVector result = visitor.getResult();
        double expectedScoreSum = defaultScoreUnseenFunction(depth, leafNode.getMass());
        double sumOfNewRange = 1.0 + 2.0;
        // dim 0: query below leaf -> low; dim 1: query above leaf -> high
        double[] expectedUnnormalizedLow = new double[] { expectedScoreSum * 1.0 / sumOfNewRange, 0.0 };
        double[] expectedUnnormalizedHigh = new double[] { 0.0, expectedScoreSum * 2.0 / sumOfNewRange };
        for (int i = 0; i < pointToScore.length; i++) {
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow[i], treeMass), result.low[i], EPSILON);
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh[i], treeMass), result.high[i],
                    EPSILON);
        }
        // parent does not contain pointToScore
        depth--;
        INodeView sibling = mock(NodeView.class);
        int siblingMass = 2;
        when(sibling.getMass()).thenReturn(siblingMass);
        INodeView parent = mock(NodeView.class);
        int parentMass = leafMass + siblingMass;
        when(parent.getMass()).thenReturn(parentMass);
        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, -0.5f });
        when(parent.getBoundingBox()).thenReturn(boundingBox);
        visitor.accept(parent, depth);
        result = visitor.getResult();
        // new score = probOfCut * unseen(depth, parentMass) blended with the prior
        // attribution scaled by (1 - probOfCut), per direction
        double expectedSumOfNewRange2 = 2.0 + 2.0;
        double expectedProbOfCut2 = (1.0 + 0.5) / expectedSumOfNewRange2;
        double[] expectedDifferenceInRangeVector2 = { 0.0, 1.0, 0.5, 0.0 };
        double expectedScore2 = defaultScoreUnseenFunction(depth, parent.getMass());
        double[] expectedUnnormalizedLow2 = new double[pointToScore.length];
        double[] expectedUnnormalizedHigh2 = new double[pointToScore.length];
        for (int i = 0; i < pointToScore.length; i++) {
            double prob = expectedDifferenceInRangeVector2[2 * i] / expectedSumOfNewRange2;
            expectedUnnormalizedHigh2[i] = prob * expectedScore2
                    + (1 - expectedProbOfCut2) * expectedUnnormalizedHigh[i];
            prob = expectedDifferenceInRangeVector2[2 * i + 1] / expectedSumOfNewRange2;
            expectedUnnormalizedLow2[i] = prob * expectedScore2 + (1 - expectedProbOfCut2) * expectedUnnormalizedLow[i];
        }
        for (int i = 0; i < pointToScore.length; i++) {
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow2[i], treeMass), result.low[i],
                    EPSILON);
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh2[i], treeMass), result.high[i],
                    EPSILON);
        }
        // grandparent contains pointToScore
        assertFalse(visitor.pointInsideBox);
        depth--;
        INodeView grandParent = mock(NodeView.class);
        when(grandParent.getMass()).thenReturn(parentMass + 2);
        when(grandParent.getBoundingBox()).thenReturn(boundingBox
                .getMergedBox(new BoundingBox(new float[] { -1.0f, 1.0f }).getMergedBox(new float[] { -0.5f, -1.5f })));
        visitor.accept(grandParent, depth);
        result = visitor.getResult();
        // the attribution must be unchanged once the point is inside the box
        for (int i = 0; i < pointToScore.length; i++) {
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow2[i], treeMass), result.low[i],
                    EPSILON);
            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh2[i], treeMass), result.high[i],
                    EPSILON);
        }
    }

    /**
     * In ignore-leaf mode with a leaf different from the query point, the final
     * attribution is renormalized so its high/low sum matches the saved score.
     */
    @ParameterizedTest
    @ValueSource(ints = { 3, 5 })
    public void reNormalizeNotEqual(int mass) {
        float[] pointToScore = { 0.0f, 0.0f };
        int treeMass = 50;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 4);
        INodeView leafNode = mock(NodeView.class);
        float[] point = new float[] { 1.0f, -2.0f };
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafMass = mass;
        when(leafNode.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(leafNode, 1);
        INodeView parent = mock(NodeView.class);
        int parentMass = leafMass + 2;
        when(parent.getMass()).thenReturn(parentMass);
        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, 2.0f });
        when(parent.getBoundingBox()).thenReturn(boundingBox);
        when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));
        visitor.accept(parent, 0);
        DiVector result = visitor.directionalAttribution;
        assertEquals(result.getHighLowSum(), visitor.savedScore, 1e-6);
    }

    /** Same renormalization invariant when the leaf equals the query point. */
    @ParameterizedTest
    @ValueSource(ints = { 3, 5 })
    public void reNormalize(int mass) {
        float[] pointToScore = { 0.0f, 0.0f };
        int treeMass = 50;
        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 4);
        INodeView leafNode = mock(NodeView.class);
        float[] point = pointToScore;
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafMass = mass;
        when(leafNode.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(leafNode, 1);
        INodeView parent = mock(NodeView.class);
        int parentMass = leafMass + 2;
        when(parent.getMass()).thenReturn(parentMass);
        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, 2.0f });
        when(parent.getBoundingBox()).thenReturn(boundingBox);
        when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));
        visitor.accept(parent, 0);
        DiVector result = visitor.directionalAttribution;
        assertEquals(result.getHighLowSum(), visitor.savedScore, 1e-6);
    }
}
| 405 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/AnomalyScoreVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.tree.BoundingBox;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
import com.amazon.randomcutforest.tree.NodeView;
public class AnomalyScoreVisitorTest {
@Test
public void testNew() {
float[] point = new float[] { 1.0f, 2.0f };
int sampleSize = 9;
AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize);
assertFalse(visitor.pointInsideBox);
for (int i = 0; i < point.length; i++) {
assertFalse(visitor.coordInsideBox[i]);
}
assertFalse(visitor.ignoreLeafEquals);
assertEquals(0, visitor.ignoreLeafMassThreshold);
assertThat(visitor.getResult(), is(0.0));
}
@Test
public void testNewWithIgnoreOptions() {
float[] point = new float[] { 1.0f, 2.0f };
int sampleSize = 9;
AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize, 7);
assertFalse(visitor.pointInsideBox);
for (int i = 0; i < point.length; i++) {
assertFalse(visitor.coordInsideBox[i]);
}
assertTrue(visitor.ignoreLeafEquals);
assertEquals(7, visitor.ignoreLeafMassThreshold);
assertThat(visitor.getResult(), is(0.0));
}
    /**
     * A leaf equal to the query point yields the damped duplicate score; an
     * ignore threshold at or above the leaf mass changes the score, a smaller
     * threshold does not.
     */
    @Test
    public void testAcceptLeafEquals() {
        float[] point = { 1.0f, 2.0f, 3.0f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafDepth = 100;
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        int subSampleSize = 21;
        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, subSampleSize);
        visitor.acceptLeaf(leafNode, leafDepth);
        // duplicate score: damp(leafMass, sampleSize) / (depth + log2(leafMass + 1))
        double expectedScore = CommonUtils.defaultDampFunction(leafMass, subSampleSize)
                / (leafDepth + Math.log(leafMass + 1) / Math.log(2));
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, subSampleSize), EPSILON));
        assertTrue(visitor.pointInsideBox);
        // same computation at depth 0
        visitor = new AnomalyScoreVisitor(point, subSampleSize);
        visitor.acceptLeaf(leafNode, 0);
        expectedScore = CommonUtils.defaultDampFunction(leafMass, subSampleSize)
                / (Math.log(leafMass + 1) / Math.log(2.0));
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, subSampleSize), EPSILON));
        assertTrue(visitor.pointInsideBox);
        // threshold 7 < leafMass 10: duplicate handling unchanged
        AnomalyScoreVisitor anotherVisitor = new AnomalyScoreVisitor(point, subSampleSize, 7);
        anotherVisitor.acceptLeaf(leafNode, 0);
        assertEquals(anotherVisitor.score, visitor.score);
        // threshold 12 >= leafMass 10: the duplicate is ignored, score differs
        AnomalyScoreVisitor yetAnotherVisitor = new AnomalyScoreVisitor(point, subSampleSize, 12);
        yetAnotherVisitor.acceptLeaf(leafNode, 0);
        assertNotEquals(yetAnotherVisitor.score, visitor.score);
    }
    /**
     * A leaf different from the query point is scored as an unseen point:
     * 1 / (depth + 1); the ignore-leaf threshold has no effect in this case.
     */
    @Test
    public void testAcceptLeafNotEquals() {
        float[] point = new float[] { 1.0f, 2.0f, 3.0f };
        float[] anotherPoint = new float[] { 4.0f, 5.0f, 6.0f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(anotherPoint);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));
        int leafDepth = 100;
        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, 2);
        visitor.acceptLeaf(leafNode, leafDepth);
        double expectedScore = 1.0 / (leafDepth + 1);
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, 2), EPSILON));
        // the point differs from the leaf, so it is not inside the leaf box
        assertFalse(visitor.pointInsideBox);
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        // with a non-equal leaf, both threshold settings produce the same score
        AnomalyScoreVisitor anotherVisitor = new AnomalyScoreVisitor(point, 2, 7);
        anotherVisitor.acceptLeaf(leafNode, 100);
        assertEquals(anotherVisitor.score, visitor.score);
        AnomalyScoreVisitor yetAnotherVisitor = new AnomalyScoreVisitor(point, 2, 12);
        yetAnotherVisitor.acceptLeaf(leafNode, 100);
        assertEquals(yetAnotherVisitor.score, visitor.score);
    }
    /**
     * Once the query point coincides with the leaf point, ascending through
     * ancestor nodes must leave the score unchanged.
     */
    @Test
    public void testAcceptEqualsLeafPoint() {
        float[] pointToScore = { 0.0f, 0.0f };
        int sampleSize = 50;
        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(pointToScore, sampleSize);
        float[] point = Arrays.copyOf(pointToScore, pointToScore.length);
        INodeView node = mock(NodeView.class);
        when(node.getLeafPoint()).thenReturn(point);
        when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int depth = 2;
        visitor.acceptLeaf(node, depth);
        // duplicate score: damp(mass, sampleSize) / (depth + log2(mass + 1))
        double expectedScore = CommonUtils.defaultDampFunction(node.getMass(), sampleSize)
                / (depth + Math.log(node.getMass() + 1) / Math.log(2));
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        // NOTE(review): boundingBox is computed below but the NodeView passed to
        // accept is built with nulls and does not use it; the assertions only
        // verify that the score stays frozen -- confirm this is intentional.
        depth--;
        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 1.0f, 1.0f });
        node = new NodeView(null, null, Null);
        visitor.accept(node, depth);
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        depth--;
        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, -1.0f });
        node = new NodeView(null, null, Null);
        visitor.accept(node, depth);
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
    }
    /**
     * Full ascent: a non-equal leaf, then ancestors whose boxes grow toward the
     * query point; the score blends via the probability of separation and
     * freezes once the point is inside the box.
     */
    @Test
    public void testAccept() {
        float[] pointToScore = new float[] { 0.0f, 0.0f };
        int sampleSize = 50;
        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(pointToScore, sampleSize);
        NodeView node = mock(NodeView.class);
        float[] otherPoint = new float[] { 1.0f, 1.0f };
        when(node.getLeafPoint()).thenReturn(otherPoint);
        when(node.getBoundingBox()).thenReturn(new BoundingBox(otherPoint, otherPoint));
        int depth = 4;
        visitor.acceptLeaf(node, depth);
        // unseen-leaf score at this depth
        double expectedScore = 1.0 / (depth + 1);
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        depth--;
        // new score = p * unseen(depth) + (1 - p) * old score, p = prob. of separation
        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 2.0f, 0.0f });
        when(node.getBoundingBox()).thenReturn(boundingBox);
        when(node.probailityOfSeparation(any())).thenReturn(1.0 / 3);
        visitor.accept(node, depth);
        double p = visitor.getProbabilityOfSeparation(boundingBox);
        expectedScore = p * (1.0 / (depth + 1)) + (1 - p) * expectedScore;
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        depth--;
        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, 0.0f });
        when(node.getBoundingBox()).thenReturn(boundingBox);
        when(node.probailityOfSeparation(any())).thenReturn(0.0);
        visitor.accept(node, depth);
        p = visitor.getProbabilityOfSeparation(boundingBox);
        expectedScore = p * (1.0 / (depth + 1)) + (1 - p) * expectedScore;
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        depth--;
        // box now contains the query point: score frozen, flag set
        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, -1.0f });
        when(node.probailityOfSeparation(any())).thenReturn(0.0);
        visitor.accept(node, depth);
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(visitor.getResult(),
                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));
        assertTrue(visitor.pointInsideBox);
    }
    /**
     * Probability of separation is (extra range needed to absorb the point) /
     * (total range after absorbing it); per-dimension inside flags are updated
     * as a side effect and pre-set flags must not change the result.
     */
    @Test
    public void testGetProbabilityOfSeparation() {
        float[] minPoint = { 0.0f, 0.0f, 0.0f };
        float[] maxPoint = { 1.0f, 2.0f, 3.0f };
        IBoundingBoxView boundingBox = new BoundingBox(minPoint);
        boundingBox = boundingBox.getMergedBox(maxPoint);
        // point strictly inside the box: zero probability, all flags set
        float[] point = { 0.5f, 0.5f, 0.5f };
        int sampleSize = 2;
        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize);
        double p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo(0.0, EPSILON));
        assertTrue(visitor.coordInsideBox[0]);
        assertTrue(visitor.coordInsideBox[1]);
        assertTrue(visitor.coordInsideBox[2]);
        // pre-marking some coordinates inside must not change the answer
        visitor = new AnomalyScoreVisitor(point, sampleSize);
        visitor.coordInsideBox[1] = visitor.coordInsideBox[2] = true;
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo(0.0, EPSILON));
        assertTrue(visitor.coordInsideBox[0]);
        assertTrue(visitor.coordInsideBox[1]);
        assertTrue(visitor.coordInsideBox[2]);
        // point outside in dimension 0 only: extra range 1 over new total 2 + 2 + 3
        point = new float[] { 2.0f, 0.5f, 0.5f };
        visitor = new AnomalyScoreVisitor(point, sampleSize);
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo(1.0 / (2.0 + 2.0 + 3.0), EPSILON));
        assertFalse(visitor.coordInsideBox[0]);
        assertTrue(visitor.coordInsideBox[1]);
        assertTrue(visitor.coordInsideBox[2]);
        visitor = new AnomalyScoreVisitor(point, sampleSize);
        visitor.coordInsideBox[1] = visitor.coordInsideBox[2] = true;
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo(1.0 / (2.0 + 2.0 + 3.0), EPSILON));
        assertFalse(visitor.coordInsideBox[0]);
        assertTrue(visitor.coordInsideBox[1]);
        assertTrue(visitor.coordInsideBox[2]);
        // point outside in dimensions 1 and 2: extra range 3 + 1 over new total 1 + 5 + 4
        point = new float[] { 0.5f, -3.0f, 4.0f };
        visitor = new AnomalyScoreVisitor(point, sampleSize);
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo((3.0 + 1.0) / (1.0 + 5.0 + 4.0), EPSILON));
        assertTrue(visitor.coordInsideBox[0]);
        assertFalse(visitor.coordInsideBox[1]);
        assertFalse(visitor.coordInsideBox[2]);
        visitor = new AnomalyScoreVisitor(point, sampleSize);
        visitor.coordInsideBox[0] = true;
        p = visitor.getProbabilityOfSeparation(boundingBox);
        assertThat(p, closeTo((3.0 + 1.0) / (1.0 + 5.0 + 4.0), EPSILON));
        assertTrue(visitor.coordInsideBox[0]);
        assertFalse(visitor.coordInsideBox[1]);
        assertFalse(visitor.coordInsideBox[2]);
    }
@Test
public void test_getProbabilityOfSeparation_leafNode() {
    // A bounding box built from a single point has zero volume; asking either
    // visitor for a separation probability on it is an illegal state.
    float[] queryPoint = new float[] { 1.0f, 2.0f, 3.0f };
    float[] duplicatePoint = Arrays.copyOf(queryPoint, queryPoint.length);
    BoundingBox degenerateBox = new BoundingBox(duplicatePoint);

    AnomalyScoreVisitor scoreVisitor = new AnomalyScoreVisitor(queryPoint, 2);
    assertThrows(IllegalStateException.class, () -> scoreVisitor.getProbabilityOfSeparation(degenerateBox));

    TransductiveScalarScoreVisitor transductiveVisitor = new TransductiveScalarScoreVisitor(duplicatePoint, 2,
            CommonUtils::defaultScoreSeenFunction, CommonUtils::defaultScoreUnseenFunction,
            CommonUtils::defaultDampFunction, b -> new double[3]);
    assertThrows(IllegalStateException.class, () -> transductiveVisitor.getProbabilityOfSeparation(degenerateBox));
}
}
| 406 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/DynamicAttributionVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.function.BiFunction;
import org.junit.jupiter.api.Test;
/**
 * Verifies that {@link DynamicAttributionVisitor} delegates its scoring hooks
 * (seen, unseen, damp) to the functions supplied at construction time.
 */
public class DynamicAttributionVisitorTest {

    @Test
    public void testScoringMethods() {
        // Arbitrary, easily-distinguishable scoring functions.
        // Fixed typo: local was previously named "scoreUneen".
        BiFunction<Double, Double, Double> scoreSeen = (x, y) -> (x + y) / 2;
        BiFunction<Double, Double, Double> scoreUnseen = (x, y) -> 0.75 * x + 0.25 * y;
        BiFunction<Double, Double, Double> damp = (x, y) -> Math.sqrt(x * y);

        DynamicAttributionVisitor visitor = new DynamicAttributionVisitor(new float[] { 1.1f, -2.2f }, 100, 2,
                scoreSeen, scoreUnseen, damp);

        int x = 9;
        int y = 4;
        // Each visitor hook must return exactly what the injected function computes.
        assertEquals((x + y) / 2.0, visitor.scoreSeen(x, y));
        assertEquals(0.75 * x + 0.25 * y, visitor.scoreUnseen(x, y));
        assertEquals(Math.sqrt(x * y), visitor.damp(x, y));
    }
}
| 407 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/inspect/NearNeighborVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.inspect;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collector;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.returntypes.Neighbor;
import com.amazon.randomcutforest.tree.INodeView;
import com.amazon.randomcutforest.tree.NodeView;
/**
 * Tests for {@link NearNeighborVisitor}: a leaf is reported as a neighbor only
 * when it lies within the distance threshold of the query point, and the
 * reported {@link Neighbor} must be a defensive copy of the leaf data.
 */
public class NearNeighborVisitorTest {

    private float[] queryPoint;
    private double distanceThreshold;
    private NearNeighborVisitor visitor;

    @BeforeEach
    public void setUp() {
        queryPoint = new float[] { 7.7f, 8.8f, -6.6f };
        distanceThreshold = 10.0;
        visitor = new NearNeighborVisitor(queryPoint, distanceThreshold);
    }

    @Test
    public void acceptLeafNear() {
        // Each coordinate differs from the query by 1.1, so the Euclidean
        // distance sqrt(3 * 1.1^2) is well within the 10.0 threshold.
        float[] leafPoint = new float[] { 8.8f, 9.9f, -5.5f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));
        when(leafNode.getLiftedLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));
        HashMap<Long, Integer> sequenceIndexes = new HashMap<>();
        sequenceIndexes.put(1234L, 1);
        sequenceIndexes.put(5678L, 1);
        when(leafNode.getSequenceIndexes()).thenReturn(sequenceIndexes);
        int depth = 12;
        visitor.acceptLeaf(leafNode, depth);
        Optional<Neighbor> optional = visitor.getResult();
        assertTrue(optional.isPresent());
        Neighbor neighbor = optional.get();
        // The neighbor must copy the leaf point and sequence indexes, not alias them.
        assertNotSame(leafPoint, neighbor.point);
        assertArrayEquals(leafPoint, neighbor.point);
        assertEquals(Math.sqrt(3 * 1.1 * 1.1), neighbor.distance, EPSILON);
        assertNotSame(leafNode.getSequenceIndexes(), neighbor.sequenceIndexes);
    }

    @Test
    public void acceptLeafNearTimestampsDisabled() {
        float[] leafPoint = new float[] { 8.8f, 9.9f, -5.5f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLiftedLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));
        when(leafNode.getLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));
        // getSequenceIndexes() is deliberately left unstubbed, so the mock
        // returns an empty map, simulating disabled sequence tracking.
        assertEquals(0, leafNode.getSequenceIndexes().size());
        int depth = 12;
        visitor.acceptLeaf(leafNode, depth);
        Optional<Neighbor> optional = visitor.getResult();
        assertTrue(optional.isPresent());
        // A visitor built without a threshold should find the same neighbor.
        NearNeighborVisitor nearNeighborVisitor = new NearNeighborVisitor(queryPoint);
        nearNeighborVisitor.acceptLeaf(leafNode, depth);
        Map<Integer, Neighbor> map1 = new HashMap<>();
        Map<Integer, Neighbor> map2 = new HashMap<>();
        // An equality test: merging maps keyed on the same point hash must not
        // grow the combined map beyond a single entry.
        Collector<Optional<Neighbor>, Map<Integer, Neighbor>, List<Neighbor>> collector = Neighbor.collector();
        map1.put(Arrays.hashCode(optional.get().point), optional.get());
        // BUG FIX: map2 previously stored the first visitor's neighbor under the
        // second visitor's key; it should store the second visitor's neighbor.
        map2.put(Arrays.hashCode(nearNeighborVisitor.getResult().get().point),
                nearNeighborVisitor.getResult().get());
        collector.combiner().apply(map1, map2);
        assertEquals(map1.size(), 1);
        Neighbor neighbor = optional.get();
        assertNotSame(leafPoint, neighbor.point);
        assertArrayEquals(leafPoint, neighbor.point);
        assertEquals(Math.sqrt(3 * 1.1 * 1.1), neighbor.distance, EPSILON);
        assertTrue(neighbor.sequenceIndexes.isEmpty());
    }

    @Test
    public void acceptLeafNotNear() {
        // This leaf is far outside the 10.0 threshold, so no neighbor is reported.
        float[] leafPoint = new float[] { 108.8f, 209.9f, -305.5f };
        INodeView leafNode = mock(NodeView.class);
        HashMap<Long, Integer> sequenceIndexes = new HashMap<>();
        sequenceIndexes.put(1234L, 1);
        sequenceIndexes.put(5678L, 1);
        when(leafNode.getLeafPoint()).thenReturn(leafPoint);
        when(leafNode.getLiftedLeafPoint()).thenReturn(leafPoint);
        when(leafNode.getSequenceIndexes()).thenReturn(sequenceIndexes);
        int depth = 12;
        visitor.acceptLeaf(leafNode, depth);
        Optional<Neighbor> optional = visitor.getResult();
        assertFalse(optional.isPresent());
    }
}
| 408 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/util/ArrayPackingTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static com.amazon.randomcutforest.util.ArrayPacking.pack;
import static com.amazon.randomcutforest.util.ArrayPacking.unpackDoubles;
import static com.amazon.randomcutforest.util.ArrayPacking.unpackFloats;
import static com.amazon.randomcutforest.util.ArrayPacking.unpackInts;
import static com.amazon.randomcutforest.util.ArrayPacking.unpackShorts;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Arrays;
import java.util.Random;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
/**
 * Round-trip and boundary tests for the static {@link ArrayPacking} utility:
 * packing/unpacking ints, shorts, floats and doubles, with and without
 * compression, and with explicit pack/unpack lengths (truncation and
 * zero-padding).
 */
public class ArrayPackingTest {

    private Random rng;

    @BeforeEach
    public void setUp() {
        // ArrayPacking is a static utility; the previous dead instantiation was removed.
        rng = new Random();
    }

    @Test
    public void testLogMax() {
        long[] bases = new long[] { 2, 101, 3_456_789 };
        Arrays.stream(bases).forEach(base -> {
            // logMax(base) is the largest exponent whose power stays below Integer.MAX_VALUE.
            int log = ArrayPacking.logMax(base);
            assertTrue(Math.pow(base, log + 1) >= Integer.MAX_VALUE);
            assertTrue(Math.pow(base, log) < Integer.MAX_VALUE);
        });
    }

    @Test
    public void testLogMaxInvalid() {
        // Bases below 2 are rejected.
        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(1));
        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(0));
        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(-123467890));
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 11, 100 })
    public void testIntsPackRoundTrip(int inputLength) {
        // pack/unpack must be inverse for both compressed and uncompressed modes.
        int[] inputArray = rng.ints().limit(inputLength).toArray();
        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, false), false));
        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, true), true));
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })
    public void testShortsPackRoundTrip(int inputLength) {
        short[] inputArray = new short[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = (short) (rng.nextInt() % 100);
        }
        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, false), false));
        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, true), true));
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 11, 100 })
    public void testIdenticalInts(int inputLength) {
        // A constant array should compress to at most 3 ints (run-length style).
        int[] inputArray = new int[inputLength];
        Arrays.fill(inputArray, rng.nextInt());
        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, false), false));
        int[] result = ArrayPacking.pack(inputArray, true);
        assertTrue(result.length == 3 || inputLength < 3 && result.length == inputLength);
        assertArrayEquals(inputArray, ArrayPacking.unpackInts(result, true));
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })
    public void testIdenticalShorts(int inputLength) {
        // Same compression expectation as testIdenticalInts, but for shorts.
        short item = (short) (rng.nextInt() % 100);
        short[] inputArray = new short[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = item;
        }
        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, false), false));
        int[] result = ArrayPacking.pack(inputArray, true);
        assertTrue(result.length == 3 || inputLength < 3 && result.length == inputLength);
        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(result, true));
    }

    @Test
    public void testUnpackIntsWithLengthGiven() {
        int inputLength = 100;
        int[] inputArray = rng.ints().limit(inputLength).toArray();
        // Pack lengths outside [0, inputLength] are rejected.
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 1, false));
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 1, true));
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -1, false));
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -1, true));
        assertDoesNotThrow(() -> pack(inputArray, 0, true));
        assertDoesNotThrow(() -> pack(inputArray, 0, false));
        int[] uncompressed = ArrayPacking.pack(inputArray, false);
        int[] compressed = ArrayPacking.pack(inputArray, true);
        // Unpacking fewer elements than were packed truncates.
        int[] result = ArrayPacking.unpackInts(uncompressed, 50, false);
        assertThrows(IllegalArgumentException.class, () -> unpackInts(compressed, -1, true));
        assertEquals(50, result.length);
        assertArrayEquals(Arrays.copyOf(inputArray, 50), result);
        result = ArrayPacking.unpackInts(compressed, 50, true);
        assertEquals(50, result.length);
        assertArrayEquals(Arrays.copyOf(inputArray, 50), result);
        // Unpacking more elements than were packed zero-pads the tail.
        result = ArrayPacking.unpackInts(uncompressed, 200, false);
        assertEquals(200, result.length);
        assertArrayEquals(inputArray, Arrays.copyOf(result, 100));
        for (int i = 100; i < 200; i++) {
            assertEquals(0, result[i]);
        }
        result = ArrayPacking.unpackInts(compressed, 200, true);
        assertEquals(200, result.length);
        assertArrayEquals(inputArray, Arrays.copyOf(result, 100));
        for (int i = 100; i < 200; i++) {
            assertEquals(0, result[i]);
        }
    }

    @Test
    public void testUnpackShortsWithLengthGiven() {
        // Removed an unused "inputLength" local; the fixtures below use 50/100 directly.
        short[] inputArray = new short[50];
        Arrays.fill(inputArray, (short) 2);
        short[] test = new short[2];
        short[] test2 = new short[3];
        int[] uncompressed = ArrayPacking.pack(inputArray, false);
        int[] compressed = ArrayPacking.pack(inputArray, true);
        assertArrayEquals(test, unpackShorts(new int[2], true));
        assertArrayEquals(test, unpackShorts(new int[2], false));
        assertArrayEquals(test2, unpackShorts(new int[3], false));
        assertThrows(IllegalArgumentException.class, () -> unpackShorts(uncompressed, -1, false));
        short[] result = ArrayPacking.unpackShorts(uncompressed, 50, false);
        assertEquals(50, result.length);
        assertArrayEquals(Arrays.copyOf(inputArray, 50), result);
        // Over-length unpack: first 50 entries are the data, the rest are zero-padded.
        result = ArrayPacking.unpackShorts(compressed, 100, true);
        assertEquals(100, result.length);
        for (int y = 0; y < 50; y++) {
            assertTrue(result[y] == 2);
        }
        for (int y = 50; y < 100; y++) {
            assertTrue(result[y] == 0);
        }
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })
    public void testPackDoublesRoundTrip(int inputLength) {
        double[] inputArray = rng.doubles().limit(inputLength).toArray();
        assertArrayEquals(inputArray, ArrayPacking.unpackDoubles(ArrayPacking.pack(inputArray)));
    }

    @ParameterizedTest
    @ValueSource(ints = { 0, 1, 2, 3, 5, 100 })
    public void testPackFloatsRoundTrip(int inputLength) {
        float[] inputArray = new float[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = rng.nextFloat();
        }
        assertArrayEquals(inputArray, unpackFloats(ArrayPacking.pack(inputArray)));
    }

    @ParameterizedTest
    @ValueSource(booleans = { true, false })
    public void testPackShortsWithLength(boolean compress) {
        int inputLength = 100;
        int packLength = 76;
        short[] inputArray = new short[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = (short) (rng.nextInt() % 100);
        }
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 10, compress));
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -10, compress));
        // Packing with an explicit length keeps only the prefix of that length.
        int[] array = ArrayPacking.pack(inputArray, packLength, compress);
        short[] outputArray = ArrayPacking.unpackShorts(array, compress);
        assertEquals(packLength, outputArray.length);
        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);
    }

    @Test
    public void testPackDoublesWithLength() {
        int inputLength = 100;
        int packLength = 76;
        double[] inputArray = rng.doubles().limit(inputLength).toArray();
        byte[] bytes = ArrayPacking.pack(inputArray, packLength);
        double[] outputArray = ArrayPacking.unpackDoubles(bytes);
        assertEquals(packLength, outputArray.length);
        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);
        assertDoesNotThrow(() -> pack(new double[0], 0));
        assertThrows(IllegalArgumentException.class, () -> pack(new double[10], 11));
        assertThrows(IllegalArgumentException.class, () -> pack(new double[10], -1));
    }

    @Test
    public void testPackFloatsWithLength() {
        int inputLength = 100;
        int packLength = 76;
        float[] inputArray = new float[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = rng.nextFloat();
        }
        byte[] bytes = ArrayPacking.pack(inputArray, packLength);
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 10));
        float[] outputArray = unpackFloats(bytes);
        assertEquals(packLength, outputArray.length);
        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);
        assertDoesNotThrow(() -> pack(new float[0], 0));
        assertThrows(IllegalArgumentException.class, () -> pack(new float[10], -1));
    }

    @Test
    public void testUnpackDoublesWithLength() {
        int inputLength = 100;
        double[] inputArray = rng.doubles().limit(inputLength).toArray();
        byte[] bytes = ArrayPacking.pack(inputArray);
        // Shorter unpack length truncates.
        int unpackLength1 = 25;
        double[] outputArray1 = ArrayPacking.unpackDoubles(bytes, unpackLength1);
        assertEquals(unpackLength1, outputArray1.length);
        assertArrayEquals(Arrays.copyOf(inputArray, unpackLength1), outputArray1);
        // Longer unpack length zero-pads (but packing past the input is illegal).
        int unpackLength2 = 123;
        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, unpackLength2));
        double[] outputArray2 = ArrayPacking.unpackDoubles(bytes, unpackLength2);
        assertEquals(unpackLength2, outputArray2.length);
        assertArrayEquals(inputArray, Arrays.copyOf(outputArray2, inputLength));
        for (int i = inputLength; i < unpackLength2; i++) {
            assertEquals(0.0, outputArray2[i]);
        }
    }

    @Test
    public void testUnpackFloatWithLength() {
        int inputLength = 100;
        float[] inputArray = new float[inputLength];
        for (int i = 0; i < inputLength; i++) {
            inputArray[i] = rng.nextFloat();
        }
        byte[] bytes = ArrayPacking.pack(inputArray);
        int unpackLength1 = 25;
        float[] outputArray1 = unpackFloats(bytes, unpackLength1);
        assertEquals(unpackLength1, outputArray1.length);
        assertArrayEquals(Arrays.copyOf(inputArray, unpackLength1), outputArray1);
        int unpackLength2 = 123;
        float[] outputArray2 = unpackFloats(bytes, unpackLength2);
        assertEquals(unpackLength2, outputArray2.length);
        assertArrayEquals(inputArray, Arrays.copyOf(outputArray2, inputLength));
        for (int i = inputLength; i < unpackLength2; i++) {
            assertEquals(0.0, outputArray2[i]);
        }
    }

    @Test
    public void testConfig() {
        // The byte buffer must hold at least one full element of the target type.
        byte[] array = new byte[1];
        assertThrows(IllegalArgumentException.class, () -> unpackFloats(array, 1));
        assertThrows(IllegalArgumentException.class, () -> unpackDoubles(array, 1));
        byte[] newArray = new byte[Double.BYTES];
        assertDoesNotThrow(() -> unpackDoubles(newArray, 1));
        assertDoesNotThrow(() -> unpackFloats(newArray, 1));
        assertThrows(IllegalArgumentException.class, () -> unpackFloats(newArray, -1));
        assertThrows(IllegalArgumentException.class, () -> unpackDoubles(newArray, -1));
    }
}
| 409 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/util/ArrayUtilsTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import java.util.Arrays;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
/**
 * Tests for {@link ArrayUtils#cleanCopy(double[])}, which must return a fresh
 * array with negative zeros normalized to positive zero.
 */
public class ArrayUtilsTest {

    // Removed the unused "ArrayUtils utils" field: ArrayUtils is used only
    // through its static methods, so instantiating it was dead code.

    @ParameterizedTest
    @CsvSource({ "-0.0,0.0", "0.0,0.0", "-0.0:0.0:1.0,0.0:0.0:1.0" })
    public void cleanCopy(String input, String expected) {
        double[] inputArray = array(input);
        double[] cleanCopy = ArrayUtils.cleanCopy(inputArray);
        // The result must be a new array, never the input itself.
        assertNotSame(inputArray, cleanCopy);
        assertArrayEquals(array(expected), cleanCopy);
    }

    /** Parses a colon-separated list of doubles, e.g. "1.0:2.0" -> {1.0, 2.0}. */
    private double[] array(String arrayString) {
        return Arrays.stream(arrayString.split(":")).mapToDouble(Double::valueOf).toArray();
    }
}
| 410 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/util/WeightedTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static com.amazon.randomcutforest.util.Weighted.createSample;
import static com.amazon.randomcutforest.util.Weighted.prefixPick;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Exercises the sampling helpers in {@link Weighted}: weighted sub-sampling via
 * {@code createSample} and weighted prefix selection via {@code prefixPick}.
 */
public class WeightedTest {

    private Random random;
    private int size = 10000;
    private int heavyIndex;
    private ArrayList<Weighted<Integer>> items;

    @BeforeEach
    public void setUp() {
        random = new Random();
        items = new ArrayList<>();
        // size light items with weights in (0.1, 1.1), followed by one very heavy item.
        for (int index = 0; index < size; index++) {
            float weight = (float) (0.1 + random.nextDouble());
            items.add(new Weighted<>(index, weight));
        }
        heavyIndex = size + 7;
        items.add(new Weighted<>(heavyIndex, size));
    }

    @Test
    public void testCreateSample() {
        // A forced-sample rate of 0 yields an empty sample.
        assertTrue(createSample(items, 0, 10, 0, 1.0).size() == 0);
        // With a positive rate, the heavy item should be added first.
        List<Weighted<Integer>> sample = createSample(items, 0, 10, 0.1, 1.0);
        assertTrue(sample.size() > 0);
        assertTrue(sample.get(0).index == heavyIndex);
        assertTrue(sample.get(0).weight == (float) size);
    }

    @Test
    public void testPrefixPick() {
        double totalWeight = items.stream().mapToDouble(entry -> entry.weight).sum();
        assertTrue(totalWeight < 2 * size);
        // A pick well inside the light prefix returns a light item.
        Weighted<Integer> lightItem = prefixPick(items, size / 3.0);
        assertTrue(lightItem.index < size);
        assertTrue(lightItem.weight <= 1.1);
        // A pick past the light prefix should land on the heavy last element.
        Weighted<Integer> heavyItem = prefixPick(items, 3.0 * size / 4);
        assertTrue(heavyItem.index == heavyIndex);
        assertTrue(heavyItem.weight == (float) size);
        // Even a weight beyond the total resolves to the last element.
        heavyItem = prefixPick(items, 2 * size);
        assertTrue(heavyItem.index == heavyIndex);
        assertTrue(heavyItem.weight == (float) size);
    }

    @Test
    public void emptyList() {
        // Picking from an empty list is illegal.
        List<Weighted<Integer>> empty = new ArrayList<>();
        assertThrows(IllegalArgumentException.class, () -> prefixPick(empty, 1.0f));
    }
}
| 411 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/util/ShingleBuilderTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ShingleBuilder}, which maintains a sliding window
 * ("shingle") of the most recent points from a fixed-dimension input stream,
 * in either sliding (shift-left) or cyclic (rotating-slot) mode.
 */
public class ShingleBuilderTest {

    // Dimension of each input point.
    private int dimensions;
    // Number of points retained in the shingle.
    private int shingleSize;
    private ShingleBuilder builder;

    @BeforeEach
    public void setUp() {
        dimensions = 2;
        shingleSize = 3;
        builder = new ShingleBuilder(dimensions, shingleSize);
    }

    @Test
    public void testNew() {
        assertEquals(dimensions, builder.getInputPointSize());
        assertEquals(dimensions * shingleSize, builder.getShingledPointSize());
        // The default construction mode is sliding, not cyclic.
        assertFalse(builder.isCyclic());
    }

    @Test
    public void testNewWithInvalidArguments() {
        // Both dimensions and shingle size must be positive.
        assertThrows(IllegalArgumentException.class, () -> new ShingleBuilder(0, shingleSize));
        assertThrows(IllegalArgumentException.class, () -> new ShingleBuilder(dimensions, 0));
    }

    @Test
    public void testAddPoint() {
        // Sliding mode: the newest point occupies the rightmost slots and older
        // points shift left; the oldest point falls off once the shingle is full.
        double[] shingle = builder.getShingle();
        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, shingle);

        builder.addPoint(new double[] { 9, 10 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 0, 0, 0, 0, 9, 10 }, shingle);

        builder.addPoint(new double[] { 7, 8 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 0, 0, 9, 10, 7, 8 }, shingle);

        builder.addPoint(new double[] { 5, 6 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 9, 10, 7, 8, 5, 6 }, shingle);

        builder.addPoint(new double[] { 3, 4 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 7, 8, 5, 6, 3, 4 }, shingle);
    }

    @Test
    public void testAddPointCyclic() {
        // Cyclic mode: points are written into rotating slot positions instead of
        // shifting; once full, the newest point overwrites the oldest slot in place.
        builder = new ShingleBuilder(dimensions, shingleSize, true);
        double[] shingle = builder.getShingle();
        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, shingle);

        builder.addPoint(new double[] { 9, 10 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 9, 10, 0, 0, 0, 0 }, shingle);

        builder.addPoint(new double[] { 7, 8 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 9, 10, 7, 8, 0, 0 }, shingle);

        builder.addPoint(new double[] { 5, 6 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 9, 10, 7, 8, 5, 6 }, shingle);

        builder.addPoint(new double[] { 3, 4 });
        shingle = builder.getShingle();
        assertArrayEquals(new double[] { 3, 4, 7, 8, 5, 6 }, shingle);
    }

    @Test
    public void testAddPointWithInvalidArguments() {
        assertThrows(NullPointerException.class, () -> builder.addPoint(null));
        double[] point = new double[9]; // wrong size of array
        assertThrows(IllegalArgumentException.class, () -> builder.addPoint(point));
    }

    @Test
    public void testShingleCopy() {
        // getShingle(buffer) must copy into the caller's buffer, and the returned
        // shingle must not alias the builder's internal state.
        double[] buffer = new double[dimensions * shingleSize];

        builder.addPoint(new double[] { 2, 1 });
        builder.addPoint(new double[] { 4, 3 });
        builder.addPoint(new double[] { 6, 5 });

        double[] shingle = builder.getShingle();
        assertArrayEquals(new double[] { 2, 1, 4, 3, 6, 5 }, shingle);
        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, buffer);

        builder.getShingle(buffer);
        assertArrayEquals(shingle, buffer);

        // Mutating the buffer must not affect the previously returned copy.
        buffer[0] = 0;
        assertEquals(2, shingle[0]);
    }

    @Test
    public void testGetShingleWithInvalidArguments() {
        assertThrows(NullPointerException.class, () -> builder.getShingle(null));
        double[] buffer = new double[2]; // wrong size of array
        assertThrows(IllegalArgumentException.class, () -> builder.getShingle(buffer));
    }
}
| 412 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/AnomalyAttributionRunnerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.returntypes.DiVector;
/**
 * Tests for {@link AnomalyAttributionRunner}, the CLI runner that appends
 * per-dimension low/high anomaly-attribution columns to each CSV row.
 */
public class AnomalyAttributionRunnerTest {

    private int numberOfTrees;
    private int sampleSize;
    private int shingleSize;
    private int windowSize;
    private String delimiter;
    private boolean headerRow;
    private AnomalyAttributionRunner runner;
    // Mocked input/output streams for driving the runner without real files.
    private BufferedReader in;
    private PrintWriter out;

    @BeforeEach
    public void setUp() {
        numberOfTrees = 50;
        sampleSize = 100;
        shingleSize = 1;
        windowSize = 10;
        delimiter = ",";
        headerRow = true;
        runner = new AnomalyAttributionRunner();
        // Configure the runner through its command-line argument parser.
        runner.parse("--number-of-trees", Integer.toString(numberOfTrees), "--sample-size",
                Integer.toString(sampleSize), "--shingle-size", Integer.toString(shingleSize), "--window-size",
                Integer.toString(windowSize), "--delimiter", delimiter, "--header-row", Boolean.toString(headerRow));
        in = mock(BufferedReader.class);
        out = mock(PrintWriter.class);
    }

    @Test
    public void testRun() throws IOException {
        // Simulate a header line, two data rows, then end-of-stream.
        when(in.readLine()).thenReturn("a,b").thenReturn("1.0,2.0").thenReturn("4.0,5.0").thenReturn(null);
        runner.run(in, out);
        // A fresh forest attributes 0.0 everywhere, so only zero columns are appended.
        verify(out).println("a,b,anomaly_low_0,anomaly_high_0,anomaly_low_1,anomaly_high_1");
        verify(out).println("1.0,2.0,0.0,0.0,0.0,0.0");
        verify(out).println("4.0,5.0,0.0,0.0,0.0,0.0");
    }

    @Test
    public void testWriteHeader() {
        // The header gains one low/high column pair per input dimension.
        String[] line = new String[] { "a", "b" };
        runner.prepareAlgorithm(2);
        runner.writeHeader(line, out);
        verify(out).println("a,b,anomaly_low_0,anomaly_high_0,anomaly_low_1,anomaly_high_1");
    }

    @Test
    public void testProcessLine() {
        String[] line = new String[] { "1.0", "2.0" };
        runner.prepareAlgorithm(2);
        runner.processLine(line, out);
        verify(out).println("1.0,2.0,0.0,0.0,0.0,0.0");
    }

    @Test
    public void testAnomalyAttributionTransformer() {
        RandomCutForest forest = mock(RandomCutForest.class);
        when(forest.getDimensions()).thenReturn(2);
        AnomalyAttributionRunner.AnomalyAttributionTransformer transformer = new AnomalyAttributionRunner.AnomalyAttributionTransformer(
                forest);
        // Stub an attribution with distinct low/high entries to check column ordering.
        DiVector vector = new DiVector(2);
        vector.low[0] = 1.1;
        vector.high[1] = 2.2;
        when(forest.getAnomalyAttribution(new double[] { 1.0, 2.0 })).thenReturn(vector);
        assertEquals(Arrays.asList("1.1", "0.0", "0.0", "2.2"), transformer.getResultValues(1.0, 2.0));
        assertEquals(Arrays.asList("anomaly_low_0", "anomaly_high_0", "anomaly_low_1", "anomaly_high_1"),
                transformer.getResultColumnNames());
        // Rows that cannot be scored are padded with "NA" placeholders.
        assertEquals(Arrays.asList("NA", "NA", "NA", "NA"), transformer.getEmptyResultValue());
    }
}
| 413 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/SimpleDensityRunnerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.returntypes.DensityOutput;
/**
 * Tests for {@link SimpleDensityRunner}: verifies CSV header generation, row
 * processing (zero density for an untrained forest), and the nested density
 * transformer's value formatting.
 */
public class SimpleDensityRunnerTest {

    private int numberOfTrees;
    private int sampleSize;
    private int shingleSize;
    private int windowSize;
    private String delimiter;
    private boolean headerRow;
    private SimpleDensityRunner runner;
    private BufferedReader in;
    private PrintWriter out;

    @BeforeEach
    public void setUp() {
        numberOfTrees = 50;
        sampleSize = 100;
        shingleSize = 1;
        windowSize = 10;
        delimiter = ",";
        headerRow = true;

        // Configure the runner exactly as a command-line invocation would.
        runner = new SimpleDensityRunner();
        runner.parse(
                "--number-of-trees", Integer.toString(numberOfTrees),
                "--sample-size", Integer.toString(sampleSize),
                "--shingle-size", Integer.toString(shingleSize),
                "--window-size", Integer.toString(windowSize),
                "--delimiter", delimiter,
                "--header-row", Boolean.toString(headerRow));

        in = mock(BufferedReader.class);
        out = mock(PrintWriter.class);
    }

    @Test
    public void testRun() throws IOException {
        // One header row, two data rows, then end of stream.
        when(in.readLine()).thenReturn("a,b").thenReturn("1.0,2.0").thenReturn("4.0,5.0").thenReturn(null);

        runner.run(in, out);

        verify(out).println("a,b,prob_mass_0_up,prob_mass_0_down,prob_mass_1_up,prob_mass_1_down");
        verify(out).println("1.0,2.0,0.000000,0.000000,0.000000,0.000000");
        verify(out).println("4.0,5.0,0.000000,0.000000,0.000000,0.000000");
    }

    @Test
    public void testWriteHeader() {
        String[] header = new String[] { "a", "b" };
        runner.prepareAlgorithm(2);
        runner.writeHeader(header, out);
        // Per-dimension up/down probability-mass columns follow the input header.
        verify(out).println("a,b,prob_mass_0_up,prob_mass_0_down,prob_mass_1_up,prob_mass_1_down");
    }

    @Test
    public void testProcessLine() {
        String[] row = new String[] { "1.0", "2.0" };
        runner.prepareAlgorithm(2);
        runner.processLine(row, out);
        verify(out).println("1.0,2.0,0.000000,0.000000,0.000000,0.000000");
    }

    @Test
    public void testSimpleDensityTransformer() {
        RandomCutForest forest = mock(RandomCutForest.class);
        when(forest.getDimensions()).thenReturn(2);
        SimpleDensityRunner.SimpleDensityTransformer transformer = new SimpleDensityRunner.SimpleDensityTransformer(
                forest);

        // Stub a known density output so the formatted result values are predictable.
        DensityOutput expected = new DensityOutput(2, 1);
        expected.probMass.high[0] = 0.0;
        expected.probMass.low[0] = 0.5;
        expected.probMass.high[1] = 0.25;
        expected.probMass.low[1] = 0.25;
        expected.measure.high[0] = 0.0;
        expected.measure.low[0] = 8.0;
        expected.measure.high[1] = 8.0;
        expected.measure.low[1] = 4.0;
        when(forest.getSimpleDensity(new double[] { 1.0, 2.0 })).thenReturn(expected);

        assertEquals(Arrays.asList("0.000000", "400.000000", "400.000000", "200.000000"),
                transformer.getResultValues(1.0, 2.0));
        assertEquals(Arrays.asList("prob_mass_0_up", "prob_mass_0_down", "prob_mass_1_up", "prob_mass_1_down"),
                transformer.getResultColumnNames());
        assertEquals(Arrays.asList("NA", "NA", "NA", "NA"), transformer.getEmptyResultValue());
    }
}
| 414 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/UpdateOnlyTransformerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.RandomCutForest;
/**
 * Tests for {@link UpdateOnlyTransformer}: the transformer feeds each row into
 * the forest as an update and contributes no output columns or values.
 */
public class UpdateOnlyTransformerTest {

    private RandomCutForest forest;
    private UpdateOnlyTransformer transformer;

    @BeforeEach
    public void setUp() {
        forest = mock(RandomCutForest.class);
        transformer = new UpdateOnlyTransformer(forest);
    }

    @Test
    public void testGetResultValues() {
        List<String> produced = transformer.getResultValues(1.0, 2.0, 3.0);

        // No values are emitted, but the point must have been pushed into the forest.
        assertTrue(produced.isEmpty());
        verify(forest).update(new double[] { 1.0, 2.0, 3.0 });
    }

    @Test
    public void testGetEmptyResultValue() {
        assertTrue(transformer.getEmptyResultValue().isEmpty());
    }

    @Test
    public void testGetResultColumnNames() {
        assertTrue(transformer.getResultColumnNames().isEmpty());
    }
}
| 415 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/AnomalyScoreRunnerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collections;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.RandomCutForest;
/**
 * Tests for {@link AnomalyScoreRunner}: verifies the appended score column,
 * the zero score produced by an untrained forest, and the nested score
 * transformer's formatting.
 */
public class AnomalyScoreRunnerTest {

    private int numberOfTrees;
    private int sampleSize;
    private int shingleSize;
    private int windowSize;
    private String delimiter;
    private boolean headerRow;
    private AnomalyScoreRunner runner;
    private BufferedReader in;
    private PrintWriter out;

    @BeforeEach
    public void setUp() {
        numberOfTrees = 50;
        sampleSize = 100;
        shingleSize = 1;
        windowSize = 10;
        delimiter = ",";
        headerRow = true;

        // Configure the runner exactly as a command-line invocation would.
        runner = new AnomalyScoreRunner();
        runner.parse(
                "--number-of-trees", Integer.toString(numberOfTrees),
                "--sample-size", Integer.toString(sampleSize),
                "--shingle-size", Integer.toString(shingleSize),
                "--window-size", Integer.toString(windowSize),
                "--delimiter", delimiter,
                "--header-row", Boolean.toString(headerRow));

        in = mock(BufferedReader.class);
        out = mock(PrintWriter.class);
    }

    @Test
    public void testRun() throws IOException {
        // One header row, two data rows, then end of stream.
        when(in.readLine()).thenReturn("a,b,c").thenReturn("1.0,2.0,3.0").thenReturn("4.0,5.0,6.0").thenReturn(null);

        runner.run(in, out);

        verify(out).println("a,b,c,anomaly_score");
        verify(out).println("1.0,2.0,3.0,0.0");
        verify(out).println("4.0,5.0,6.0,0.0");
    }

    @Test
    public void testWriteHeader() {
        String[] header = new String[] { "a", "b", "c" };
        runner.prepareAlgorithm(3);
        runner.writeHeader(header, out);
        verify(out).println("a,b,c,anomaly_score");
    }

    @Test
    public void testProcessLine() {
        String[] row = new String[] { "1.0", "2.0", "3.0" };
        runner.prepareAlgorithm(3);
        runner.processLine(row, out);
        verify(out).println("1.0,2.0,3.0,0.0");
    }

    @Test
    public void testAnomalyScoreTransformer() {
        RandomCutForest forest = mock(RandomCutForest.class);
        AnomalyScoreRunner.AnomalyScoreTransformer transformer = new AnomalyScoreRunner.AnomalyScoreTransformer(forest);

        // Stub a known score so the stringified result value is predictable.
        when(forest.getAnomalyScore(new double[] { 1.0, 2.0, 3.0 })).thenReturn(11.0);

        assertEquals(Collections.singletonList("11.0"), transformer.getResultValues(1.0, 2.0, 3.0));
        assertEquals(Collections.singletonList("anomaly_score"), transformer.getResultColumnNames());
        assertEquals(Collections.singletonList("NA"), transformer.getEmptyResultValue());
    }
}
| 416 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/ImputeRunnerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.PrintWriter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ImputeRunner}: rows containing the missing-value marker are
 * rewritten with imputed values (0.0 for an untrained forest); complete rows
 * pass through unchanged.
 */
public class ImputeRunnerTest {

    private int numberOfTrees;
    private int sampleSize;
    private int windowSize;
    private String delimiter;
    private boolean headerRow;
    private String missingValueMarker;
    private ImputeRunner runner;
    private BufferedReader in;
    private PrintWriter out;

    @BeforeEach
    public void setUp() {
        numberOfTrees = 50;
        sampleSize = 100;
        windowSize = 10;
        delimiter = ",";
        missingValueMarker = "X";
        headerRow = true;

        // Configure the runner exactly as a command-line invocation would.
        runner = new ImputeRunner();
        runner.parse(
                "--number-of-trees", Integer.toString(numberOfTrees),
                "--sample-size", Integer.toString(sampleSize),
                "--window-size", Integer.toString(windowSize),
                "--delimiter", delimiter,
                "--missing-value-marker", missingValueMarker,
                "--header-row", Boolean.toString(headerRow));

        in = mock(BufferedReader.class);
        out = mock(PrintWriter.class);
    }

    @Test
    public void testRun() throws Exception {
        // Header, one complete row, one row with a missing value, end of stream.
        when(in.readLine()).thenReturn("a,b").thenReturn("1.0,2.0").thenReturn("4.0,X").thenReturn(null);

        runner.run(in, out);

        verify(out).println("a,b");
        verify(out).println("1.0,2.0");
        verify(out).println("0.0,0.0");
    }

    @Test
    public void testWriteHeader() {
        String[] header = new String[] { "a", "b" };
        runner.prepareAlgorithm(2);
        runner.writeHeader(header, out);
        // Imputation adds no extra columns; the header is echoed as-is.
        verify(out).println("a,b");
    }

    @Test
    public void testProcessLine() {
        runner.prepareAlgorithm(2);

        // A complete row is passed through untouched.
        String[] row = new String[] { "1.0", "2.0" };
        runner.processLine(row, out);
        verify(out).println("1.0,2.0");

        // A row with a missing value is replaced by the (zero) imputed point.
        row = new String[] { missingValueMarker, "2.0" };
        runner.processLine(row, out);
        verify(out).println("0.0,0.0");
    }
}
| 417 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/runner/ArgumentParserTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ArgumentParser}: default values, long-form flags, and
 * short-form flags. The expected time decay after parsing follows from the
 * window size (50) passed on the command line.
 */
public class ArgumentParserTest {

    private ArgumentParser parser;

    @BeforeEach
    public void setUp() {
        parser = new ArgumentParser("runner-class", "runner-description");
    }

    @Test
    public void testNew() {
        // A freshly constructed parser exposes the documented defaults.
        assertEquals(100, parser.getNumberOfTrees());
        assertEquals(256, parser.getSampleSize());
        assertEquals(0, parser.getWindowSize());
        assertEquals(0.0, parser.getTimeDecay());
        assertEquals(1, parser.getShingleSize());
        assertFalse(parser.getShingleCyclic());
        assertEquals(",", parser.getDelimiter());
        assertFalse(parser.getHeaderRow());
    }

    @Test
    public void testParse() {
        parser.parse(
                "--number-of-trees", "222",
                "--sample-size", "123",
                "--window-size", "50",
                "--shingle-size", "4",
                "--shingle-cyclic", "true",
                "--delimiter", "\t",
                "--header-row", "true");

        assertEquals(222, parser.getNumberOfTrees());
        assertEquals(123, parser.getSampleSize());
        assertEquals(50, parser.getWindowSize());
        // Time decay is derived from the window size (1 / 50 = 0.02).
        assertEquals(0.02, parser.getTimeDecay());
        assertEquals(4, parser.getShingleSize());
        assertTrue(parser.getShingleCyclic());
        assertEquals("\t", parser.getDelimiter());
        assertTrue(parser.getHeaderRow());
    }

    @Test
    public void testParseShortFlags() {
        // Same configuration expressed through the single-letter aliases.
        parser.parse("-n", "222", "-s", "123", "-w", "50", "-g", "4", "-c", "true", "-d", "\t");

        assertEquals(222, parser.getNumberOfTrees());
        assertEquals(123, parser.getSampleSize());
        assertEquals(50, parser.getWindowSize());
        assertEquals(0.02, parser.getTimeDecay());
        assertEquals(4, parser.getShingleSize());
        assertEquals("\t", parser.getDelimiter());
    }
}
| 418 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/state/RandomCutForestMapperTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.executor.PointStoreCoordinator;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
/**
 * Round-trip serialization tests for {@link RandomCutForestMapper}: a forest
 * mapped to state and back must preserve its configuration, its point store,
 * and its scoring behavior.
 *
 * Fix: the single-node round-trip test previously evaluated
 * {@code new Random(...).nextInt(1000)} inside the loop *condition*, which
 * re-drew a fresh bound (and allocated a new Random) on every iteration,
 * making the trip count nondeterministic. The bounds are now hoisted into
 * locals before the loops.
 */
public class RandomCutForestMapperTest {

    private static int dimensions = 5;
    private static int sampleSize = 128;

    /**
     * Provides compact forests covering both precisions with and without the
     * bounding-box cache enabled.
     */
    private static Stream<RandomCutForest> compactForestProvider() {
        RandomCutForest.Builder<?> builder = RandomCutForest.builder().compact(true).dimensions(dimensions)
                .sampleSize(sampleSize);
        RandomCutForest cachedDouble = builder.boundingBoxCacheFraction(new Random().nextDouble())
                .precision(Precision.FLOAT_64).build();
        RandomCutForest cachedFloat = builder.boundingBoxCacheFraction(new Random().nextDouble())
                .precision(Precision.FLOAT_32).build();
        RandomCutForest uncachedDouble = builder.boundingBoxCacheFraction(0.0).precision(Precision.FLOAT_64).build();
        RandomCutForest uncachedFloat = builder.boundingBoxCacheFraction(0.0).precision(Precision.FLOAT_32).build();
        return Stream.of(cachedDouble, cachedFloat, uncachedDouble, uncachedFloat);
    }

    private RandomCutForestMapper mapper;

    @BeforeEach
    public void setUp() {
        mapper = new RandomCutForestMapper();
        mapper.setSaveExecutorContextEnabled(true);
    }

    /**
     * Asserts that two compact forests agree on configuration and on the
     * contents of their backing point stores.
     */
    public void assertCompactForestEquals(RandomCutForest forest, RandomCutForest forest2) {
        assertEquals(forest.getDimensions(), forest2.getDimensions());
        assertEquals(forest.getSampleSize(), forest2.getSampleSize());
        assertEquals(forest.getOutputAfter(), forest2.getOutputAfter());
        assertEquals(forest.getNumberOfTrees(), forest2.getNumberOfTrees());
        assertEquals(forest.getTimeDecay(), forest2.getTimeDecay());
        assertEquals(forest.isStoreSequenceIndexesEnabled(), forest2.isStoreSequenceIndexesEnabled());
        assertEquals(forest.isCompact(), forest2.isCompact());
        assertEquals(forest.getPrecision(), forest2.getPrecision());
        assertEquals(forest.getBoundingBoxCacheFraction(), forest2.getBoundingBoxCacheFraction());
        assertEquals(forest.isCenterOfMassEnabled(), forest2.isCenterOfMassEnabled());
        assertEquals(forest.isParallelExecutionEnabled(), forest2.isParallelExecutionEnabled());
        assertEquals(forest.getThreadPoolSize(), forest2.getThreadPoolSize());

        PointStoreCoordinator coordinator = (PointStoreCoordinator) forest.getUpdateCoordinator();
        PointStoreCoordinator coordinator2 = (PointStoreCoordinator) forest2.getUpdateCoordinator();
        PointStore store = (PointStore) coordinator.getStore();
        PointStore store2 = (PointStore) coordinator2.getStore();
        assertArrayEquals(store.getRefCount(), store2.getRefCount());
        assertArrayEquals(store.getStore(), store2.getStore());
        assertEquals(store.getCapacity(), store2.getCapacity());
        assertEquals(store.size(), store2.size());
    }

    @ParameterizedTest
    @MethodSource("compactForestProvider")
    public void testRoundTripForCompactForest(RandomCutForest forest) {
        NormalMixtureTestData testData = new NormalMixtureTestData();
        for (double[] point : testData.generateTestData(sampleSize, dimensions)) {
            forest.update(point);
        }
        RandomCutForest forest2 = mapper.toModel(mapper.toState(forest));
        assertCompactForestEquals(forest, forest2);
    }

    @ParameterizedTest
    @MethodSource("compactForestProvider")
    public void testRoundTripForCompactForestSaveTreeState(RandomCutForest forest) {
        mapper.setSaveTreeStateEnabled(true);
        testRoundTripForCompactForest(forest);
    }

    @ParameterizedTest
    @MethodSource("compactForestProvider")
    public void testRoundTripForCompactForestSaveTreeStatePartial(RandomCutForest forest) {
        mapper.setSaveTreeStateEnabled(true);
        mapper.setPartialTreeStateEnabled(true);
        testRoundTripForCompactForest(forest);
    }

    @Test
    public void testRoundTripForEmptyForest() {
        Precision precision = Precision.FLOAT_64;
        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).sampleSize(sampleSize)
                .precision(precision).numberOfTrees(1).build();
        mapper.setSaveTreeStateEnabled(true);
        RandomCutForest forest2 = mapper.toModel(mapper.toState(forest));
        assertCompactForestEquals(forest, forest2);
    }

    @Test
    public void testRoundTripForSingleNodeForest() {
        int dimensions = 10;
        long seed = new Random().nextLong();
        // Print the seed so a failing run can be reproduced.
        System.out.println(" Seed " + seed);
        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).numberOfTrees(1)
                .precision(Precision.FLOAT_32).internalShinglingEnabled(false).randomSeed(seed).build();
        Random r = new Random(seed + 1);
        double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();

        // Hoisted: the bound was previously re-drawn from a fresh Random inside
        // the loop condition on every iteration.
        int updateCount = new Random().nextInt(1000);
        for (int i = 0; i < updateCount; i++) {
            forest.update(point);
        }

        RandomCutForestMapper treeStateMapper = new RandomCutForestMapper();
        treeStateMapper.setSaveExecutorContextEnabled(true);
        treeStateMapper.setSaveTreeStateEnabled(true);
        treeStateMapper.setPartialTreeStateEnabled(true);
        RandomCutForest copyForest = treeStateMapper.toModel(treeStateMapper.toState(forest));

        // Hoisted: previously a new Random(seed + 2) was constructed on every
        // iteration just to recompute the same bound.
        int comparisonCount = new Random(seed + 2).nextInt(1000);
        for (int i = 0; i < comparisonCount; i++) {
            double[] anotherPoint = r.ints(dimensions, 0, 50).asDoubleStream().toArray();
            assertEquals(forest.getAnomalyScore(anotherPoint), copyForest.getAnomalyScore(anotherPoint), 1e-10);
            forest.update(anotherPoint);
            copyForest.update(anotherPoint);
        }
    }

    /** Generates a deterministic 2-d point on a circle for the given index. */
    private static float[] generate(int input) {
        return new float[] { (float) (20 * Math.sin(input / 10.0)), (float) (20 * Math.cos(input / 10.0)) };
    }

    @Test
    void benchmarkMappers() {
        long seed = new Random().nextLong();
        // Print the seed so a failing run can be reproduced.
        System.out.println(" Seed " + seed);
        Random random = new Random(seed);
        RandomCutForest rcf = RandomCutForest.builder().dimensions(2 * 10).shingleSize(10).sampleSize(628)
                .internalShinglingEnabled(true).randomSeed(random.nextLong()).build();
        for (int i = 0; i < 10000; i++) {
            rcf.update(generate(i));
        }

        RandomCutForestMapper fullStateMapper = new RandomCutForestMapper();
        fullStateMapper.setSaveExecutorContextEnabled(true);
        fullStateMapper.setSaveTreeStateEnabled(true);
        // Repeated round trips must keep producing score-equivalent copies.
        for (int j = 0; j < 1000; j++) {
            RandomCutForest newRCF = fullStateMapper.toModel(fullStateMapper.toState(rcf));
            float[] test = generate(10000 + j);
            assertEquals(newRCF.getAnomalyScore(test), rcf.getAnomalyScore(test), 1e-6);
            rcf.update(test);
        }
    }
}
| 419 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/state/sampler/CompactSamplerMapperTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.sampler;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import com.amazon.randomcutforest.sampler.CompactSampler;
/**
 * Tests for {@link CompactSamplerMapper}, focusing on heap validation during
 * deserialization: a sampler state whose weight array violates the heap
 * property must be rejected when validation is enabled and accepted verbatim
 * when it is disabled.
 */
public class CompactSamplerMapperTest {
    // Shared sampler configuration used by all providers below.
    private static int sampleSize = 20;
    private static double lambda = 0.01;
    private static long seed = 4444;
    /**
     * Provides samplers that contain data: full and half-full, each with and
     * without sequence-index storage. 100 updates overfill the capacity-20
     * samplers so the "full" pair has experienced evictions.
     */
    public static Stream<Arguments> nonemptySamplerProvider() {
        CompactSampler fullSampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)
                .storeSequenceIndexesEnabled(false).build();
        CompactSampler fullSampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)
                .storeSequenceIndexesEnabled(true).build();
        Random random = new Random();
        long baseIndex = 10_000;
        for (int i = 0; i < 100; i++) {
            // Same point reference goes to both samplers; identical seeds keep
            // their accept/evict decisions in lockstep.
            int pointReference = random.nextInt();
            fullSampler1.update(pointReference, baseIndex + i);
            fullSampler2.update(pointReference, baseIndex + i);
        }
        CompactSampler partiallyFullSampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda)
                .randomSeed(seed).storeSequenceIndexesEnabled(false).build();
        CompactSampler partiallyFullSampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda)
                .randomSeed(seed).storeSequenceIndexesEnabled(true).build();
        for (int i = 0; i < sampleSize / 2; i++) {
            int pointReference = random.nextInt();
            partiallyFullSampler1.update(pointReference, baseIndex + i);
            partiallyFullSampler2.update(pointReference, baseIndex + i);
        }
        return Stream.of(Arguments.of("full sampler without sequence indexes", fullSampler1),
                Arguments.of("full sampler with sequence indexes", fullSampler2),
                Arguments.of("partially full sampler without sequence indexes", partiallyFullSampler1),
                Arguments.of("partially full sampler with sequence indexes", partiallyFullSampler2));
    }
    /**
     * Extends {@link #nonemptySamplerProvider()} with two empty samplers
     * (with and without sequence-index storage).
     */
    public static Stream<Arguments> samplerProvider() {
        CompactSampler emptySampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)
                .storeSequenceIndexesEnabled(false).build();
        CompactSampler emptySampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)
                .storeSequenceIndexesEnabled(true).build();
        return Stream.concat(nonemptySamplerProvider(),
                Stream.of(Arguments.of("empty sampler without sequence indexes", emptySampler1),
                        Arguments.of("empty sampler with sequence indexes", emptySampler2)));
    }
    private CompactSamplerMapper mapper;
    @BeforeEach
    public void setUp() {
        mapper = new CompactSamplerMapper();
        mapper.setValidateHeapEnabled(false);
    }
    // NOTE(review): this helper is not invoked by any test visible in this
    // file — presumably intended for a round-trip test; verify before removing.
    private void assertValidMapping(CompactSampler original, CompactSampler mapped) {
        assertArrayEquals(original.getWeightArray(), mapped.getWeightArray(), "different weight arrays");
        assertArrayEquals(original.getPointIndexArray(), mapped.getPointIndexArray(), "different point index arrays");
        assertEquals(original.getCapacity(), mapped.getCapacity());
        assertEquals(original.size(), mapped.size());
        assertEquals(original.getTimeDecay(), mapped.getTimeDecay());
        assertFalse(mapped.getEvictedPoint().isPresent());
        if (original.isStoreSequenceIndexesEnabled()) {
            assertTrue(mapped.isStoreSequenceIndexesEnabled());
            assertArrayEquals(original.getSequenceIndexArray(), mapped.getSequenceIndexArray(),
                    "different sequence index arrays");
        } else {
            assertFalse(mapped.isStoreSequenceIndexesEnabled());
            assertNull(mapped.getSequenceIndexArray());
        }
    }
    @ParameterizedTest
    @MethodSource("nonemptySamplerProvider")
    public void testRoundTripInvalidHeap(String description, CompactSampler sampler) {
        mapper.setValidateHeapEnabled(true);
        CompactSamplerState state = mapper.toState(sampler);
        // swap two weights in the weight array in order to violate the heap property
        // (parent at index vs. its left child at 2*index + 1)
        float[] weights = state.getWeight();
        int index = state.getSize() / 4;
        float temp = weights[index];
        weights[index] = weights[2 * index + 1];
        weights[2 * index + 1] = temp;
        // With validation on, the corrupted state must be rejected ...
        assertThrows(IllegalStateException.class, () -> mapper.toModel(state));
        // ... and with validation off it must be accepted verbatim.
        mapper.setValidateHeapEnabled(false);
        CompactSampler sampler2 = mapper.toModel(state);
        assertArrayEquals(sampler.getWeightArray(), sampler2.getWeightArray());
    }
}
| 420 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/state | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/state/store/PointStoreMapperTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state.store;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.PointStoreSmall;
/**
 * Round-trip serialization test for {@link PointStoreMapper}: a partially
 * filled point store mapped to state and back must report the same capacity,
 * size, dimensions, and backing array contents.
 */
public class PointStoreMapperTest {

    private PointStoreMapper mapper;

    @BeforeEach
    public void setUp() {
        mapper = new PointStoreMapper();
    }

    @Test
    public void testRoundTrip() {
        int dimensions = 2;
        int capacity = 4;
        PointStore store = new PointStoreSmall(dimensions, capacity);

        // Insert three points (the returned indexes were previously captured
        // into unused locals; they are not needed by the assertions below).
        store.add(new float[] { 1.1f, -22.2f }, 1);
        store.add(new float[] { 3.3f, -4.4f }, 2);
        store.add(new float[] { 10.1f, 100.1f }, 3);

        PointStore store2 = mapper.toModel(mapper.toState(store));

        assertEquals(capacity, store2.getCapacity());
        assertEquals(3, store2.size());
        assertEquals(dimensions, store2.getDimensions());
        assertArrayEquals(store.getStore(), store2.getStore());
    }
}
| 421 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/sampler/CompactSamplerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.sampler;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
import com.amazon.randomcutforest.config.Config;
/**
 * Unit tests for {@code CompactSampler}, the array-backed, time-decayed
 * reservoir sampler. Most tests are parameterized over three sampler
 * configurations supplied by {@link SamplerProvider}: with and without stored
 * sequence indexes, and with an initial accept fraction of 1.0.
 */
public class CompactSamplerTest {
    // shared configuration for every provided sampler
    private static int sampleSize = 256;
    private static double lambda = 0.01;
    private static long seed = 42L;
    /**
     * Supplies (random, sampler) pairs to the parameterized tests. The Random
     * instances are Mockito spies so individual tests can stub nextDouble() to
     * force accept/reject decisions. Note that sampler1 and sampler3
     * deliberately share random1.
     */
    private static class SamplerProvider implements ArgumentsProvider {
        @Override
        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {
            Random random1 = spy(new Random(seed));
            CompactSampler sampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random1)
                    .initialAcceptFraction(0.1).storeSequenceIndexesEnabled(false).build();
            Random random2 = spy(new Random(seed));
            CompactSampler sampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random2)
                    .initialAcceptFraction(0.1).storeSequenceIndexesEnabled(true).build();
            CompactSampler sampler3 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random1)
                    .initialAcceptFraction(1.0).storeSequenceIndexesEnabled(false).build();
            return Stream.of(Arguments.of(random1, sampler1), Arguments.of(random2, sampler2),
                    Arguments.of(random1, sampler3));
        }
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testNew(Random random, CompactSampler sampler) {
        // test CompactSampler fields not defined in the IStreamSampler interface
        assertEquals(lambda, sampler.getTimeDecay());
        assertNotNull(sampler.getWeightArray());
        assertNotNull(sampler.getPointIndexArray());
        long seq = new Random().nextLong();
        sampler.setMaxSequenceIndex(seq);
        assertEquals(sampler.getMaxSequenceIndex(), seq);
        // a freshly built sampler has accepted nothing yet
        assertFalse(sampler.isFull());
        assertFalse(sampler.isReady());
        // time decay is settable both directly and through the Config key
        double newLambda = new Random().nextDouble();
        sampler.setTimeDecay(newLambda);
        assertEquals(sampler.getConfig(Config.TIME_DECAY), newLambda);
        sampler.setConfig(Config.TIME_DECAY, lambda + newLambda);
        assertEquals(sampler.getTimeDecay(), lambda + newLambda, 1e-10);
        // changing time decay records the max sequence index as the update time
        assertEquals(sampler.getMostRecentTimeDecayUpdate(), seq);
        sampler.setMostRecentTimeDecayUpdate(0L);
        assertEquals(sampler.getMostRecentTimeDecayUpdate(), 0L);
        // unknown configuration keys are rejected
        assertThrows(IllegalArgumentException.class, () -> sampler.getConfig("foo"));
        assertThrows(IllegalArgumentException.class, () -> sampler.setConfig("bar", 0L));
        if (sampler.isStoreSequenceIndexesEnabled()) {
            assertNotNull(sampler.getSequenceIndexArray());
        } else {
            assertNull(sampler.getSequenceIndexArray());
        }
        // addPoint without a preceding accepted acceptPoint is an error,
        // but a null point (nothing to add) is tolerated
        assertThrows(IllegalStateException.class, () -> sampler.addPoint(1));
        assertDoesNotThrow(() -> sampler.addPoint(null));
    }
    @Test
    public void testNewFromExistingWeightsParameters() {
        int sampleSize = 3;
        double lambda = 0.1;
        // weight array is valid heap
        float[] weight = { 0.4f, 0.3f, 0.2f };
        int[] pointIndex = { 1, 2, 3 };
        // storeSequenceIndexesEnabled(true) with a null sequenceIndex array is inconsistent
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(sampleSize).size(weight.length).timeDecay(lambda)
                        .random(new Random()).weight(weight).pointIndex(pointIndex).sequenceIndex(null)
                        .storeSequenceIndexesEnabled(true).validateHeap(true).build());
    }
    @Test
    public void testNewFromExistingWeights() {
        int sampleSize = 3;
        double lambda = 0.1;
        // weight array is valid heap
        float[] weight = { 0.4f, 0.3f, 0.2f };
        int[] pointIndex = { 1, 2, 3 };
        CompactSampler sampler = new CompactSampler.Builder<>().capacity(sampleSize).size(weight.length)
                .timeDecay(lambda).random(new Random()).weight(weight).pointIndex(pointIndex).sequenceIndex(null)
                .validateHeap(true).build();
        assertFalse(sampler.getEvictedPoint().isPresent());
        assertFalse(sampler.isStoreSequenceIndexesEnabled());
        assertEquals(3, sampler.size());
        assertNull(sampler.getSequenceIndexArray());
        // the provided arrays are reflected verbatim in the sampler state
        for (int i = 0; i < 3; i++) {
            assertEquals(weight[i], sampler.weight[i]);
            assertEquals(pointIndex[i], sampler.pointIndex[i]);
        }
        // changing time decay accumulates an adjustment that is applied
        // (and reset) the next time the weighted sample is materialized
        sampler.setMaxSequenceIndex(10L);
        sampler.setTimeDecay(lambda * 2);
        assertNotEquals(sampler.accumuluatedTimeDecay, 0);
        sampler.getWeightedSample();
        assertEquals(sampler.accumuluatedTimeDecay, 0);
    }
    @Test
    public void testUniformSampler() {
        // a uniform sampler is a compact sampler with zero time decay
        CompactSampler uniformSampler = CompactSampler.uniformSampler(sampleSize, seed, false);
        assertFalse(uniformSampler.getEvictedPoint().isPresent());
        assertFalse(uniformSampler.isReady());
        assertFalse(uniformSampler.isFull());
        assertEquals(sampleSize, uniformSampler.getCapacity());
        assertEquals(0, uniformSampler.size());
        assertEquals(0.0, uniformSampler.getTimeDecay());
    }
    @Test
    public void testBuilderClass() {
        // exercises the builder's argument validation: capacity must be positive,
        // the initial accept fraction must be positive, and the weight /
        // pointIndex / sequenceIndex arrays must be provided together with
        // consistent (non-empty) lengths
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(0).initialAcceptFraction(0.5).build());
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(1).initialAcceptFraction(0).build());
        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1).size(1).build());
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(1).validateHeap(true).build());
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 }).build());
        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)
                .sequenceIndex(new long[] { 0 }).storeSequenceIndexesEnabled(true).build());
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(1).pointIndex(new int[] { 0 }).build());
        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)
                .weight(new float[0]).pointIndex(new int[] { 0 }).build());
        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)
                .weight(new float[] { 0 }).pointIndex(new int[0]).build());
        assertThrows(IllegalArgumentException.class,
                () -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 }).pointIndex(new int[] { 0 })
                        .sequenceIndex(new long[0]).storeSequenceIndexesEnabled(true).build());
        assertDoesNotThrow(() -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 })
                .pointIndex(new int[] { 0 }).sequenceIndex(new long[] { 0 }).storeSequenceIndexesEnabled(true).build());
        assertDoesNotThrow(() -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 })
                .pointIndex(new int[] { 0 }).build());
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testAddPoint(Random random, CompactSampler sampler) {
        // stub the random draws consumed by acceptPoint; the stubbed values
        // determine the computed weights (assumes two draws per accept —
        // NOTE(review): confirm against the CompactSampler implementation)
        when(random.nextDouble()).thenReturn(0.0).thenReturn(0.5).thenReturn(0.0).thenReturn(0.01).thenReturn(0.0)
                .thenReturn(0.99);
        sampler.acceptPoint(10L);
        double weight1 = sampler.acceptPointState.getWeight();
        sampler.addPoint(1);
        sampler.acceptPoint(11L);
        double weight2 = sampler.acceptPointState.getWeight();
        // acceptstate is non-null
        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(12, 2.0f, 0L));
        sampler.addPoint(12);
        // explicit weights must be non-negative; adding with an explicit
        // weight clears the pending accept state
        assertThrows(IllegalArgumentException.class, () -> sampler.acceptPoint(12L, -1f));
        sampler.acceptPoint(12L, 0f);
        assertNull(sampler.acceptPointState);
        sampler.acceptPoint(12L);
        double weight3 = sampler.acceptPointState.getWeight();
        sampler.addPoint(123);
        assertEquals(3, sampler.size());
        assertEquals(sampleSize, sampler.getCapacity());
        // the weighted sample must contain exactly the three added points,
        // ordered (after sorting) by the stubbed weights: 123 < 1 < 12
        List<Weighted<Integer>> samples = sampler.getWeightedSample();
        samples.sort(Comparator.comparing(Weighted<Integer>::getWeight));
        assertEquals(3, samples.size());
        assertEquals(123, samples.get(0).getValue());
        assertEquals(weight3, samples.get(0).getWeight());
        assertEquals(1, samples.get(1).getValue());
        assertEquals(weight1, samples.get(1).getWeight());
        assertEquals(12, samples.get(2).getValue());
        assertEquals(weight2, samples.get(2).getWeight());
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testAcceptPoint(Random random, CompactSampler sampler) {
        // addPoint with a null point only tolerates weight 0
        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(null, 1, 0L));
        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(null, -1, 0L));
        assertDoesNotThrow(() -> sampler.addPoint(0, 0f, 0L));
        assertEquals(sampler.size, 1);
        // The sampler should accept all samples until initial fraction
        for (int i = 0; i < sampleSize * sampler.initialAcceptFraction; i++) {
            assertTrue(sampler.acceptPoint(i));
            assertNotNull(sampler.acceptPointState);
            sampler.addPoint(i);
        }
        // beyond the initial fraction the accept probability drops below 1
        assertTrue(sampler.initialAcceptProbability(sampler.size) < 1.0);
        for (int i = 0; i < sampleSize * 10; i++) {
            if (sampler.acceptPoint(i)) {
                sampler.addPoint(i);
            }
        }
        assertTrue(sampler.isFull());
        assertTrue(sampler.isReady());
        // once full, addPoint requires a prior accepted acceptPoint, and
        // explicit-weight adds are rejected
        assertThrows(IllegalStateException.class, () -> sampler.addPoint(sampleSize));
        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(sampleSize, 1.0f, 0L));
        sampler.setTimeDecay(0);
        // we should only accept sequences of value samplesize - 1 or higher
        assertThrows(IllegalStateException.class, () -> sampler.acceptPoint(sampleSize - 2));
        // In subsequent calls to sample, either the result is empty or else
        // the new weight is smaller than the evicted weight
        int numAccepted = 0;
        for (int i = 10 * sampleSize; i < 12 * sampleSize; i++) {
            if (sampler.acceptPoint(i)) {
                numAccepted++;
                assertTrue(sampler.getEvictedPoint().isPresent());
                assertNotNull(sampler.acceptPointState);
                Weighted<Integer> evictedPoint = (Weighted<Integer>) sampler.getEvictedPoint().get();
                assertTrue(sampler.acceptPointState.getWeight() < evictedPoint.getWeight());
                sampler.addPoint(i);
            }
        }
        assertTrue(numAccepted > 0, "the sampler did not accept any points");
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testUpdate(Random random, CompactSampler compactSampler) {
        // spy so we can count addPoint invocations made by update()
        CompactSampler sampler = spy(compactSampler);
        for (int i = 0; i < sampleSize * sampler.initialAcceptFraction; i++) {
            assertTrue(sampler.update(i, i));
        }
        int num = (int) Math.ceil(sampleSize * sampler.initialAcceptFraction);
        // all points should be added to the sampler until the sampler is full
        assertEquals(num, sampler.size());
        verify(sampler, times(num)).addPoint(any());
        reset(sampler);
        int numSampled = 0;
        for (int i = num; i < 2 * sampleSize; i++) {
            if (sampler.update(i, i)) {
                numSampled++;
            }
        }
        // past the initial fraction some, but not all, updates are sampled
        // (unless the initial accept fraction is large)
        assertTrue(numSampled > 0, "no new values were sampled");
        assertTrue(sampler.initialAcceptFraction > 0.5 || numSampled < 2 * sampleSize - num, "all values were sampled");
        verify(sampler, times(numSampled)).addPoint(any());
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testGetScore(Random random, CompactSampler sampler) {
        // stub the weight draws; the expected weight of a point at sequence
        // index s with draw u is -lambda * s + log(-log(u)), matching the
        // expectedScores computed below
        when(random.nextDouble()).thenReturn(0.0).thenReturn(0.25).thenReturn(0.0).thenReturn(0.75).thenReturn(0.0)
                .thenReturn(0.50).thenReturn(0.5).thenReturn(0.1).thenReturn(1.3);
        sampler.update(1, 101);
        sampler.update(2, 102);
        sampler.update(3, 103);
        double[] expectedScores = new double[3];
        expectedScores[0] = -lambda * 101L + Math.log(-Math.log(0.25));
        expectedScores[1] = -lambda * 102L + Math.log(-Math.log(0.75));
        expectedScores[2] = -lambda * 103L + Math.log(-Math.log(0.50));
        Arrays.sort(expectedScores);
        assertFalse(sampler.acceptPoint(104));
        List<Weighted<Integer>> samples = sampler.getWeightedSample();
        samples.sort(Comparator.comparing(Weighted<Integer>::getWeight));
        for (int i = 0; i < 3; i++) {
            assertEquals(expectedScores[i], samples.get(i).getWeight(), EPSILON);
        }
    }
    @ParameterizedTest
    @ArgumentsSource(SamplerProvider.class)
    public void testValidateHeap(Random random, CompactSampler sampler) {
        // populate the heap
        for (int i = 0; i < 2 * sampleSize; i++) {
            sampler.update(i, i);
        }
        float[] weightArray = sampler.getWeightArray();
        // swapping a weight value with one of its children will break the heap property
        int i = sampleSize / 4;
        float f = weightArray[i];
        weightArray[i] = weightArray[2 * i + 1];
        weightArray[2 * i + 1] = f;
        // the builder's heap validation must detect the corruption
        assertThrows(IllegalStateException.class,
                () -> new CompactSampler.Builder<>().capacity(sampleSize).size(sampleSize).timeDecay(lambda)
                        .random(random).weight(weightArray).pointIndex(sampler.getPointIndexArray())
                        .sequenceIndex(sampler.getSequenceIndexArray()).validateHeap(true).build());
    }
}
| 422 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/imputation/ConditionalSampleSummarizerTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.imputation;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.returntypes.ConditionalTreeSample;
import com.amazon.randomcutforest.returntypes.SampleSummary;
import com.amazon.randomcutforest.summarization.Summarizer;
/**
 * Tests for {@code ConditionalSampleSummarizer}, which aggregates per-tree
 * conditional samples (partial-point imputations) into a
 * {@code SampleSummary}.
 */
public class ConditionalSampleSummarizerTest {
    // query point whose trailing two coordinates (indexes 2 and 3) are
    // treated as missing
    private float[] queryPoint;
    private int[] missingIndexes;
    ConditionalSampleSummarizer summarizer;
    @BeforeEach
    public void setUp() {
        queryPoint = new float[] { 50, 70, 90, 100 };
        missingIndexes = new int[] { 2, 3 };
        summarizer = new ConditionalSampleSummarizer(missingIndexes, queryPoint, 0.2, true);
    }
    @Test
    public void testSummarize() {
        // an empty sample list cannot be summarized
        assertThrows(IllegalArgumentException.class, () -> summarizer.summarize(Collections.emptyList()));
        Random random = new Random(42);
        ArrayList<ConditionalTreeSample> list = new ArrayList<>();
        // 999 samples clustered with the last coordinate in [100, 102) ...
        for (int i = 0; i < 999; i++) {
            float[] point = new float[] { 50, 70, 90, 100 + 2 * random.nextFloat() };
            list.add(new ConditionalTreeSample(i, null, Summarizer.L1distance(point, queryPoint), point));
        }
        // ... plus one distant outlier with a large distance
        list.add(new ConditionalTreeSample(999, null, 100, new float[] { 50, 70, 90, 200 }));
        // without the summarization flag no typical points are produced
        SampleSummary summary = summarizer.summarize(list, false);
        assertNull(summary.summaryPoints);
        SampleSummary summaryTwo = summarizer.summarize(list, true);
        assertNotNull(summaryTwo.summaryPoints);
        for (float[] element : summaryTwo.summaryPoints) {
            // summary points keep the observed coordinates and impute the last
            // one inside the clustered range; the outlier must not dominate
            assertEquals(element.length, 4);
            assertEquals(element[0], 50);
            assertEquals(element[1], 70);
            assertEquals(element[2], 90);
            assertTrue(100 < element[3] && element[3] < 102);
        }
        // mean/deviation have one entry per missing coordinate; coordinate 2
        // is constant (90) across all samples, so its deviation is zero
        assertEquals(2, summaryTwo.mean.length);
        assertEquals(0, summaryTwo.deviation[0]);
    }
    @Test
    public void testZero() {
        // identical zero-distance samples must collapse to a single summary point
        ArrayList<ConditionalTreeSample> list = new ArrayList<>();
        for (int i = 0; i < 1000; i++) {
            list.add(new ConditionalTreeSample(i, null, 0, queryPoint));
        }
        // use a JUnit assertion instead of the `assert` keyword so the check
        // is not silently skipped when JVM assertions (-ea) are disabled
        assertEquals(1, summarizer.summarize(list, true).summaryPoints.length);
    }
}
| 423 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/imputation/ImputeVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.imputation;
import static com.amazon.randomcutforest.CommonUtils.defaultScoreSeenFunction;
import static com.amazon.randomcutforest.CommonUtils.defaultScoreUnseenFunction;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.tree.BoundingBox;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
import com.amazon.randomcutforest.tree.NodeView;
/**
 * Tests for {@code ImputeVisitor}, the tree visitor that fills in missing
 * coordinates of a query point and tracks an anomaly rank for the imputed
 * result. Tree nodes are mocked so each test controls depth, mass, and
 * bounding boxes directly.
 */
public class ImputeVisitorTest {
    private float[] queryPoint;
    private int numberOfMissingValues;
    private int[] missingIndexes;
    private ImputeVisitor visitor;
    private ImputeVisitor anotherVisitor;
    @BeforeEach
    public void setUp() {
        // create a point where the 2nd value is missing
        // The second value of queryPoint and the 2nd and 3rd values of missingIndexes
        // should be ignored in all tests
        queryPoint = new float[] { -1.0f, 1000.0f, 3.0f };
        numberOfMissingValues = 1;
        missingIndexes = new int[] { 1, 99, -888 };
        visitor = new ImputeVisitor(queryPoint, numberOfMissingValues, missingIndexes);
        anotherVisitor = new ImputeVisitor(queryPoint, queryPoint, null, null, 0.8, 42);
    }
    @Test
    public void testNew() {
        // the visitor copies the query point; ranks start at the sentinel value
        assertArrayEquals(queryPoint, visitor.getResult().leafPoint);
        assertNotSame(queryPoint, visitor.getResult());
        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());
        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.adjustedRank());
        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, anotherVisitor.getAnomalyRank());
        // the centrality-weighted constructor adjusts the rank
        assertNotEquals(ImputeVisitor.DEFAULT_INIT_VALUE, anotherVisitor.adjustedRank());
        assertEquals(visitor.getDistance(), Double.MAX_VALUE);
        assertFalse(visitor.isConverged());
        // centrality must lie in [0, 1] and missing indexes must be valid positions
        assertThrows(IllegalArgumentException.class,
                () -> new ImputeVisitor(queryPoint, queryPoint, null, null, -1.0, 42));
        assertThrows(IllegalArgumentException.class,
                () -> new ImputeVisitor(queryPoint, queryPoint, null, null, 2.0, 42));
        assertThrows(IllegalArgumentException.class,
                () -> new ImputeVisitor(queryPoint, queryPoint, null, new int[] { -1 }, 1.0, 42));
        assertThrows(IllegalArgumentException.class,
                () -> new ImputeVisitor(queryPoint, queryPoint, null, new int[] { 4 }, 1.0, 42));
    }
    @Test
    public void testCopyConstructor() {
        // the copy holds an equal but distinct result point
        ImputeVisitor copy = new ImputeVisitor(visitor);
        assertArrayEquals(queryPoint, copy.getResult().leafPoint);
        assertNotSame(copy.getResult(), visitor.getResult());
        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());
    }
    @Test
    public void testAcceptLeafEquals() {
        // leaf matches the query on all non-missing coordinates, so the
        // missing coordinate is imputed from the leaf and the "seen" score applies
        float[] point = { queryPoint[0], 2.0f, queryPoint[2] };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getLiftedLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafDepth = 100;
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(leafNode, leafDepth);
        anotherVisitor.acceptLeaf(leafNode, leafDepth);
        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };
        assertArrayEquals(expected, visitor.getResult().leafPoint);
        assertEquals(visitor.getDistance(), 0, 1e-6);
        assertEquals(defaultScoreSeenFunction(leafDepth, leafMass), visitor.getAnomalyRank());
    }
    @Test
    public void testAcceptLeafEqualsZeroDepth() {
        // a matching leaf at depth zero yields an anomaly rank of zero
        float[] point = { queryPoint[0], 2.0f, queryPoint[2] };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getLiftedLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafDepth = 0;
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(leafNode, leafDepth);
        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };
        assertArrayEquals(expected, visitor.getResult().leafPoint);
        assertEquals(0.0, visitor.getAnomalyRank());
    }
    @Test
    public void testAcceptLeafNotEquals() {
        // leaf differs on a non-missing coordinate, so the "unseen" score applies;
        // the imputed coordinate still comes from the leaf
        float[] point = { queryPoint[0], 2.0f, -111.11f };
        INodeView leafNode = mock(NodeView.class);
        when(leafNode.getLeafPoint()).thenReturn(point);
        when(leafNode.getLiftedLeafPoint()).thenReturn(point);
        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int leafDepth = 100;
        int leafMass = 10;
        when(leafNode.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(leafNode, leafDepth);
        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };
        assertArrayEquals(expected, visitor.getResult().leafPoint);
        assertEquals(defaultScoreUnseenFunction(leafDepth, leafMass), visitor.getAnomalyRank());
    }
    @Test
    public void testAccept() {
        // first visit a leaf, then an interior node; the rank must update as
        // the separation-probability-weighted mix of the node score and the
        // previous rank
        float[] point = { queryPoint[0], 2.0f, -111.11f };
        INodeView node = mock(NodeView.class);
        when(node.getLeafPoint()).thenReturn(point);
        when(node.getLiftedLeafPoint()).thenReturn(point);
        when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));
        int depth = 100;
        int leafMass = 10;
        when(node.getMass()).thenReturn(leafMass);
        visitor.acceptLeaf(node, depth);
        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };
        assertArrayEquals(expected, visitor.getResult().leafPoint);
        assertEquals(defaultScoreUnseenFunction(depth, leafMass), visitor.getAnomalyRank());
        depth--;
        // widen the bounding box so the imputed point can be separated from it
        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 99.0f, 4.0f, -19.0f });
        when(node.getBoundingBox()).thenReturn(boundingBox);
        when(node.probailityOfSeparation(any()))
                .thenReturn(CommonUtils.getProbabilityOfSeparation(boundingBox, expected));
        when(node.getMass()).thenReturn(leafMass + 2);
        double oldRank = visitor.getAnomalyRank();
        visitor.accept(node, depth);
        assertArrayEquals(expected, visitor.getResult().leafPoint);
        double p = CommonUtils.getProbabilityOfSeparation(boundingBox, expected);
        double expectedRank = p * defaultScoreUnseenFunction(depth, node.getMass()) + (1 - p) * oldRank;
        assertEquals(expectedRank, visitor.getAnomalyRank(), EPSILON);
    }
    @Test
    public void testNewCopy() {
        // newCopy behaves like the copy constructor
        ImputeVisitor copy = (ImputeVisitor) visitor.newCopy();
        assertArrayEquals(queryPoint, copy.getResult().leafPoint);
        assertNotSame(copy.getResult(), visitor.getResult());
        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());
    }
    @Test
    public void testMerge() {
        // combine() keeps the result of the visitor with the better (lower) rank,
        // regardless of which side initiates the merge
        float[] otherPoint = new float[] { 99, 100, 101 };
        ImputeVisitor other = new ImputeVisitor(otherPoint, 0, new int[0]);
        // set other.rank to a small value
        NodeView node = mock(NodeView.class);
        when(node.getLeafPoint()).thenReturn(new float[] { 0, 0, 0 });
        when(node.getLiftedLeafPoint()).thenReturn(new float[] { 0, 0, 0 });
        when(node.getBoundingBox()).thenReturn(new BoundingBox(new float[] { 0, 0, 0 }));
        other.acceptLeaf(node, 99);
        assertTrue(other.getAnomalyRank() < visitor.getAnomalyRank());
        other.combine(visitor);
        assertArrayEquals(otherPoint, other.getResult().leafPoint);
        visitor.combine(other);
        assertArrayEquals(otherPoint, visitor.getResult().leafPoint);
    }
}
| 424 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/executor/UpdateResultTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static org.junit.jupiter.api.Assertions.assertFalse;
import org.junit.jupiter.api.Test;
/**
 * Tests for the static no-op instance of {@code UpdateResult}.
 */
public class UpdateResultTest {
    @Test
    public void testNoop() {
        // a no-op result reports no state change and carries neither an
        // added nor a deleted point
        UpdateResult<Integer> noop = UpdateResult.noop();
        assertFalse(noop.isStateChange());
        assertFalse(noop.getAddedPoint().isPresent());
        assertFalse(noop.getDeletedPoint().isPresent());
    }
}
| 425 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/executor/SamplerPlusTreeTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Optional;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import com.amazon.randomcutforest.sampler.ISampled;
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.tree.ITree;
/**
 * Verifies the add/evict coordination between a stream sampler and its
 * paired tree inside {@code SamplerPlusTree}.
 */
@ExtendWith(MockitoExtension.class)
public class SamplerPlusTreeTest {
    @Mock
    private ITree<Integer, double[]> tree;
    @Mock
    private IStreamSampler<Integer> sampler;
    private SamplerPlusTree<Integer, double[]> samplerPlusTree;
    @BeforeEach
    public void setUp() {
        samplerPlusTree = new SamplerPlusTree<>(sampler, tree);
    }
    @Test
    public void testUpdateAddPoint() {
        // the sampler accepts the point and nothing is evicted
        int newPointRef = 2;
        long seqIndex = 100L;
        int mergedPointRef = 222;
        when(sampler.acceptPoint(seqIndex)).thenReturn(true);
        when(sampler.getEvictedPoint()).thenReturn(Optional.empty());
        when(tree.addPoint(newPointRef, seqIndex)).thenReturn(mergedPointRef);
        UpdateResult<Integer> outcome = samplerPlusTree.update(newPointRef, seqIndex);
        // the reference returned by the tree (possibly a pre-existing
        // duplicate) is what must be recorded in the sampler and the result
        assertTrue(outcome.getAddedPoint().isPresent());
        assertEquals(mergedPointRef, outcome.getAddedPoint().get());
        assertFalse(outcome.getDeletedPoint().isPresent());
        verify(tree, never()).deletePoint(any(), anyLong());
        verify(sampler, times(1)).addPoint(mergedPointRef);
    }
    @Test
    public void testUpdateAddAndDeletePoint() {
        // the sampler accepts the point and evicts an older one in exchange
        int newPointRef = 2;
        long seqIndex = 100L;
        int mergedPointRef = 222;
        int evictedRef = 333;
        long evictedSeqIndex = 50L;
        ISampled<Integer> evicted = mock(ISampled.class);
        when(evicted.getValue()).thenReturn(evictedRef);
        when(evicted.getSequenceIndex()).thenReturn(evictedSeqIndex);
        when(sampler.acceptPoint(seqIndex)).thenReturn(true);
        when(sampler.getEvictedPoint()).thenReturn(Optional.of(evicted));
        when(tree.addPoint(newPointRef, seqIndex)).thenReturn(mergedPointRef);
        UpdateResult<Integer> outcome = samplerPlusTree.update(newPointRef, seqIndex);
        assertTrue(outcome.getAddedPoint().isPresent());
        assertEquals(mergedPointRef, outcome.getAddedPoint().get());
        assertTrue(outcome.getDeletedPoint().isPresent());
        assertEquals(evictedRef, outcome.getDeletedPoint().get());
        // the evicted point must be removed from the tree exactly once
        verify(tree, times(1)).deletePoint(evictedRef, evictedSeqIndex);
        verify(sampler, times(1)).addPoint(mergedPointRef);
    }
    @Test
    public void testRejectPoint() {
        // a rejected point must leave both the tree and the sampler untouched
        when(sampler.acceptPoint(anyLong())).thenReturn(false);
        UpdateResult<Integer> outcome = samplerPlusTree.update(2, 100L);
        assertFalse(outcome.isStateChange());
        verify(tree, never()).addPoint(any(), anyLong());
        verify(tree, never()).deletePoint(any(), anyLong());
        verify(sampler, never()).addPoint(any());
    }
}
| 426 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/executor/ForestUpdateExecutorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static com.amazon.randomcutforest.util.ArrayUtils.cleanCopy;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.junit.jupiter.MockitoExtension;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IComponentModel;
import com.amazon.randomcutforest.store.PointStore;
@ExtendWith(MockitoExtension.class)
public class ForestUpdateExecutorTest {
    // forest-wide fixture sizes shared by both executors under test
    private static final int numberOfTrees = 10;
    private static final int threadPoolSize = 2;
    // captures the list of per-tree update results handed to the coordinator
    @Captor
    private ArgumentCaptor<List<UpdateResult<Integer>>> updateResultCaptor;
    /**
     * Supplies two executors built over identical forests of mocked component
     * models: a sequential executor and a parallel executor backed by a small
     * thread pool. The update coordinators are spies so tests can verify the
     * completeUpdate callback.
     */
    private static class TestExecutorProvider implements ArgumentsProvider {
        @Override
        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {
            ComponentList<Integer, float[]> sequentialComponents = new ComponentList<>();
            ComponentList<Integer, float[]> parallelComponents = new ComponentList<>();
            for (int i = 0; i < numberOfTrees; i++) {
                sequentialComponents.add(mock(IComponentModel.class));
                parallelComponents.add(mock(IComponentModel.class));
            }
            PointStore pointStore = mock(PointStore.class);
            IStateCoordinator<Integer, float[]> sequentialUpdateCoordinator = spy(
                    new PointStoreCoordinator<>(pointStore));
            AbstractForestUpdateExecutor<Integer, float[]> sequentialExecutor = new SequentialForestUpdateExecutor<>(
                    sequentialUpdateCoordinator, sequentialComponents);
            IStateCoordinator<Integer, float[]> parallelUpdateCoordinator = spy(
                    new PointStoreCoordinator<>(pointStore));
            AbstractForestUpdateExecutor<Integer, float[]> parallelExecutor = new ParallelForestUpdateExecutor<>(
                    parallelUpdateCoordinator, parallelComponents, threadPoolSize);
            return Stream.of(sequentialExecutor, parallelExecutor).map(Arguments::of);
        }
    }
    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testUpdate(AbstractForestUpdateExecutor<Integer, float[]> executor) {
        // partition the mocked component models into three behaviors:
        // the first 4 report add+delete, the next 4 report add-only, and
        // the remaining models report a no-op update
        int addAndDelete = 4;
        int addOnly = 4;
        ComponentList<Integer, ?> components = executor.components;
        for (int i = 0; i < addAndDelete; i++) {
            IComponentModel<Integer, ?> model = components.get(i);
            UpdateResult<Integer> result = new UpdateResult<>(i, 2 * i);
            when(model.update(any(), anyLong())).thenReturn(result);
        }
        for (int i = addAndDelete; i < addAndDelete + addOnly; i++) {
            IComponentModel<Integer, ?> model = components.get(i);
            UpdateResult<Integer> result = UpdateResult.<Integer>builder().addedPoint(i).build();
            when(model.update(any(), anyLong())).thenReturn(result);
        }
        for (int i = addAndDelete + addOnly; i < numberOfTrees; i++) {
            IComponentModel<Integer, ?> model = components.get(i);
            when(model.update(any(), anyLong())).thenReturn(UpdateResult.noop());
        }
        float[] point = new float[] { 1.0f };
        executor.update(point);
        // every model must be asked to update with sequence index 0
        executor.components.forEach(model -> verify(model).update(any(), eq(0L)));
        // the coordinator receives only the non-noop results
        IStateCoordinator<Integer, ?> coordinator = executor.updateCoordinator;
        verify(coordinator, times(1)).completeUpdate(updateResultCaptor.capture(), any());
        List<UpdateResult<Integer>> updateResults = updateResultCaptor.getValue();
        assertEquals(addAndDelete + addOnly, updateResults.size());
        // tally the forwarded results by kind and check the split matches the setup
        int actualAddAndAndDelete = 0;
        int actualAddOnly = 0;
        for (int i = 0; i < updateResults.size(); i++) {
            UpdateResult<Integer> result = updateResults.get(i);
            if (result.getDeletedPoint().isPresent()) {
                actualAddAndAndDelete++;
            } else {
                actualAddOnly++;
            }
        }
        assertEquals(addAndDelete, actualAddAndAndDelete);
        assertEquals(addOnly, actualAddOnly);
    }
@ParameterizedTest
@ArgumentsSource(TestExecutorProvider.class)
public void testCleanCopy(AbstractForestUpdateExecutor<double[], ?> executor) {
float[] point1 = new float[] { 1.0f, -22.2f, 30.9f };
float[] point1Copy = cleanCopy(point1);
assertNotSame(point1, point1Copy);
assertArrayEquals(point1, point1Copy);
float[] point2 = new float[] { -0.0f, -22.2f, 30.9f };
float[] point2Copy = cleanCopy(point2);
assertNotSame(point2, point2Copy);
assertEquals(0.0, point2Copy[0]);
point2Copy[0] = -0.0f;
assertArrayEquals(point2, point2Copy);
}
}
| 427 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/executor/ForestTraversalExecutorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.AdditionalMatchers.aryEq;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atMost;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IComponentModel;
import com.amazon.randomcutforest.TestUtils;
import com.amazon.randomcutforest.returntypes.ConvergingAccumulator;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.tree.ITree;
import com.amazon.randomcutforest.tree.RandomCutTree;
/**
 * Tests the sequential and parallel forest traversal executors over spied
 * sampler-plus-tree components with mocked trees: per-tree traversal results must be
 * combined exactly as specified by the supplied accumulator or collector.
 */
public class ForestTraversalExecutorTest {

    // number of spied SamplerPlusTree components behind each executor
    private static int numberOfTrees = 10;

    // thread count used only by the parallel executor variant
    private static int threadPoolSize = 2;

    /**
     * Supplies one SequentialForestTraversalExecutor and one
     * ParallelForestTraversalExecutor, each over its own list of spied
     * SamplerPlusTree components (mocked sampler and tree), so every parameterized
     * test below runs once per executor flavor.
     */
    private static class TestExecutorProvider implements ArgumentsProvider {
        @Override
        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {
            ComponentList<Integer, float[]> sequentialExecutors = new ComponentList<>();
            ComponentList<Integer, float[]> parallelExecutors = new ComponentList<>();
            for (int i = 0; i < numberOfTrees; i++) {
                CompactSampler sampler = mock(CompactSampler.class);
                RandomCutTree tree = mock(RandomCutTree.class);
                sequentialExecutors.add(spy(new SamplerPlusTree<>(sampler, tree)));
            }
            for (int i = 0; i < numberOfTrees; i++) {
                CompactSampler sampler = mock(CompactSampler.class);
                RandomCutTree tree = mock(RandomCutTree.class);
                parallelExecutors.add(spy(new SamplerPlusTree<>(sampler, tree)));
            }
            SequentialForestTraversalExecutor sequentialExecutor = new SequentialForestTraversalExecutor(
                    sequentialExecutors);
            ParallelForestTraversalExecutor parallelExecutor = new ParallelForestTraversalExecutor(parallelExecutors,
                    threadPoolSize);
            return Stream.of(sequentialExecutor, parallelExecutor).map(Arguments::of);
        }
    }

    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testTraverseForestBinaryAccumulator(AbstractForestTraversalExecutor executor) {
        float[] point = new float[] { 1.2f, -3.4f };
        // stub each tree with a random traversal result; with Double::sum as the
        // accumulator and x -> x / 10.0 as the finisher, the forest result is the mean
        double expectedResult = 0.0;
        for (int i = 0; i < numberOfTrees; i++) {
            double treeResult = Math.random();
            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();
            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);
            expectedResult += treeResult;
        }
        expectedResult /= numberOfTrees;
        double result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, Double::sum,
                x -> x / 10.0);
        // every component must be traversed exactly once
        for (IComponentModel<?, ?> component : executor.components) {
            verify(component, times(1)).traverse(aryEq(point), any());
        }
        assertEquals(expectedResult, result, EPSILON);
    }

    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testTraverseForestCollector(AbstractForestTraversalExecutor executor) {
        float[] point = new float[] { 1.2f, -3.4f };
        // stub each tree with a random result; the sorted-list collector should
        // return all per-tree results in ascending order
        double[] expectedResult = new double[numberOfTrees];
        for (int i = 0; i < numberOfTrees; i++) {
            double treeResult = Math.random();
            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();
            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);
            expectedResult[i] = treeResult;
        }
        Arrays.sort(expectedResult);
        List<Double> result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY,
                TestUtils.SORTED_LIST_COLLECTOR);
        for (IComponentModel<?, ?> component : executor.components) {
            verify(component, times(1)).traverse(aryEq(point), any());
        }
        assertEquals(numberOfTrees, result.size());
        for (int i = 0; i < numberOfTrees; i++) {
            assertEquals(expectedResult[i], result.get(i), EPSILON);
        }
    }

    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testTraverseForestConverging(AbstractForestTraversalExecutor executor) {
        float[] point = new float[] { 1.2f, -3.4f };
        for (int i = 0; i < numberOfTrees; i++) {
            double treeResult = Math.random();
            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();
            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);
        }
        // the accumulator converges after half the trees, so traversal should stop
        // early: each tree is visited at most once and not all trees are consulted
        int convergenceThreshold = numberOfTrees / 2;
        ConvergingAccumulator<Double> accumulator = TestUtils.convergeAfter(convergenceThreshold);
        double result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator,
                x -> x / accumulator.getValuesAccepted());
        for (IComponentModel<?, ?> component : executor.components) {
            verify(component, atMost(1)).traverse(aryEq(point), any());
        }
        assertTrue(accumulator.getValuesAccepted() >= convergenceThreshold);
        assertTrue(accumulator.getValuesAccepted() < numberOfTrees);
        assertEquals(accumulator.getAccumulatedValue() / accumulator.getValuesAccepted(), result, EPSILON);
    }

    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testTraverseForestMultiBinaryAccumulator(AbstractForestTraversalExecutor executor) {
        // same as testTraverseForestBinaryAccumulator but exercising the
        // multi-visitor traversal path
        float[] point = new float[] { 1.2f, -3.4f };
        double expectedResult = 0.0;
        for (int i = 0; i < numberOfTrees; i++) {
            double treeResult = Math.random();
            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();
            when(tree.traverseMulti(aryEq(point), any())).thenReturn(treeResult);
            expectedResult += treeResult;
        }
        expectedResult /= numberOfTrees;
        double result = executor.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, Double::sum,
                x -> x / 10.0);
        for (IComponentModel<?, ?> component : executor.components) {
            verify(component, times(1)).traverseMulti(aryEq(point), any());
        }
        assertEquals(expectedResult, result, EPSILON);
    }

    @ParameterizedTest
    @ArgumentsSource(TestExecutorProvider.class)
    public void testTraverseForestMultiCollector(AbstractForestTraversalExecutor executor) {
        // same as testTraverseForestCollector but exercising the multi-visitor path
        float[] point = new float[] { 1.2f, -3.4f };
        double[] expectedResult = new double[numberOfTrees];
        for (int i = 0; i < numberOfTrees; i++) {
            double treeResult = Math.random();
            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();
            when(tree.traverseMulti(aryEq(point), any())).thenReturn(treeResult);
            expectedResult[i] = treeResult;
        }
        Arrays.sort(expectedResult);
        List<Double> result = executor.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY,
                TestUtils.SORTED_LIST_COLLECTOR);
        for (IComponentModel<?, ?> component : executor.components) {
            verify(component, times(1)).traverseMulti(aryEq(point), any());
        }
        assertEquals(numberOfTrees, result.size());
        for (int i = 0; i < numberOfTrees; i++) {
            assertEquals(expectedResult[i], result.get(i), EPSILON);
        }
    }
}
| 428 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/executor/PointStoreCoordinatorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.executor;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import com.amazon.randomcutforest.store.PointStore;
/**
 * Unit tests for {@link PointStoreCoordinator}: initUpdate must delegate to
 * {@code PointStore.add}, and completeUpdate must increment the reference count of
 * every added point and decrement it for every deleted point plus the update input.
 */
public class PointStoreCoordinatorTest {

    private PointStore store;
    // was a raw type; parameterized with the point type the store handles, matching
    // the IStateCoordinator<Integer, float[]> usage elsewhere in this test suite
    private PointStoreCoordinator<float[]> coordinator;

    @BeforeEach
    public void setUp() {
        store = mock(PointStore.class);
        coordinator = new PointStoreCoordinator<>(store);
    }

    @Test
    public void testInitUpdate() {
        float[] point = { 1.2f, -3.4f };
        int index = 123;
        // stub add() to return a known index; the captor doubles as an any-matcher
        ArgumentCaptor<float[]> captor = ArgumentCaptor.forClass(float[].class);
        when(store.add(captor.capture(), anyLong())).thenReturn(index);
        int result = coordinator.initUpdate(point, 0);
        // the coordinator must forward the exact point and sequence number
        verify(store, times(1)).add(point, 0);
        // JUnit convention: expected value first (was reversed)
        assertEquals(index, result);
    }

    @Test
    public void testCompleteUpdate() {
        List<UpdateResult<Integer>> updateResults = new ArrayList<>();
        UpdateResult<Integer> result1 = UpdateResult.<Integer>builder().addedPoint(1).deletedPoint(100).build();
        updateResults.add(result1);
        UpdateResult<Integer> result2 = UpdateResult.<Integer>builder().addedPoint(2).deletedPoint(200).build();
        updateResults.add(result2);
        UpdateResult<Integer> result3 = UpdateResult.<Integer>builder().addedPoint(3).build();
        updateResults.add(result3);
        UpdateResult<Integer> result4 = UpdateResult.noop();
        updateResults.add(result4);
        // order shouldn't matter
        Collections.shuffle(updateResults);
        Integer updateInput = 1000;
        coordinator.completeUpdate(updateResults, updateInput);
        // every added point (1, 2, 3) gets its reference count incremented
        ArgumentCaptor<Integer> captor1 = ArgumentCaptor.forClass(Integer.class);
        verify(store, times(3)).incrementRefCount(captor1.capture());
        List<Integer> arguments = captor1.getAllValues();
        Collections.sort(arguments);
        assertEquals(1, arguments.get(0));
        assertEquals(2, arguments.get(1));
        assertEquals(3, arguments.get(2));
        // every deleted point (100, 200) plus the update input (1000) is decremented
        ArgumentCaptor<Integer> captor2 = ArgumentCaptor.forClass(Integer.class);
        verify(store, times(3)).decrementRefCount(captor2.capture());
        arguments = captor2.getAllValues();
        Collections.sort(arguments);
        assertEquals(100, arguments.get(0));
        assertEquals(200, arguments.get(1));
        assertEquals(1000, arguments.get(2));
    }
}
| 429 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/RangeVectorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link RangeVector}: constructor validation plus the per-coordinate
 * scale and shift operations.
 */
public class RangeVectorTest {

    int dimensions;
    private RangeVector vector;

    @BeforeEach
    public void setUp() {
        dimensions = 3;
        vector = new RangeVector(dimensions);
    }

    @Test
    public void testNew() {
        // degenerate sizes are rejected by every constructor form
        assertThrows(IllegalArgumentException.class, () -> new RangeVector(0));
        assertThrows(IllegalArgumentException.class, () -> new RangeVector(new float[0]));

        // a freshly constructed vector starts out all zero
        float[] zeros = new float[dimensions];
        assertArrayEquals(zeros, vector.values);
        assertArrayEquals(zeros, vector.upper);
        assertArrayEquals(zeros, vector.lower);

        float[] empty = new float[0];
        assertThrows(IllegalArgumentException.class, () -> new RangeVector(empty, empty, empty));
        // any length mismatch among values/upper/lower is rejected
        assertThrows(IllegalArgumentException.class,
                () -> new RangeVector(zeros, zeros, new float[dimensions + 1]));
        assertThrows(IllegalArgumentException.class,
                () -> new RangeVector(zeros, new float[dimensions + 1], zeros));
        assertThrows(IllegalArgumentException.class,
                () -> new RangeVector(new float[dimensions + 1], zeros, zeros));
        assertDoesNotThrow(() -> new RangeVector(zeros, zeros, zeros));
        // an upper bound below the value is invalid; a lower bound below it is fine
        assertThrows(IllegalArgumentException.class,
                () -> new RangeVector(zeros, new float[] { -1f, 0f, 0f }, zeros));
        assertDoesNotThrow(() -> new RangeVector(zeros, zeros, new float[] { -1f, 0f, 0f }));
        // a lower bound above the value is invalid even when upper agrees with it
        assertThrows(IllegalArgumentException.class,
                () -> new RangeVector(zeros, new float[] { 1f, 0f, 0f }, new float[] { 1f, 0f, 0f }));
        assertDoesNotThrow(() -> new RangeVector(zeros, new float[] { 1f, 0f, 0f }, new float[] { -1f, 0f, 0f }));
    }

    @Test
    public void testScale() {
        vector.upper[0] = 1.1f;
        vector.upper[2] = 3.1f;
        vector.upper[1] = 3.1f;
        vector.lower[1] = -2.2f;
        float factor = 9.9f;

        // negative factors and out-of-range coordinate indices are rejected
        assertThrows(IllegalArgumentException.class, () -> vector.scale(0, -1.0f));
        assertThrows(IllegalArgumentException.class, () -> vector.scale(-1, 1.0f));
        assertThrows(IllegalArgumentException.class, () -> vector.scale(dimensions + 1, 1.0f));

        // scaling coordinate 0 touches only that coordinate
        vector.scale(0, factor);
        assertArrayEquals(new float[] { 1.1f * 9.9f, 3.1f, 3.1f }, vector.upper, 1e-6f);
        assertArrayEquals(new float[] { 0.0f, -2.2f, 0.0f }, vector.lower);

        // scaling coordinate 1 multiplies both its bounds by the factor
        vector.scale(1, 2 * factor);
        assertArrayEquals(new float[] { 1.1f * 9.9f, 3.1f * 2 * factor, 3.1f }, vector.upper, 1e-6f);
        assertArrayEquals(new float[] { 0f, -2.2f * 2 * factor, 0f }, vector.lower, 1e-6f);
    }

    @Test
    public void testShift() {
        vector.upper[0] = 1.1f;
        vector.upper[2] = 3.1f;
        vector.lower[1] = -2.2f;
        float delta = -9.9f;

        // out-of-range coordinate indices are rejected
        assertThrows(IllegalArgumentException.class, () -> vector.shift(-1, delta));
        assertThrows(IllegalArgumentException.class, () -> vector.shift(dimensions + 1, delta));

        // shifting moves the value and both bounds of the chosen coordinate together
        vector.shift(0, delta);
        assertArrayEquals(new float[] { 1.1f - 9.9f, 0.0f, 3.1f }, vector.upper, 1e-6f);
        assertArrayEquals(new float[] { delta, -2.2f, 0.0f }, vector.lower);
        assertArrayEquals(new float[] { delta, 0, 0 }, vector.values, 1e-6f);
    }
}
| 430 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/DiVectorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.state.returntypes.DiVectorMapper;
import com.amazon.randomcutforest.state.returntypes.DiVectorState;
/**
 * Tests for {@link DiVector}: construction, in-place addition, scaling,
 * high/low sums, renormalization, componentwise transforms, and the state-mapper
 * round trip.
 */
public class DiVectorTest {

    int dimensions;
    private DiVector vector;

    @BeforeEach
    public void setUp() {
        dimensions = 3;
        vector = new DiVector(dimensions);
    }

    @Test
    public void testNew() {
        assertEquals(dimensions, vector.getDimensions());
        // a freshly constructed vector starts out all zero
        double[] zeros = new double[dimensions];
        assertArrayEquals(zeros, vector.high);
        assertArrayEquals(zeros, vector.low);
        // zero dimensions and mismatched array lengths are rejected
        assertThrows(IllegalArgumentException.class, () -> new DiVector(0));
        assertThrows(IllegalArgumentException.class, () -> new DiVector(new double[10], new double[9]));
        assertDoesNotThrow(() -> new DiVector(new double[10], new double[10]));
    }

    @Test
    public void testAddToLeft() {
        DiVector left = new DiVector(dimensions);
        DiVector right = new DiVector(dimensions);
        for (int i = 0; i < dimensions; i++) {
            left.low[i] = Math.random();
            left.high[i] = Math.random();
            right.low[i] = Math.random();
            right.high[i] = Math.random();
        }
        // dimension mismatch is rejected
        assertThrows(IllegalArgumentException.class, () -> DiVector.addToLeft(left, new DiVector(dimensions + 1)));

        // snapshot both operands before the call
        double[] leftLowBefore = Arrays.copyOf(left.low, dimensions);
        double[] leftHighBefore = Arrays.copyOf(left.high, dimensions);
        double[] rightLowBefore = Arrays.copyOf(right.low, dimensions);
        double[] rightHighBefore = Arrays.copyOf(right.high, dimensions);

        DiVector result = DiVector.addToLeft(left, right);

        // the left operand is mutated in place and returned; the right is untouched
        assertSame(result, left);
        assertArrayEquals(rightLowBefore, right.low);
        assertArrayEquals(rightHighBefore, right.high);
        for (int i = 0; i < dimensions; i++) {
            assertEquals(leftLowBefore[i] + right.low[i], left.low[i]);
            assertEquals(leftHighBefore[i] + right.high[i], left.high[i]);
        }
    }

    @Test
    public void testScale() {
        vector.high[0] = 1.1;
        vector.high[2] = 3.1;
        vector.low[1] = 2.2;
        // every nonzero entry of both arrays is multiplied by the factor
        DiVector scaled = vector.scale(9.9);
        assertArrayEquals(new double[] { 1.1 * 9.9, 0.0, 3.1 * 9.9 }, scaled.high);
        assertArrayEquals(new double[] { 0.0, 2.2 * 9.9, 0.0 }, scaled.low);

        // scaling an all-zero vector leaves it all zero
        DiVector emptyVector = new DiVector(dimensions);
        emptyVector.scale(123.0);
        double[] zeros = new double[dimensions];
        assertArrayEquals(zeros, emptyVector.low);
        assertArrayEquals(zeros, emptyVector.high);
    }

    @Test
    public void testGetHighLowSum() {
        // the sum covers every entry of both the high and the low array
        vector.low[1] = 2.2;
        vector.high[2] = 3.1;
        assertEquals(3.1 + 2.2, vector.getHighLowSum());
    }

    @Test
    public void testRenormalize() {
        // an all-zero vector cannot be renormalized: the sum stays zero
        DiVector allZero = new DiVector(10);
        // cannot renormalize really
        allZero.renormalize(100);
        assertEquals(allZero.getHighLowSum(), 0);

        vector.high[0] = 1.1;
        vector.high[2] = 3.1;
        vector.low[1] = 2.2;
        assertEquals(1.1 + 3.1 + 2.2, vector.getHighLowSum());
        // renormalization rescales the entries so the high/low sum hits the target
        vector.renormalize(100.0);
        assertEquals(100.0, vector.getHighLowSum());
    }

    @Test
    public void testComponentwiseTransform() {
        vector.high[0] = 1.1;
        vector.high[1] = 2.1;
        vector.high[2] = 3.1;
        vector.low[0] = 101.1;
        vector.low[1] = 202.1;
        vector.low[2] = 303.1;
        double[] highBefore = Arrays.copyOf(vector.high, dimensions);
        double[] lowBefore = Arrays.copyOf(vector.low, dimensions);

        // the transform is applied to every entry of both arrays
        vector.componentwiseTransform(x -> 2 * x - 1);

        for (int i = 0; i < dimensions; i++) {
            assertEquals(2 * highBefore[i] - 1, vector.high[i], EPSILON);
            assertEquals(2 * lowBefore[i] - 1, vector.low[i], EPSILON);
        }
    }

    @Test
    public void testMapper() {
        DiVector original = new DiVector(dimensions);
        for (int i = 0; i < dimensions; i++) {
            original.low[i] = Math.random();
            original.high[i] = Math.random();
        }
        DiVectorMapper mapper = new DiVectorMapper();

        // a round trip through the state representation preserves both arrays
        DiVector roundTrip = mapper.toModel(mapper.toState(original));
        assertArrayEquals(roundTrip.high, original.high, 1e-10);
        assertArrayEquals(roundTrip.low, original.low, 1e-10);

        // null maps to null in both directions
        assertNull(mapper.toModel(mapper.toState(null)));

        // a state missing either array maps to null
        DiVectorState state = new DiVectorState();
        state.setHigh(original.high);
        assertNull(mapper.toModel(state));
        state.setHigh(null);
        state.setLow(original.low);
        assertNull(mapper.toModel(state));
    }
}
| 431 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDoubleAccumulatorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for OneSidedConvergingDoubleAccumulator. Because the concrete class
 * inherits most of its behavior, this doubles as a test of the abstract one-sided
 * converging accumulator it extends.
 */
public class OneSidedConvergingDoubleAccumulatorTest {

    private boolean highIsCritical;
    private double precision;
    private int minValuesAccepted;
    private int maxValuesAccepted;
    private OneSidedConvergingDoubleAccumulator accumulator;

    @BeforeEach
    public void setUp() {
        highIsCritical = true;
        precision = 0.1;
        minValuesAccepted = 5;
        maxValuesAccepted = 100;
        accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision, minValuesAccepted,
                maxValuesAccepted);
    }

    @Test
    public void testGetConvergingValue() {
        // for the double accumulator the converging value is the input itself
        assertEquals(-1001.1001, accumulator.getConvergingValue(-1001.1001));
        assertEquals(1.23, accumulator.getConvergingValue(1.23));
    }

    @Test
    public void testAccumulateValue() {
        // the accumulated value is a running sum of everything accepted
        double runningTotal = 0.0;
        for (int step = 0; step < 10; step++) {
            double sample = Math.random();
            accumulator.accept(sample);
            runningTotal += sample;
            assertEquals(runningTotal, accumulator.getAccumulatedValue());
        }
    }

    @Test
    public void testConvergenceHighIsCritical() {
        accumulator.accept(0.0);
        accumulator.accept(10.0);
        accumulator.accept(0.0);
        accumulator.accept(10.0);
        // still below minValuesAccepted, so no convergence yet
        assertEquals(4, accumulator.getValuesAccepted());
        assertFalse(accumulator.isConverged());
        double runningTotal = 20.0;
        assertEquals(runningTotal, accumulator.getAccumulatedValue());

        // each high value is a witness to convergence, and 1.0 / precision
        // witnesses are required before the accumulator reports convergence
        for (int round = 0; round < 1.0 / precision - 1; round++) {
            accumulator.accept(0.0);
            accumulator.accept(10.0);
            assertEquals(6 + 2 * round, accumulator.getValuesAccepted());
            assertFalse(accumulator.isConverged());
            runningTotal += 10.0;
            assertEquals(runningTotal, accumulator.getAccumulatedValue());
        }

        // one more low value is not enough ...
        accumulator.accept(0.0);
        assertFalse(accumulator.isConverged());
        // ... but the final required high value tips it over
        accumulator.accept(10.0);
        assertTrue(accumulator.isConverged());
        runningTotal += 10.0;
        assertEquals(runningTotal, accumulator.getAccumulatedValue());
    }

    @Test
    public void testConvergenceLowIsCritical() {
        // rebuild the accumulator with low values acting as the critical side
        highIsCritical = false;
        accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision, minValuesAccepted,
                maxValuesAccepted);
        accumulator.accept(0.0);
        accumulator.accept(10.0);
        accumulator.accept(0.0);
        accumulator.accept(10.0);
        // still below minValuesAccepted, so no convergence yet
        assertFalse(accumulator.isConverged());
        double runningTotal = 20.0;
        assertEquals(runningTotal, accumulator.getAccumulatedValue());

        // now each low value is a witness; 1.0 / precision witnesses are required
        for (int round = 0; round < 1.0 / precision - 1; round++) {
            accumulator.accept(0.0);
            accumulator.accept(10.0);
            assertFalse(accumulator.isConverged());
            runningTotal += 10.0;
            assertEquals(runningTotal, accumulator.getAccumulatedValue());
        }

        // one more high value is not enough ...
        accumulator.accept(10.0);
        assertFalse(accumulator.isConverged());
        // ... but the final required low value tips it over
        accumulator.accept(0.0);
        assertTrue(accumulator.isConverged());
        runningTotal += 10.0;
        assertEquals(runningTotal, accumulator.getAccumulatedValue());
    }
}
| 432 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/SampleSummaryTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
import java.util.function.BiFunction;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.testutils.NormalMixtureTestData;
import com.amazon.randomcutforest.util.Weighted;
public class SampleSummaryTest {
    /**
     * this class tests the return type data structure whereas
     * randomcutforest.SampleSummaryTest tests the summarization algorithms.
     */
    // number of generated points per test
    int dataSize = 20000;
    // dimensionality of generated points
    int newDimensions = 2;
    // used only to pick a seed for the data generator
    Random random = new Random();

    @Test
    public void testConstructor() {
        // an empty point list is rejected outright
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(Collections.emptyList(), 0.6));
        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);
        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();
        for (float[] point : points) {
            // testing 0 weight
            weighted.add(new Weighted<>(point, 0.0f));
        }
        // 0.1 and 1.3 appear to be invalid values for the second argument — note
        // that the all-zero weights would also make these inputs invalid, so the
        // throw here may come from either check
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted, 0.1));
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted, 1.3));
        // all-zero total weight is rejected
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        // non-finite or negative weights are rejected
        weighted.get(0).weight = Float.NaN;
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        weighted.get(0).weight = Float.POSITIVE_INFINITY;
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        weighted.get(0).weight = -1.0f;
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        // a single positive weight suffices
        weighted.get(0).weight = 1.0f;
        assertDoesNotThrow(() -> new SampleSummary(weighted));
        // a point whose dimension differs from the rest is rejected
        weighted.get(1).index = new float[newDimensions + 1];
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        weighted.get(1).index = new float[newDimensions];
        // non-finite coordinates are rejected ...
        weighted.get(1).index[0] = Float.NaN;
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        weighted.get(1).index[0] = Float.NEGATIVE_INFINITY;
        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));
        // ... but any finite coordinate, including negative values, is accepted
        weighted.get(1).index[0] = -1.0f;
        SampleSummary summary = new SampleSummary(weighted);
    }

    @Test
    public void addTypicalTest() {
        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);
        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();
        for (float[] point : points) {
            // testing 0 weight
            weighted.add(new Weighted<>(point, 1.0f));
        }
        SampleSummary summary = new SampleSummary(weighted);
        // the number of typical points must match the number of relative weights
        assertThrows(IllegalArgumentException.class, () -> summary.addTypical(new float[1][2], new float[2]));
        assertDoesNotThrow(() -> summary.addTypical(new float[0][2], new float[0]));
        // NOTE(review): typical points of dimension 4 are accepted by a 2-dimensional
        // summary — dimensions apparently aren't validated here; confirm intent
        assertDoesNotThrow(() -> summary.addTypical(new float[2][4], new float[2]));
        // points with mutually inconsistent dimensions are rejected
        assertThrows(IllegalArgumentException.class,
                () -> summary.addTypical(new float[][] { new float[2], new float[3] }, new float[2]));
        assertThrows(IllegalArgumentException.class,
                () -> summary.addTypical(new float[][] { new float[2], new float[3] }, new float[2]));
    }

    /**
     * Generates dataSize points with newDimensions coordinates: a normal cloud
     * (shrunk by a factor of 3) where one randomly chosen coordinate of each point
     * is pushed out by +/- 2 * scale, producing well-separated clusters. The
     * distance function is used only to derive the cluster-separation scale.
     */
    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {
        double baseMu = 0.0;
        double baseSigma = 1.0;
        double anomalyMu = 0.0;
        double anomalySigma = 1.0;
        double transitionToAnomalyProbability = 0.0;
        // ignoring anomaly cluster for now
        double transitionToBaseProbability = 1.0;
        // fixed-seed generator for the cluster shifts, independent of 'seed'
        Random prg = new Random(0);
        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,
                transitionToAnomalyProbability, transitionToBaseProbability);
        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);
        float[][] floatData = new float[dataSize][];
        float[] allZero = new float[newDimensions];
        float[] sigma = new float[newDimensions];
        Arrays.fill(sigma, 1f);
        // distance from the origin to the all-ones vector sets the shift magnitude
        double scale = distance.apply(allZero, sigma);
        for (int i = 0; i < dataSize; i++) {
            // shrink, shift at random
            int nextD = prg.nextInt(newDimensions);
            for (int j = 0; j < newDimensions; j++) {
                data[i][j] *= 1.0 / (3.0);
                // standard deviation adds up across dimension; taking square root
                // and using s 3 sigma ball
                if (j == nextD) {
                    if (prg.nextDouble() < 0.5)
                        data[i][j] += 2.0 * scale;
                    else
                        data[i][j] -= 2.0 * scale;
                }
            }
            floatData[i] = toFloatArray(data[i]);
        }
        return floatData;
    }
}
| 433 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/InterpolationMeasureTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code InterpolationMeasure}: constructor validation,
 * component-wise aggregation via {@code addToLeft}, and scaling.
 */
public class InterpolationMeasureTest {

    // number of coordinates in each directional (DiVector) component
    private int dimensions;
    // sample size recorded by the measure
    private int sampleSize;
    private InterpolationMeasure output;

    @BeforeEach
    public void setUp() {
        dimensions = 3;
        sampleSize = 99;
        output = new InterpolationMeasure(dimensions, sampleSize);
    }

    @Test
    public void testNew() {
        // a new measure starts zeroed in all six component arrays
        double[] zero = new double[3];
        assertArrayEquals(zero, output.measure.high);
        assertArrayEquals(zero, output.distances.high);
        assertArrayEquals(zero, output.probMass.high);
        assertArrayEquals(zero, output.measure.low);
        assertArrayEquals(zero, output.distances.low);
        assertArrayEquals(zero, output.probMass.low);
        assertEquals(output.getSampleSize(), sampleSize);
        // dimensions must be positive
        assertThrows(IllegalArgumentException.class, () -> new InterpolationMeasure(0, sampleSize));
        // all three DiVectors must share the same number of dimensions
        assertThrows(IllegalArgumentException.class,
                () -> new InterpolationMeasure(1, new DiVector(1), new DiVector(2), new DiVector(3)));
        assertThrows(IllegalArgumentException.class,
                () -> new InterpolationMeasure(1, new DiVector(2), new DiVector(2), new DiVector(3)));
        assertDoesNotThrow(() -> new InterpolationMeasure(1, new DiVector(2), new DiVector(2), new DiVector(2)));
    }

    @Test
    public void testAddToLeft() {
        InterpolationMeasure other1 = new InterpolationMeasure(dimensions, sampleSize);
        InterpolationMeasure other2 = new InterpolationMeasure(dimensions, sampleSize);
        // operands of addToLeft must agree on dimensions
        assertThrows(IllegalArgumentException.class,
                () -> InterpolationMeasure.addToLeft(other1, new InterpolationMeasure(dimensions + 1, sampleSize)));
        // fill output and two identical operands with distinct linear patterns
        // so that each expected sum below is uniquely determined
        for (int i = 0; i < dimensions; i++) {
            output.probMass.high[i] = 2 * i;
            output.probMass.low[i] = 2 * i + 1;
            output.distances.high[i] = 4 * i;
            output.distances.low[i] = 4 * i + 2;
            output.measure.high[i] = 6 * i;
            output.measure.low[i] = 6 * i + 3;
            other1.probMass.high[i] = other2.probMass.high[i] = 8 * i;
            other1.distances.high[i] = other2.distances.high[i] = 10 * i;
            other1.measure.high[i] = other2.measure.high[i] = 12 * i;
            other1.probMass.low[i] = other2.probMass.low[i] = 8 * i + 4;
            other1.distances.low[i] = other2.distances.low[i] = 10 * i + 5;
            other1.measure.low[i] = other2.measure.low[i] = 12 * i + 6;
        }
        assertArrayEquals(other1.probMass.high, other2.probMass.high);
        assertArrayEquals(other1.distances.high, other2.distances.high);
        assertArrayEquals(other1.measure.high, other2.measure.high);
        assertArrayEquals(other1.probMass.low, other2.probMass.low);
        assertArrayEquals(other1.distances.low, other2.distances.low);
        assertArrayEquals(other1.measure.low, other2.measure.low);
        InterpolationMeasure.addToLeft(output, other1);
        // left operand accumulates the component-wise sums,
        // e.g. low probMass: (2i + 1) + (8i + 4) = 2i + 8i + 5
        for (int i = 0; i < dimensions; i++) {
            assertEquals(2 * i + 8 * i, output.probMass.high[i]);
            assertEquals(4 * i + 10 * i, output.distances.high[i]);
            assertEquals(6 * i + 12 * i, output.measure.high[i]);
            assertEquals(2 * i + 8 * i + 5, output.probMass.low[i]);
            assertEquals(4 * i + 10 * i + 7, output.distances.low[i]);
            assertEquals(6 * i + 12 * i + 9, output.measure.low[i]);
        }
        // the right operand must be unmodified (still equal to its twin)
        assertArrayEquals(other1.probMass.high, other2.probMass.high);
        assertArrayEquals(other1.distances.high, other2.distances.high);
        assertArrayEquals(other1.measure.high, other2.measure.high);
        assertArrayEquals(other1.probMass.low, other2.probMass.low);
        assertArrayEquals(other1.distances.low, other2.distances.low);
        assertArrayEquals(other1.measure.low, other2.measure.low);
    }

    @Test
    public void testScale() {
        InterpolationMeasure copy = new InterpolationMeasure(dimensions, sampleSize);
        // fill output and a reference copy with the same pattern
        for (int i = 0; i < dimensions; i++) {
            output.probMass.high[i] = copy.probMass.high[i] = 2 * i;
            output.distances.high[i] = copy.distances.high[i] = 4 * i;
            output.measure.high[i] = copy.measure.high[i] = 6 * i;
            output.probMass.low[i] = copy.probMass.low[i] = 2 * i + 1;
            output.distances.low[i] = copy.distances.low[i] = 4 * i + 2;
            output.measure.low[i] = copy.measure.low[i] = 6 * i + 3;
        }
        assertArrayEquals(copy.probMass.high, output.probMass.high);
        assertArrayEquals(copy.distances.high, output.distances.high);
        assertArrayEquals(copy.measure.high, output.measure.high);
        assertArrayEquals(copy.probMass.low, output.probMass.low);
        assertArrayEquals(copy.distances.low, output.distances.low);
        assertArrayEquals(copy.measure.low, output.measure.low);
        InterpolationMeasure result = output.scale(0.9);
        // scale must return a new object and leave the receiver untouched
        assertArrayEquals(copy.probMass.low, output.probMass.low);
        assertArrayEquals(copy.distances.low, output.distances.low);
        assertArrayEquals(copy.measure.low, output.measure.low);
        assertArrayEquals(copy.probMass.high, output.probMass.high);
        assertArrayEquals(copy.distances.high, output.distances.high);
        assertArrayEquals(copy.measure.high, output.measure.high);
        // every component of the result is multiplied by the factor
        for (int i = 0; i < dimensions; i++) {
            assertEquals(2 * i * 0.9, result.probMass.high[i]);
            assertEquals(4 * i * 0.9, result.distances.high[i]);
            assertEquals(6 * i * 0.9, result.measure.high[i]);
            assertEquals((2 * i + 1) * 0.9, result.probMass.low[i]);
            assertEquals((4 * i + 2) * 0.9, result.distances.low[i]);
            assertEquals((6 * i + 3) * 0.9, result.measure.low[i]);
        }
    }
}
| 434 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/NeighborTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.junit.jupiter.api.Test;
/**
 * Unit test for the {@code Neighbor} value object: the constructor must store
 * the point, the distance, and the sequence indexes it is given.
 */
public class NeighborTest {
    @Test
    public void testNew() {
        float[] location = { 1.0f, -2.0f, 3.3f };
        double separation = 1234.5;
        List<Long> indexes = new ArrayList<>();
        indexes.add(99999L);
        indexes.add(99L);

        Neighbor candidate = new Neighbor(location, separation, indexes);

        assertArrayEquals(location, candidate.point);
        assertEquals(separation, candidate.distance);
        // the stored sequence indexes match, irrespective of order
        assertThat(candidate.sequenceIndexes, containsInAnyOrder(indexes.toArray()));
    }
}
| 435 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/DensityOutputTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code DensityOutput}: initialization, aggregation via
 * {@code addToLeft}, and the density / directional-density computations.
 */
public class DensityOutputTest {

    // number of coordinates in each directional component
    private int dimensions;
    // sample size used to normalize the sum of points in getDensity
    private int sampleSize;
    private DensityOutput output;

    @BeforeEach
    public void setUp() {
        dimensions = 3;
        sampleSize = 99;
        output = new DensityOutput(dimensions, sampleSize);
    }

    @Test
    public void testNew() {
        // a fresh DensityOutput starts with all six component arrays zeroed
        double[] zero = new double[3];
        assertArrayEquals(zero, output.measure.high);
        assertArrayEquals(zero, output.distances.high);
        assertArrayEquals(zero, output.probMass.high);
        assertArrayEquals(zero, output.measure.low);
        assertArrayEquals(zero, output.distances.low);
        assertArrayEquals(zero, output.probMass.low);
    }

    @Test
    public void testAddToLeft() {
        DensityOutput other1 = new DensityOutput(dimensions, sampleSize);
        DensityOutput other2 = new DensityOutput(dimensions, sampleSize);
        // fill output and two identical operands with distinct linear patterns
        for (int i = 0; i < dimensions; i++) {
            output.probMass.high[i] = 2 * i;
            output.probMass.low[i] = 2 * i + 1;
            output.distances.high[i] = 4 * i;
            output.distances.low[i] = 4 * i + 2;
            output.measure.high[i] = 6 * i;
            output.measure.low[i] = 6 * i + 3;
            other1.probMass.high[i] = other2.probMass.high[i] = 8 * i;
            other1.distances.high[i] = other2.distances.high[i] = 10 * i;
            other1.measure.high[i] = other2.measure.high[i] = 12 * i;
            other1.probMass.low[i] = other2.probMass.low[i] = 8 * i + 4;
            other1.distances.low[i] = other2.distances.low[i] = 10 * i + 5;
            other1.measure.low[i] = other2.measure.low[i] = 12 * i + 6;
        }
        assertArrayEquals(other1.probMass.high, other2.probMass.high);
        assertArrayEquals(other1.distances.high, other2.distances.high);
        assertArrayEquals(other1.measure.high, other2.measure.high);
        assertArrayEquals(other1.probMass.low, other2.probMass.low);
        assertArrayEquals(other1.distances.low, other2.distances.low);
        assertArrayEquals(other1.measure.low, other2.measure.low);
        DensityOutput.addToLeft(output, other1);
        // left operand accumulates component-wise sums,
        // e.g. low probMass: (2i + 1) + (8i + 4) = 2i + 8i + 5
        for (int i = 0; i < dimensions; i++) {
            assertEquals(2 * i + 8 * i, output.probMass.high[i]);
            assertEquals(4 * i + 10 * i, output.distances.high[i]);
            assertEquals(6 * i + 12 * i, output.measure.high[i]);
            assertEquals(2 * i + 8 * i + 5, output.probMass.low[i]);
            assertEquals(4 * i + 10 * i + 7, output.distances.low[i]);
            assertEquals(6 * i + 12 * i + 9, output.measure.low[i]);
        }
        // the right operand must not be modified (still equal to its twin)
        assertArrayEquals(other1.probMass.high, other2.probMass.high);
        assertArrayEquals(other1.distances.high, other2.distances.high);
        assertArrayEquals(other1.measure.high, other2.measure.high);
        assertArrayEquals(other1.probMass.low, other2.probMass.low);
        assertArrayEquals(other1.distances.low, other2.distances.low);
        assertArrayEquals(other1.measure.low, other2.measure.low);
    }

    @Test
    public void testGetDensity() {
        // an empty output has zero density
        assertTrue(output.getDensity(0.5, 3) == 0);
        for (int i = 0; i < dimensions; i++) {
            output.probMass.high[i] = 2 * i;
            output.distances.high[i] = 4 * i;
            output.measure.high[i] = 6 * i;
            output.probMass.low[i] = 2 * i;
            output.distances.low[i] = 4 * i + 2;
            output.measure.low[i] = 6 * i + 3;
        }
        double q = 0.5;
        double density = output.getDensity(q, 3);
        DiVector densityVector = output.getDirectionalDensity(q, 3);
        // recompute the expected density by hand:
        // density = sumOfPoints / (q * sumOfPoints + sumOfFactors)
        double sumOfPoints = output.measure.getHighLowSum() / sampleSize;
        double sumOfFactors = 0.0;
        for (int i = 0; i < dimensions; i++) {
            double mass = output.probMass.getHighLowSum(i);
            double distance = output.distances.getHighLowSum(i);
            double t = (mass != 0) ? distance / mass : 0;
            t = Math.pow(t, dimensions) * mass;
            sumOfFactors += t;
        }
        assertEquals(sumOfPoints / (q * sumOfPoints + sumOfFactors), density, EPSILON);
        // for contrib, do not scale sum of points by sample size
        sumOfPoints = output.measure.getHighLowSum();
        for (int i = 0; i < dimensions; i++) {
            assertEquals(output.measure.high[i] * density / sumOfPoints, densityVector.high[i], EPSILON);
            assertEquals(output.measure.low[i] * density / sumOfPoints, densityVector.low[i], EPSILON);
        }
        // the no-argument overloads must agree with explicit defaults
        assertEquals(output.getDensity(DensityOutput.DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions),
                output.getDensity());
        densityVector = output.getDirectionalDensity(DensityOutput.DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);
        DiVector defaultDensityVector = output.getDirectionalDensity();
        for (int i = 0; i < dimensions; i++) {
            assertEquals(densityVector.high[i], defaultDensityVector.high[i], EPSILON);
            assertEquals(densityVector.low[i], defaultDensityVector.low[i], EPSILON);
        }
    }
}
| 436 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDiVectorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.returntypes;
import static com.amazon.randomcutforest.TestUtils.EPSILON;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@code OneSidedConvergingDiVectorAccumulator}: the converging
 * value of a DiVector and the accumulation of accepted vectors.
 */
public class OneSidedConvergingDiVectorTest {

    private boolean highIsCritical;
    private double precision;
    private int minValuesAccepted;
    private int maxValuesAccepted;
    private int dimensions;
    private OneSidedConvergingDiVectorAccumulator accumulator;

    @BeforeEach
    public void setUp() {
        highIsCritical = true;
        precision = 0.1;
        minValuesAccepted = 5;
        maxValuesAccepted = 100;
        dimensions = 2;
        accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions, highIsCritical, precision,
                minValuesAccepted, maxValuesAccepted);
    }

    /** Builds the DiVector used throughout: high = {1.1, 9.6}, low = {0, 2.3}. */
    private DiVector sampleVector() {
        DiVector vector = new DiVector(dimensions);
        vector.high[0] = 1.1;
        vector.low[1] = 2.3;
        vector.high[1] = 9.6;
        return vector;
    }

    @Test
    public void testGetConvergingValue() {
        // the converging value is the sum of all high and low entries
        assertEquals(1.1 + 2.3 + 9.6, accumulator.getConvergingValue(sampleVector()), EPSILON);
    }

    @Test
    public void testAccumulateValue() {
        // nothing accepted yet
        assertEquals(0, accumulator.getWitnesses());
        assertEquals(0, accumulator.getMean());
        assertEquals(0, accumulator.getDeviation());

        DiVector first = sampleVector();
        accumulator.accept(first);
        DiVector total = accumulator.getAccumulatedValue();
        assertArrayEquals(first.high, total.high, EPSILON);
        assertArrayEquals(first.low, total.low, EPSILON);

        DiVector second = sampleVector();
        accumulator.accept(second);
        total = accumulator.getAccumulatedValue();
        // the accumulated value is the component-wise sum of both vectors
        DiVector.addToLeft(first, second);
        assertArrayEquals(first.high, total.high, EPSILON);
        assertArrayEquals(first.low, total.low, EPSILON);

        int extra = 5;
        while (extra-- > 0) {
            accumulator.accept(second);
        }
        assertEquals(3, accumulator.getWitnesses());
        assertEquals(0, accumulator.getDeviation(), 1e-6f);
        // every accepted vector has converging value 1.1 + 2.3 + 9.6 = 13
        assertEquals(13, accumulator.getMean(), 1e-6f);
    }
}
| 437 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/interpolation/SimpleInterpolationVisitorTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.interpolation;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import com.amazon.randomcutforest.returntypes.InterpolationMeasure;
import com.amazon.randomcutforest.tree.BoundingBox;
import com.amazon.randomcutforest.tree.INodeView;
import com.amazon.randomcutforest.tree.NodeView;
public class SimpleInterpolationVisitorTest {
private static final int SEED = 1002;
@Test
public void testNew() {
    // a freshly constructed visitor has an all-zero result and no coordinate
    // of the query point is (yet) known to be inside a bounding box
    float[] query = { 1.0f, 2.0f };
    int sampleSize = 9;
    SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(query, sampleSize, 1, false);

    assertFalse(visitor.pointInsideBox);
    assertEquals(2, visitor.coordInsideBox.length);
    for (boolean inside : visitor.coordInsideBox) {
        assertFalse(inside);
    }

    InterpolationMeasure output = visitor.getResult();
    double[] zero = new double[query.length];
    assertArrayEquals(zero, output.measure.high);
    assertArrayEquals(zero, output.measure.low);
    assertArrayEquals(zero, output.distances.high);
    assertArrayEquals(zero, output.distances.low);
    assertArrayEquals(zero, output.probMass.high);
    assertArrayEquals(zero, output.probMass.low);
}
@Test
public void testAcceptLeafEquals() {
    // leaf point coincides with the query point
    float[] point = { 1.0f, 2.0f, 3.0f };
    INodeView leafNode = mock(NodeView.class);
    when(leafNode.getLeafPoint()).thenReturn(point);
    when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
    int leafDepth = 100;
    int leafMass = 10;
    when(leafNode.getMass()).thenReturn(leafMass);
    int sampleSize = 21;
    SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(point, sampleSize, 1, false);
    visitor.acceptLeaf(leafNode, leafDepth);
    InterpolationMeasure result = visitor.getResult();
    double[] expected = new double[point.length];
    // duplicate point: mass (1 + leafMass) is split evenly over all
    // 2 * point.length directions
    Arrays.fill(expected, 0.5 * (1 + leafMass) / point.length);
    assertArrayEquals(expected, result.measure.high);
    assertArrayEquals(expected, result.measure.low);
    // probability mass is uniform over the 2 * point.length directions
    Arrays.fill(expected, 0.5 / point.length);
    assertArrayEquals(expected, result.probMass.high);
    assertArrayEquals(expected, result.probMass.low);
    // distance to a coincident point is zero in every direction
    Arrays.fill(expected, 0.0);
    assertArrayEquals(expected, result.distances.high);
    assertArrayEquals(expected, result.distances.low);
}
@Test
public void testAcceptLeafNotEquals() {
    // query point differs from the leaf point in every coordinate
    float[] point = { 1.0f, 9.0f, 4.0f };
    float[] anotherPoint = { 4.0f, 5.0f, 6.0f };
    INodeView leafNode = mock(NodeView.class);
    when(leafNode.getLeafPoint()).thenReturn(anotherPoint);
    when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));
    when(leafNode.getMass()).thenReturn(4);
    int leafDepth = 100;
    int sampleSize = 99;
    SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(point, sampleSize, 1, false);
    visitor.acceptLeaf(leafNode, leafDepth);
    InterpolationMeasure result = visitor.getResult();
    // amounts by which the query extends the leaf's (degenerate) box,
    // interleaved as (high, low) pairs per coordinate:
    // dim0 low by 3, dim1 high by 4, dim2 low by 2
    double expectedSumOfNewRange = 3.0 + 4.0 + 2.0;
    double[] expectedDifferenceInRangeVector = { 0.0, 3.0, 4.0, 0.0, 0.0, 2.0 };
    double[] expectedProbVector = Arrays.stream(expectedDifferenceInRangeVector).map(x -> x / expectedSumOfNewRange)
            .toArray();
    double[] expectedmeasure = Arrays.stream(expectedProbVector).toArray();
    double[] expectedDistances = new double[2 * point.length];
    for (int i = 0; i < 2 * point.length; i++) {
        expectedDistances[i] = expectedProbVector[i] * expectedDifferenceInRangeVector[i];
    }
    // the measure scales by (1 + leaf mass) = 5
    for (int i = 0; i < 2 * point.length; i++) {
        expectedmeasure[i] = expectedmeasure[i] * 5;
    }
    for (int i = 0; i < point.length; i++) {
        assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);
        assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);
        assertEquals(expectedmeasure[2 * i], result.measure.high[i]);
        assertEquals(expectedmeasure[2 * i + 1], result.measure.low[i]);
        assertEquals(expectedDistances[2 * i], result.distances.high[i]);
        assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);
    }
}
@Test
public void testAcceptEqualsLeafPoint() {
    // query equals the leaf point; then walk up through a parent and a
    // grandparent, checking the interpolation state after each step
    float[] pointToScore = { 0.0f, 0.0f };
    int sampleSize = 50;
    SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(pointToScore, sampleSize, 1, false);

    float[] point = Arrays.copyOf(pointToScore, pointToScore.length);
    INodeView node = mock(NodeView.class);
    when(node.getLeafPoint()).thenReturn(point);
    when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));
    when(node.getMass()).thenReturn(1);
    int depth = 2;
    visitor.acceptLeaf(node, depth);
    InterpolationMeasure result = visitor.getResult();
    double[] expected = new double[point.length];
    // duplicate point: mass split evenly over all 2d directions
    Arrays.fill(expected, 0.5 * (1 + node.getMass()) / point.length);
    assertArrayEquals(expected, result.measure.high);
    assertArrayEquals(expected, result.measure.low);
    Arrays.fill(expected, 0.5 / point.length);
    assertArrayEquals(expected, result.probMass.high);
    assertArrayEquals(expected, result.probMass.low);
    Arrays.fill(expected, 0.0);
    assertArrayEquals(expected, result.distances.high);
    assertArrayEquals(expected, result.distances.low);
    depth--;
    float[] siblingPoint = { 1.0f, -2.0f };
    INodeView sibling = mock(NodeView.class);
    int siblingMass = 2;
    when(sibling.getMass()).thenReturn(siblingMass);
    INodeView parent = mock(NodeView.class);
    when(parent.getMass()).thenReturn(1 + siblingMass);
    BoundingBox boundingBox = new BoundingBox(point, siblingPoint);
    when(parent.getBoundingBox()).thenReturn(boundingBox);
    when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(siblingPoint));
    visitor.accept(parent, depth);
    result = visitor.getResult();
    // compute using shadow box (sibling leaf node at {1.0, -2.0} and parent
    // bounding box
    double[] directionalDistance = { 0.0, 1.0, 2.0, 0.0 };
    double[] differenceInRange = { 0.0, 1.0, 2.0, 0.0 };
    double sumOfNewRange = 1.0 + 2.0;
    double[] probVector = Arrays.stream(differenceInRange).map(x -> x / sumOfNewRange).toArray();
    expected = new double[2 * pointToScore.length];
    for (int i = 0; i < expected.length; i++) {
        expected[i] = probVector[i] * (1 + node.getMass() + parent.getMass());
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expected[2 * i], result.measure.high[i]);
        assertEquals(expected[2 * i + 1], result.measure.low[i]);
    }
    for (int i = 0; i < expected.length; i++) {
        expected[i] = probVector[i];
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expected[2 * i], result.probMass.high[i]);
        assertEquals(expected[2 * i + 1], result.probMass.low[i]);
    }
    for (int i = 0; i < expected.length; i++) {
        expected[i] = probVector[i] * directionalDistance[i];
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expected[2 * i], result.distances.high[i]);
        assertEquals(expected[2 * i + 1], result.distances.low[i]);
    }
    // reset to probmass
    for (int i = 0; i < expected.length; i++) {
        expected[i] = probVector[i];
    }
    // testing shadow box setup for grandparent
    INodeView uncle = mock(NodeView.class);
    int uncleMass = 2;
    // fixed copy-paste: the uncle's mass is stubbed on `uncle`, not `sibling`
    when(uncle.getMass()).thenReturn(uncleMass);
    INodeView grandParent = mock(NodeView.class);
    when(grandParent.getMass()).thenReturn(1 + siblingMass + uncleMass);
    BoundingBox grandBox = boundingBox.getMergedBox(new float[] { 2.0f, 2.0f });
    when(grandParent.getBoundingBox()).thenReturn(grandBox);
    when(grandParent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));
    visitor.accept(grandParent, depth - 1);
    result = visitor.getResult();
    directionalDistance = new double[] { 0.0, 2.0, 0.0, 0.0 };
    differenceInRange = new double[] { 0.0, 1.0, 0.0, 0.0 };
    double newSumOfNewRange = 1.0 + 2.0 + 1.0 + 2.0;
    probVector = Arrays.stream(differenceInRange).map(x -> x / newSumOfNewRange).toArray();
    double prob = Arrays.stream(probVector).sum();
    // new cut mass plus the surviving fraction of the previous estimate
    for (int i = 0; i < expected.length; i++) {
        expected[i] = probVector[i] + (1 - prob) * expected[i];
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expected[2 * i], result.probMass.high[i]);
        assertEquals(expected[2 * i + 1], result.probMass.low[i]);
    }
}
@Test
public void testAccept() {
    // leaf point differs from the query; check the result after the leaf and
    // again after a parent whose box still does not contain the query
    float[] pointToScore = { 0.0f, 0.0f };
    int sampleSize = 50;
    SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(pointToScore, sampleSize, 1, false);
    INodeView leafNode = mock(NodeView.class);
    float[] point = new float[] { 1.0f, -2.0f };
    when(leafNode.getLeafPoint()).thenReturn(point);
    when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));
    int leafMass = 3;
    when(leafNode.getMass()).thenReturn(leafMass);
    int depth = 4;
    visitor.acceptLeaf(leafNode, depth);
    InterpolationMeasure result = visitor.getResult();
    // query extends the leaf box: dim0 low by 1, dim1 high by 2,
    // interleaved as (high, low) pairs per coordinate
    double expectedSumOfNewRange = 1.0 + 2.0;
    double[] expectedDifferenceInRangeVector = { 0.0, 1.0, 2.0, 0.0 };
    double[] expectedProbVector = Arrays.stream(expectedDifferenceInRangeVector).map(x -> x / expectedSumOfNewRange)
            .toArray();
    double[] expectedNumPts = Arrays.stream(expectedProbVector).toArray();
    double[] expectedDistances = new double[2 * pointToScore.length];
    for (int i = 0; i < 2 * pointToScore.length; i++) {
        expectedDistances[i] = expectedProbVector[i] * expectedDifferenceInRangeVector[i];
    }
    // measure scales by (1 + leaf mass) = 4
    for (int i = 0; i < 2 * pointToScore.length; i++) {
        expectedNumPts[i] = expectedNumPts[i] * 4;
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);
        assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);
        assertEquals(expectedNumPts[2 * i], result.measure.high[i]);
        assertEquals(expectedNumPts[2 * i + 1], result.measure.low[i]);
        assertEquals(expectedDistances[2 * i], result.distances.high[i]);
        assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);
    }
    // parent does not contain pointToScore
    depth--;
    INodeView sibling = mock(NodeView.class);
    int siblingMass = 2;
    when(sibling.getMass()).thenReturn(siblingMass);
    INodeView parent = mock(NodeView.class);
    int parentMass = leafMass + siblingMass;
    when(parent.getMass()).thenReturn(parentMass);
    when(parent.getBoundingBox()).thenReturn(new BoundingBox(point, new float[] { 2.0f, -0.5f }));
    visitor.accept(parent, depth);
    result = visitor.getResult();
    // fold the parent step into the expectations: new cut probability plus
    // the surviving fraction of the leaf-step estimates
    double expectedSumOfNewRange2 = 2.0 + 2.0;
    double expectedProbOfCut2 = (1.0 + 0.5) / expectedSumOfNewRange2;
    double[] expectedDifferenceInRangeVector2 = { 0.0, 1.0, 0.5, 0.0 };
    double[] expectedDirectionalDistanceVector2 = { 0.0, 2.0, 2.0, 0.0 };
    for (int i = 0; i < 2 * pointToScore.length; i++) {
        double prob = expectedDifferenceInRangeVector2[i] / expectedSumOfNewRange2;
        expectedProbVector[i] = prob + (1 - expectedProbOfCut2) * expectedProbVector[i];
        expectedNumPts[i] = prob * (1 + parent.getMass()) + (1 - expectedProbOfCut2) * expectedNumPts[i];
        expectedDistances[i] = prob * expectedDirectionalDistanceVector2[i]
                + (1 - expectedProbOfCut2) * expectedDistances[i];
    }
    for (int i = 0; i < pointToScore.length; i++) {
        assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);
        assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);
        assertEquals(expectedNumPts[2 * i], result.measure.high[i]);
        assertEquals(expectedNumPts[2 * i + 1], result.measure.low[i]);
        assertEquals(expectedDistances[2 * i], result.distances.high[i]);
        assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);
    }
    // grandparent contains pointToScore
    assertFalse(visitor.pointInsideBox);
    depth--;
}
}
| 438 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/test/java/com/amazon/randomcutforest/store/PointStoreTest.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.store;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Arrays;
import java.util.Random;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class PointStoreTest {
private int dimensions;
private int capacity;
private PointStore pointStore;
@BeforeEach
public void setUp() {
    // small store: 2-dimensional points, room for 4 of them
    dimensions = 2;
    capacity = 4;
    pointStore = new PointStoreSmall(dimensions, capacity);
}
@Test
public void testNew() {
    // a fresh store reports its configuration and holds no points
    assertEquals(dimensions, pointStore.getDimensions());
    assertEquals(capacity, pointStore.getCapacity());
    assertEquals(0, pointStore.size());
    int indexCapacity = pointStore.getIndexCapacity();
    for (int location = 0; location < indexCapacity; location++) {
        assertEquals(0, pointStore.getRefCount(location));
    }
}
@Test
public void testAdd() {
    // adding a point yields a valid index, reference count 1, and a
    // defensive copy on retrieval
    float[] first = { 1.2f, -3.4f };
    int firstIndex = pointStore.add(first, 1);
    assertTrue(firstIndex >= 0 && firstIndex < capacity);
    assertEquals(1, pointStore.getRefCount(firstIndex));
    assertEquals(1, pointStore.size());

    float[] firstCopy = pointStore.getNumericVector(firstIndex);
    assertNotSame(first, firstCopy);
    assertArrayEquals(first, firstCopy);

    float[] second = { 111.2f, -333.4f };
    int secondIndex = pointStore.add(second, 2);
    assertTrue(secondIndex >= 0 && secondIndex < capacity);
    assertEquals(1, pointStore.getRefCount(secondIndex));
    assertEquals(2, pointStore.size());
    assertNotEquals(firstIndex, secondIndex);

    float[] secondCopy = pointStore.getNumericVector(secondIndex);
    assertNotSame(second, secondCopy);
    assertArrayEquals(second, secondCopy);

    // the second insertion must not disturb the first stored point
    firstCopy = pointStore.getNumericVector(firstIndex);
    assertNotSame(first, firstCopy);
    assertArrayEquals(first, firstCopy);
}
@Test
public void testAddInvalid() {
    // a point with the wrong number of dimensions is rejected
    assertThrows(IllegalArgumentException.class, () -> pointStore.add(new float[] { 1.1f, -2.2f, 3.0f }, 0));
    // fill the store to capacity; use a seeded Random so the test is
    // deterministic (Math.random() was nondeterministic)
    Random random = new Random(42);
    for (int i = 0; i < capacity; i++) {
        float[] point = new float[dimensions];
        point[0] = random.nextFloat();
        point[1] = random.nextFloat();
        pointStore.add(point, i + 2);
    }
    // point store is full
    assertThrows(IllegalStateException.class, () -> pointStore.add(new float[] { 1.1f, -2.2f }, 0));
}
@Test
public void testGetInvalid() {
    // indexes outside the valid range are rejected
    assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(-1));
    assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(capacity));
}
@Test
public void testIncrementRefCount() {
    // a stored point starts with reference count 1; incrementing bumps it
    int location = pointStore.add(new float[] { 1.2f, -3.4f }, 0);
    assertEquals(1, pointStore.getRefCount(location));
    pointStore.incrementRefCount(location);
    assertEquals(2, pointStore.getRefCount(location));
}
@Test
public void testIncrementRefCountInvalid() {
    // Both a negative offset and an offset holding no point are rejected.
    for (int invalid : new int[] { -1, 0 }) {
        assertThrows(IllegalArgumentException.class, () -> pointStore.incrementRefCount(invalid));
    }
}
@Test
public void testDecrementRefCount() {
    // size() only drops once the reference count of a point reaches zero.
    int offset = pointStore.add(new float[] { 1.2f, -3.4f }, 0);
    pointStore.incrementRefCount(offset);
    assertEquals(2, pointStore.getRefCount(offset));
    assertEquals(1, pointStore.size());
    pointStore.decrementRefCount(offset);
    assertEquals(1, pointStore.getRefCount(offset));
    assertEquals(1, pointStore.size());
    pointStore.decrementRefCount(offset);
    assertEquals(0, pointStore.getRefCount(offset));
    assertEquals(0, pointStore.size());
}
@Test
public void testDecrementRefCountInvalid() {
    // Decrementing a negative or unoccupied offset is rejected.
    for (int invalid : new int[] { -1, 0 }) {
        assertThrows(IllegalArgumentException.class, () -> pointStore.decrementRefCount(invalid));
    }
}
@Test
public void testPointEquals() {
    // The stored vector equals the inserted values but not an arbitrary array.
    float[] inserted = { 1.2f, -3.4f };
    int offset = pointStore.add(inserted, 0);
    assertArrayEquals(pointStore.getNumericVector(offset), inserted);
    assertNotEquals(pointStore.getNumericVector(offset), new float[] { 5.6f, -7.8f });
}
@Test
public void testPointEqualsInvalid() {
    // Both a negative offset and an offset with no live point are rejected.
    // (The previous version declared an unused local point; it was dead code.)
    assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(-1));
    assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(0));
}
@Test
public void internalshinglingTestNoRotation() {
    // Feed single-value points into a store that shingles internally in
    // sliding-window (non-rotated) mode, and compare the store's shingle
    // against a locally maintained copy.
    int shinglesize = 10;
    PointStore store = new PointStore.Builder().capacity(20 * shinglesize).dimensions(shinglesize)
            .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)
            .currentStoreCapacity(1).build();
    assertFalse(store.isInternalRotationEnabled());
    Random random = new Random(0);
    float[] shingle = new float[shinglesize];
    // writes start at slot 3 so the local window wraps at a non-trivial offset
    for (int i = 0; i < 10 * shinglesize - 3; i++) {
        shingle[(i + 3) % shinglesize] = (float) random.nextDouble();
        store.add(new float[] { shingle[(i + 3) % shinglesize] }, i);
    }
    assertArrayEquals(store.getNumericVector(9 * shinglesize - 3), shingle, (float) 1e-6);
    assertArrayEquals(store.getInternalShingle(), shingle, (float) 1e-6);
    // in sliding-window mode, input index 0 maps to the last slot of the shingle
    assertArrayEquals(store.transformIndices(new int[] { 0 }), new int[] { shinglesize - 1 });
    // indices out of the input range, or repeated indices, are rejected
    assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 1 }));
    assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 0, 0 }));
    // 0.0f and -0.0f must shingle identically
    assertArrayEquals(store.transformToShingledPoint(new float[] { 0.0f }),
            store.transformToShingledPoint(new float[] { -0.0f }), (float) 1e-6);
    // with internal shingling, add() expects single-input-dimension points
    assertThrows(IllegalArgumentException.class, () -> store.add(new float[] { 0, 0 }, 0));
}
@Test
public void internalshinglingTestWithRotation() {
    // Same as the no-rotation test, but the internal shingle is a cyclic
    // buffer: slot i % shinglesize is overwritten in place.
    int shinglesize = 10;
    PointStore store = new PointStore.Builder().capacity(20 * shinglesize).dimensions(shinglesize)
            .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)
            .internalRotationEnabled(true).currentStoreCapacity(1).build();
    assertTrue(store.isInternalRotationEnabled());
    Random random = new Random(0);
    float[] shingle = new float[shinglesize];
    float[] temp = null;
    for (int i = 0; i < 10 * shinglesize + 5; i++) {
        shingle[i % shinglesize] = (float) random.nextDouble();
        // preview of the shingled point before the add actually commits it
        temp = store.transformToShingledPoint(new float[] { shingle[i % shinglesize] });
        store.add(new float[] { shingle[i % shinglesize] }, i);
    }
    assertEquals(store.getNextSequenceIndex(), 10 * shinglesize + 5);
    assertArrayEquals(temp, shingle, (float) 1e-6);
    assertArrayEquals(store.getNumericVector(9 * shinglesize + 5), shingle, (float) 1e-6);
    // the getter must return a copy, not the internal buffer itself
    assertNotEquals(store.internalShingle, store.getInternalShingle());
    assertArrayEquals(store.getNumericVector(9 * shinglesize + 5), shingle);
    assertNotEquals(store.getNumericVector(9 * shinglesize + 4), shingle);
    assertArrayEquals(store.getInternalShingle(), shingle, (float) 1e-6);
    // after 10*shinglesize+5 updates, input index 0 maps to rotated slot 5
    assertArrayEquals(store.transformIndices(new int[] { 0 }), new int[] { 5 });
    assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 1 }));
    assertEquals(store.transformToShingledPoint(new float[] { 1, 2 }).length, 2);
    // 0.0f and -0.0f must shingle identically
    assertArrayEquals(store.transformToShingledPoint(new float[] { 0.0f }),
            store.transformToShingledPoint(new float[] { -0.0f }), (float) 1e-6);
}
@Test
public void checkRotationAndCompact() {
    // Exercise compaction of a rotated (cyclic-buffer) shingled store:
    // fill, release most references, compact, refill, and repeat.
    int shinglesize = 4;
    PointStore store = new PointStore.Builder().capacity(2 * shinglesize).dimensions(shinglesize)
            .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)
            .internalRotationEnabled(true).currentStoreCapacity(1).build();
    // values are -1, -2, ... so each shingle's content is easy to predict
    for (int i = 0; i < 2 * shinglesize; i++) {
        store.add(new float[] { -i - 1 }, i);
    }
    // release every early reference except shinglesize - 1
    for (int i = 0; i < 2 * shinglesize - shinglesize + 1; i++) {
        if (i != shinglesize - 1) {
            store.decrementRefCount(i);
        }
    }
    // offset 0 was released above, so its point is no longer addressable
    assertThrows(IllegalArgumentException.class, () -> store.getNumericVector(0));
    // expected shingle for the surviving reference: newer values in rotated
    // slots, with the older value -shinglesize still in the last slot
    float[] test = new float[shinglesize];
    for (int i = 0; i < shinglesize; i++) {
        test[i] = -(i + shinglesize + 1);
    }
    test[shinglesize - 1] = -shinglesize;
    assertArrayEquals(store.getNumericVector(shinglesize - 1), test, 1e-6f);
    store.compact();
    // refill up to capacity; one slot is still pinned by the live reference
    for (int i = 2 * shinglesize; i < 4 * shinglesize - 1; i++) {
        store.add(new float[] { -i - 1 }, i);
    }
    assertThrows(IllegalStateException.class, () -> store.add(new float[] { -4 * shinglesize }, 0));
    for (int i = 0; i < 2 * shinglesize; i++) {
        if (i != shinglesize - 1) {
            store.decrementRefCount(i);
        }
    }
    // the pinned point is unchanged after the second round of releases
    assertEquals(store.toString(shinglesize - 1), Arrays.toString(test));
    for (int i = 4 * shinglesize; i < 6 * shinglesize - 1; i++) {
        store.add(new float[] { -i - 1 }, i);
    }
    // full again: the next add only succeeds after releasing the pinned slot
    assertThrows(IllegalStateException.class,
            () -> store.add(new float[] { -6 * shinglesize }, 6 * shinglesize - 1));
    store.decrementRefCount(shinglesize - 1);
    store.add(new float[] { -6 * shinglesize }, 6 * shinglesize - 1);
    store.decrementRefCount(shinglesize);
    store.compact();
}
@Test
void CompactionTest() {
    // Compaction with internal (non-rotated) shingling: shingle size 2,
    // store capacity 6, values 0, 1, 2, ... so shingles are predictable.
    int shinglesize = 2;
    PointStore store = new PointStore.Builder().capacity(6).dimensions(shinglesize).shingleSize(shinglesize)
            .indexCapacity(6).directLocationEnabled(false).internalShinglingEnabled(true).build();
    store.add(new float[] { 0 }, 0L);
    for (int i = 0; i < 5; i++) {
        store.add(new float[] { i + 1 }, 0L);
    }
    // the most recent shingle holds the last two inputs (5, 6)
    int finalIndex = store.add(new float[] { 4 + 2 }, 0L);
    assertArrayEquals(store.getNumericVector(finalIndex), new float[] { 5, 6 });
    // release two early references so later adds can reuse the freed space
    store.decrementRefCount(1);
    store.decrementRefCount(2);
    int index = store.add(new float[] { 7 }, 0L);
    assertArrayEquals(store.getNumericVector(index), new float[] { 6, 7 });
    store.decrementRefCount(index);
    assertTrue(store.size() < store.capacity);
    // the sliding window keeps advancing across release/reuse cycles
    index = store.add(new float[] { 8 }, 0L);
    assertArrayEquals(store.getNumericVector(index), new float[] { 7, 8 });
}
}
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import java.util.Objects;
import java.util.function.BiFunction;

import com.amazon.randomcutforest.tree.ITree;
/**
 * A visitor factory pairs two functions: one that builds a {@link Visitor} for
 * a given (tree, point) traversal, and one that lifts a per-tree result back
 * into the result expected by the caller.
 *
 * @param <R> the result type produced by the visitors this factory creates
 */
public class VisitorFactory<R> implements IVisitorFactory<R> {

    // builds a visitor for a traversal of the given tree with the given point
    private final BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor;

    // transforms a per-tree result into the caller-facing result
    private final BiFunction<ITree<?, ?>, R, R> liftResult;

    /**
     * Create a factory with an explicit result transformation.
     *
     * @param newVisitor builds a visitor for a (tree, point) pair; must not be null
     * @param liftResult maps a per-tree result to the final result; must not be null
     * @throws NullPointerException if either function is null
     */
    public VisitorFactory(BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor,
            BiFunction<ITree<?, ?>, R, R> liftResult) {
        // fail fast instead of deferring the NPE to the first traversal
        this.newVisitor = Objects.requireNonNull(newVisitor, "newVisitor must not be null");
        this.liftResult = Objects.requireNonNull(liftResult, "liftResult must not be null");
    }

    /**
     * Create a factory whose result transformation is the identity.
     *
     * @param newVisitor builds a visitor for a (tree, point) pair; must not be null
     */
    public VisitorFactory(BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor) {
        this(newVisitor, (tree, x) -> x);
    }

    @Override
    public Visitor<R> newVisitor(ITree<?, ?> tree, float[] point) {
        return newVisitor.apply(tree, point);
    }

    @Override
    public R liftResult(ITree<?, ?> tree, R result) {
        return liftResult.apply(tree, result);
    }
}
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static com.amazon.randomcutforest.CommonUtils.toDoubleArray;
import static com.amazon.randomcutforest.CommonUtils.toFloatArray;
import static com.amazon.randomcutforest.summarization.Summarizer.DEFAULT_SEPARATION_RATIO_FOR_MERGE;
import static java.lang.Math.max;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Collector;
import com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;
import com.amazon.randomcutforest.anomalydetection.AnomalyScoreVisitor;
import com.amazon.randomcutforest.anomalydetection.DynamicAttributionVisitor;
import com.amazon.randomcutforest.anomalydetection.DynamicScoreVisitor;
import com.amazon.randomcutforest.anomalydetection.SimulatedTransductiveScalarScoreVisitor;
import com.amazon.randomcutforest.config.Config;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.executor.AbstractForestTraversalExecutor;
import com.amazon.randomcutforest.executor.AbstractForestUpdateExecutor;
import com.amazon.randomcutforest.executor.IStateCoordinator;
import com.amazon.randomcutforest.executor.ParallelForestTraversalExecutor;
import com.amazon.randomcutforest.executor.ParallelForestUpdateExecutor;
import com.amazon.randomcutforest.executor.PointStoreCoordinator;
import com.amazon.randomcutforest.executor.SamplerPlusTree;
import com.amazon.randomcutforest.executor.SequentialForestTraversalExecutor;
import com.amazon.randomcutforest.executor.SequentialForestUpdateExecutor;
import com.amazon.randomcutforest.imputation.ConditionalSampleSummarizer;
import com.amazon.randomcutforest.imputation.ImputeVisitor;
import com.amazon.randomcutforest.inspect.NearNeighborVisitor;
import com.amazon.randomcutforest.interpolation.SimpleInterpolationVisitor;
import com.amazon.randomcutforest.returntypes.ConditionalTreeSample;
import com.amazon.randomcutforest.returntypes.ConvergingAccumulator;
import com.amazon.randomcutforest.returntypes.DensityOutput;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.returntypes.InterpolationMeasure;
import com.amazon.randomcutforest.returntypes.Neighbor;
import com.amazon.randomcutforest.returntypes.OneSidedConvergingDiVectorAccumulator;
import com.amazon.randomcutforest.returntypes.OneSidedConvergingDoubleAccumulator;
import com.amazon.randomcutforest.returntypes.RangeVector;
import com.amazon.randomcutforest.returntypes.SampleSummary;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.store.IPointStore;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.summarization.ICluster;
import com.amazon.randomcutforest.summarization.Summarizer;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.ITree;
import com.amazon.randomcutforest.tree.RandomCutTree;
import com.amazon.randomcutforest.util.ShingleBuilder;
/**
* The RandomCutForest class is the interface to the algorithms in this package,
* and includes methods for anomaly detection, anomaly detection with
* attribution, density estimation, imputation, and forecasting. A Random Cut
* Forest is a collection of Random Cut Trees and stream samplers. When an
* update call is made to a Random Cut Forest, each sampler is independently
* updated with the submitted (and if the point is accepted by the sampler, then
* the corresponding Random Cut Tree is also updated. Similarly, when an
* algorithm method is called, the Random Cut Forest proxies to the trees which
* implement the actual scoring logic. The Random Cut Forest then combines
* partial results into a final results.
*/
public class RandomCutForest {
/**
* Default sample size. This is the number of points retained by the stream
* sampler.
*/
public static final int DEFAULT_SAMPLE_SIZE = 256;
/**
* Default fraction used to compute the amount of points required by stream
* samplers before results are returned.
*/
public static final double DEFAULT_OUTPUT_AFTER_FRACTION = 0.25;
/**
* If the user doesn't specify an explicit time decay value, then we set it to
* the inverse of this coefficient times sample size.
*/
public static final double DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY = 10.0;
/**
* Default number of trees to use in the forest.
*/
public static final int DEFAULT_NUMBER_OF_TREES = 50;
/**
* By default, trees will not store sequence indexes.
*/
public static final boolean DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED = false;
/**
* By default, trees will accept every point until full.
*/
public static final double DEFAULT_INITIAL_ACCEPT_FRACTION = 1.0;
/**
* By default, the collection of points stored in the forest will increase from
* a small size, as needed to maximum capacity
*/
public static final boolean DEFAULT_DYNAMIC_RESIZING_ENABLED = true;
/**
* By default, shingling will be external
*/
public static final boolean DEFAULT_INTERNAL_SHINGLING_ENABLED = false;
/**
* By default, shingles will be a sliding window and not a cyclic buffer
*/
public static final boolean DEFAULT_INTERNAL_ROTATION_ENABLED = false;
/**
* By default, point stores will favor speed of size for larger shingle sizes
*/
public static final boolean DEFAULT_DIRECT_LOCATION_MAP = false;
/**
* Default floating-point precision for internal data structures.
*/
public static final Precision DEFAULT_PRECISION = Precision.FLOAT_64;
/**
* fraction of bounding boxes maintained by each tree
*/
public static final double DEFAULT_BOUNDING_BOX_CACHE_FRACTION = 1.0;
/**
* By default, nodes will not store center of mass.
*/
public static final boolean DEFAULT_CENTER_OF_MASS_ENABLED = false;
/**
* By default RCF is unaware of shingle size
*/
public static final int DEFAULT_SHINGLE_SIZE = 1;
/**
* Parallel execution is enabled by default.
*/
public static final boolean DEFAULT_PARALLEL_EXECUTION_ENABLED = false;
public static final boolean DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL = true;
public static final double DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION = 0.1;
public static final int DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED = 5;
/**
* Random number generator used by the forest.
*/
protected Random random;
/**
* The number of dimensions in the input data.
*/
protected final int dimensions;
/**
* The sample size used by stream samplers in this forest.
*/
protected final int sampleSize;
/**
* The shingle size (if known)
*/
protected final int shingleSize;
/**
* The input dimensions for known shingle size and internal shingling
*/
protected final int inputDimensions;
/**
* The number of points required by stream samplers before results are returned.
*/
protected final int outputAfter;
/**
* The number of trees in this forest.
*/
protected final int numberOfTrees;
/**
* The decay factor used by stream samplers in this forest.
*/
protected double timeDecay;
/**
* Store the time information
*/
protected final boolean storeSequenceIndexesEnabled;
/**
* enables internal shingling
*/
protected final boolean internalShinglingEnabled;
/**
* The following can be set between 0 and 1 (inclusive) to achieve tradeoff
* between smaller space, lower throughput and larger space, larger throughput
*/
protected final double boundingBoxCacheFraction;
/**
* Enable center of mass at internal nodes
*/
protected final boolean centerOfMassEnabled;
/**
* Enable parallel execution.
*/
protected final boolean parallelExecutionEnabled;
/**
* Number of threads to use in the thread pool if parallel execution is enabled.
*/
protected final int threadPoolSize;
/**
* A string to define an "execution mode" that can be used to set multiple
* configuration options. This field is not currently in use.
*/
protected String executionMode;
protected IStateCoordinator<?, float[]> stateCoordinator;
protected ComponentList<?, float[]> components;
/**
* This flag is initialized to false. It is set to true when all component
* models are ready.
*/
private boolean outputReady;
/**
* used for initializing the compact forests
*/
private final int initialPointStoreSize;
private final int pointStoreCapacity;
/**
* An implementation of forest traversal algorithms.
*/
protected AbstractForestTraversalExecutor traversalExecutor;
/**
* An implementation of forest update algorithms.
*/
protected AbstractForestUpdateExecutor<?, float[]> updateExecutor;
/**
 * Construct a forest from pre-built component models, e.g. when restoring a
 * forest from serialized state.
 *
 * @param builder          configuration for the forest
 * @param stateCoordinator the coordinator shared by all component models
 * @param components       the component models (sampler plus tree pairs)
 * @param random           the random number generator used by the forest
 * @param <P>              the internal point reference type
 */
public <P> RandomCutForest(Builder<?> builder, IStateCoordinator<P, float[]> stateCoordinator,
        ComponentList<P, float[]> components, Random random) {
    this(builder, false);
    // the previous messages referred to nonexistent parameter names
    // ("updateCoordinator", "componentModels"), making failures confusing
    checkNotNull(stateCoordinator, "stateCoordinator must not be null");
    checkNotNull(components, "components must not be null");
    checkNotNull(random, "random must not be null");
    this.stateCoordinator = stateCoordinator;
    this.components = components;
    this.random = random;
    initExecutors(stateCoordinator, components);
}
/**
 * Construct a new forest from a builder, creating fresh samplers and trees.
 *
 * @param builder A Builder instance giving the desired configuration.
 */
public RandomCutForest(Builder<?> builder) {
    this(builder, false);
    random = builder.getRandom();
    // a single point store is shared by all trees; trees hold integer
    // references into it rather than copies of the points
    PointStore tempStore = PointStore.builder().internalRotationEnabled(builder.internalRotationEnabled)
            .capacity(pointStoreCapacity).initialSize(initialPointStoreSize)
            .internalShinglingEnabled(internalShinglingEnabled).shingleSize(shingleSize).dimensions(dimensions)
            .build();
    IStateCoordinator<Integer, float[]> stateCoordinator = new PointStoreCoordinator<>(tempStore);
    ComponentList<Integer, float[]> components = new ComponentList<>(numberOfTrees);
    for (int i = 0; i < numberOfTrees; i++) {
        // each component gets its own seed drawn from the forest-level RNG
        // NOTE(review): trees use outputAfter(1); the forest-level outputAfter
        // gate appears to be applied elsewhere — confirm before changing
        ITree<Integer, float[]> tree = new RandomCutTree.Builder().capacity(sampleSize)
                .randomSeed(random.nextLong()).pointStoreView(tempStore)
                .boundingBoxCacheFraction(boundingBoxCacheFraction).centerOfMassEnabled(centerOfMassEnabled)
                .storeSequenceIndexesEnabled(storeSequenceIndexesEnabled).outputAfter(1).build();
        IStreamSampler<Integer> sampler = CompactSampler.builder().capacity(sampleSize).timeDecay(timeDecay)
                .randomSeed(random.nextLong()).storeSequenceIndexesEnabled(storeSequenceIndexesEnabled)
                .initialAcceptFraction(builder.initialAcceptFraction).build();
        components.add(new SamplerPlusTree<>(sampler, tree));
    }
    this.stateCoordinator = stateCoordinator;
    this.components = components;
    initExecutors(stateCoordinator, components);
}
/**
 * Install the traversal and update executors, choosing sequential or
 * thread-pool-backed implementations based on the parallelism setting.
 *
 * @param updateCoordinator the coordinator shared by all component models
 * @param components        the component models of this forest
 * @param <PointReference>  the internal point reference type
 */
protected <PointReference> void initExecutors(IStateCoordinator<PointReference, float[]> updateCoordinator,
        ComponentList<PointReference, float[]> components) {
    if (!parallelExecutionEnabled) {
        traversalExecutor = new SequentialForestTraversalExecutor(components);
        updateExecutor = new SequentialForestUpdateExecutor<>(updateCoordinator, components);
    } else {
        traversalExecutor = new ParallelForestTraversalExecutor(components, threadPoolSize);
        updateExecutor = new ParallelForestUpdateExecutor<>(updateCoordinator, components, threadPoolSize);
    }
}
/**
 * This constructor is responsible for initializing a forest's configuration
 * variables from a builder. The method signature contains a boolean argument
 * that isn't used. This argument exists only to create a distinct method
 * signature so that we can expose {@link #RandomCutForest(Builder)} as a
 * protected constructor.
 *
 * @param builder A Builder instance giving the desired random cut forest
 *                configuration.
 * @param notUsed This parameter is not used.
 * @throws IllegalArgumentException if any builder setting is out of range or
 *                                  settings are mutually inconsistent.
 */
protected RandomCutForest(Builder<?> builder, boolean notUsed) {
    // --- validation of builder settings ---
    checkArgument(builder.numberOfTrees > 0, "numberOfTrees must be greater than 0");
    checkArgument(builder.sampleSize > 0, "sampleSize must be greater than 0");
    builder.outputAfter.ifPresent(n -> {
        checkArgument(n > 0, "outputAfter must be greater than 0");
    });
    checkArgument(builder.dimensions > 0, "dimensions must be greater than 0");
    builder.timeDecay.ifPresent(timeDecay -> {
        checkArgument(timeDecay >= 0, "timeDecay must be greater than or equal to 0");
    });
    builder.threadPoolSize.ifPresent(n -> checkArgument((n > 0) || ((n == 0) && !builder.parallelExecutionEnabled),
            "threadPoolSize must be greater/equal than 0. To disable thread pool, set parallel execution to 'false'."));
    // with external shingling, dimensions must be a whole number of shingles
    checkArgument(builder.internalShinglingEnabled || builder.shingleSize == 1
            || builder.dimensions % builder.shingleSize == 0, "wrong shingle size");
    // checkArgument(!builder.internalShinglingEnabled || builder.shingleSize > 1,
    // " need shingle size > 1 for internal shingling");
    if (builder.internalRotationEnabled) {
        // rotation only makes sense on top of internal shingling
        checkArgument(builder.internalShinglingEnabled, " enable internal shingling");
    }
    builder.initialPointStoreSize.ifPresent(n -> {
        checkArgument(n > 0, "initial point store must be greater than 0");
        // a small initial store must be allowed to grow to full capacity
        checkArgument(n > builder.sampleSize * builder.numberOfTrees || builder.dynamicResizingEnabled,
                " enable dynamic resizing ");
    });
    checkArgument(builder.boundingBoxCacheFraction >= 0 && builder.boundingBoxCacheFraction <= 1,
            "incorrect cache fraction range");
    // --- copy settings, applying defaults for unset optionals ---
    numberOfTrees = builder.numberOfTrees;
    sampleSize = builder.sampleSize;
    outputAfter = builder.outputAfter.orElse(max(1, (int) (sampleSize * DEFAULT_OUTPUT_AFTER_FRACTION)));
    internalShinglingEnabled = builder.internalShinglingEnabled;
    shingleSize = builder.shingleSize;
    dimensions = builder.dimensions;
    timeDecay = builder.timeDecay.orElse(1.0 / (DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY * sampleSize));
    storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;
    centerOfMassEnabled = builder.centerOfMassEnabled;
    parallelExecutionEnabled = builder.parallelExecutionEnabled;
    boundingBoxCacheFraction = builder.boundingBoxCacheFraction;
    // direct location mapping is forced when there is no shingling
    builder.directLocationMapEnabled = builder.directLocationMapEnabled || shingleSize == 1;
    // with internal shingling callers submit raw (un-shingled) points
    inputDimensions = (internalShinglingEnabled) ? dimensions / shingleSize : dimensions;
    pointStoreCapacity = max(sampleSize * numberOfTrees + 1, 2 * sampleSize);
    initialPointStoreSize = builder.initialPointStoreSize.orElse(2 * sampleSize);
    if (parallelExecutionEnabled) {
        // default: leave one processor free for the rest of the application
        threadPoolSize = builder.threadPoolSize.orElse(Runtime.getRuntime().availableProcessors() - 1);
    } else {
        threadPoolSize = 0;
    }
}
/**
 * @return a new RandomCutForest builder with default settings.
 */
public static Builder builder() {
    // NOTE(review): returns the raw Builder type, kept for source compatibility
    return new Builder();
}
/**
 * Build a RandomCutForest with the given dimensionality and random seed; all
 * other settings take their default values.
 *
 * @param dimensions The number of dimensions in the input data.
 * @param randomSeed The seed for the forest's random number generator.
 * @return a new RandomCutForest with optional arguments set to default values.
 */
public static RandomCutForest defaultForest(int dimensions, long randomSeed) {
    return builder().randomSeed(randomSeed).dimensions(dimensions).build();
}
/**
 * Build a RandomCutForest with the given dimensionality; all other settings,
 * including the random seed, take their default values.
 *
 * @param dimensions The number of dimensions in the input data.
 * @return a new RandomCutForest with optional arguments set to default values.
 */
public static RandomCutForest defaultForest(int dimensions) {
    return builder().dimensions(dimensions).build();
}
/**
 * @return the number of component trees maintained by this forest.
 */
public int getNumberOfTrees() {
    return numberOfTrees;
}
/**
 * @return the sample size (points retained) used by each stream sampler.
 */
public int getSampleSize() {
    return sampleSize;
}
/**
 * @return the shingle size used by the point store (1 when RCF is unaware of
 *         shingling).
 */
public int getShingleSize() {
    return shingleSize;
}
/**
 * @return the number of points the samplers must observe before the forest
 *         starts returning results.
 */
public int getOutputAfter() {
    return outputAfter;
}
/**
 * @return the number of dimensions in the (shingled) data points accepted by
 *         this forest.
 */
public int getDimensions() {
    return dimensions;
}
/**
 * @return the decay factor used by stream samplers in this forest.
 */
public double getTimeDecay() {
    return timeDecay;
}
/**
 * @return true if points are stored together with their sequence indexes,
 *         false otherwise.
 */
public boolean isStoreSequenceIndexesEnabled() {
    return storeSequenceIndexesEnabled;
}
/**
 * Historically users could select the floating-point precision used to store
 * points internally; the current implementation always stores points in
 * single precision.
 *
 * @return {@link Precision#FLOAT_32}, the precision used internally to store
 *         points.
 */
public Precision getPrecision() {
    return Precision.FLOAT_32;
}
/**
 * @return true always.
 * @deprecated forests are always compact; this flag is kept only for
 *             backwards compatibility.
 */
@Deprecated
public boolean isCompact() {
    return true;
}
/**
 * @return true if the forest shingles input points internally, false if
 *         callers are expected to submit already-shingled points.
 */
public boolean isInternalShinglingEnabled() {
    return internalShinglingEnabled;
}
/**
 * @return true if tree nodes retain the center of mass, false otherwise.
 */
public boolean isCenterOfMassEnabled() {
    return centerOfMassEnabled;
}
/**
 * @return true if forest traversals and updates run on a thread pool, false
 *         if they run sequentially.
 */
public boolean isParallelExecutionEnabled() {
    return parallelExecutionEnabled;
}
/**
 * @return the (approximate) fraction of bounding boxes each tree caches, in
 *         [0, 1]; trades memory for throughput.
 */
public double getBoundingBoxCacheFraction() {
    return boundingBoxCacheFraction;
}
/**
 * @return the number of threads in the thread pool if parallel execution is
 *         enabled, 0 otherwise.
 */
public int getThreadPoolSize() {
    return threadPoolSize;
}
/**
 * @return the state coordinator shared by this forest's component models.
 */
public IStateCoordinator<?, ?> getUpdateCoordinator() {
    return stateCoordinator;
}
/**
 * @return the component models (sampler plus tree pairs) of this forest.
 */
public ComponentList<?, ?> getComponents() {
    return components;
}
/**
 * used for scoring and other function, expands to a shingled point in either
 * case performs a clean copy
 *
 * @param point input point
 * @return a shingled copy or a clean copy; never the caller's array
 */
public float[] transformToShingledPoint(float[] point) {
    // delegates to the point store, which owns the shingling state
    return stateCoordinator.getStore().transformToShingledPoint(point);
}
/**
 * does the pointstore use rotated (cyclic-buffer) shingles
 *
 * @return true/false based on the point store configuration
 */
public boolean isRotationEnabled() {
    return stateCoordinator.getStore().isInternalRotationEnabled();
}
/**
 * Transform missing-value indices given over an input point into the
 * corresponding indices of the shingled point. When internal shingling is
 * off, or the indices already refer to a full shingled point, they are
 * returned unchanged.
 *
 * @param indexList input array of missing-value indices
 * @param length    length of the input point the indices refer to
 * @return the indices mapped into shingled-point coordinates
 */
protected int[] transformIndices(int[] indexList, int length) {
    if (internalShinglingEnabled && length == inputDimensions) {
        return stateCoordinator.getStore().transformIndices(indexList);
    }
    return indexList;
}
/**
 * Return the most recent shingled point assembled by the point store. Only
 * valid when internal shingling is enabled.
 *
 * @return the last known shingled point seen
 * @throws IllegalArgumentException if internal shingling is not enabled
 */
public float[] lastShingledPoint() {
    // the old message "incorrect use" gave callers no hint about the cause
    checkArgument(internalShinglingEnabled, "lastShingledPoint() requires internal shingling to be enabled");
    return stateCoordinator.getStore().getInternalShingle();
}
/**
 * @return the sequence index of the next point, as tracked by the point
 *         store. If internal shingling is not enabled, this corresponds to
 *         the number of updates.
 */
public long nextSequenceIndex() {
    return stateCoordinator.getStore().getNextSequenceIndex();
}
/**
 * Update the forest with the given point. The point is submitted to each
 * sampler in the forest. If the sampler accepts the point, the point is
 * submitted to the update method in the corresponding Random Cut Tree.
 *
 * @param point The point used to update the forest; converted to float
 *              before submission.
 */
public void update(double[] point) {
    update(toFloatArray(point));
}
/**
 * Update the forest with the given point. With internal shingling enabled
 * the point must have {@code inputDimensions} entries; otherwise it must
 * have {@code dimensions} entries.
 *
 * @param point The point used to update the forest; must not be null.
 */
public void update(float[] point) {
    checkNotNull(point, "point must not be null");
    checkArgument(internalShinglingEnabled || point.length == dimensions,
            String.format("point.length must equal %d", dimensions));
    checkArgument(!internalShinglingEnabled || point.length == inputDimensions,
            String.format("point.length must equal %d for internal shingling", inputDimensions));
    updateExecutor.update(point);
}
/**
 * Update the forest with the given point and a timestamp. The point is
 * submitted to each sampler in the forest as if that timestamp was the
 * correct stamp. storeSequenceIndexes must be false since the algorithm will
 * not verify the correctness of the timestamp.
 *
 * @param point       The point used to update the forest; converted to float
 *                    before submission.
 * @param sequenceNum The timestamp of the corresponding point.
 */
public void update(double[] point, long sequenceNum) {
    checkNotNull(point, "point must not be null");
    update(toFloatArray(point), sequenceNum);
}
/**
 * Update the forest with the given point at an explicit sequence number.
 * Cannot be used together with internal shingling.
 *
 * @param point       The point used to update the forest; must not be null.
 * @param sequenceNum The timestamp of the corresponding point.
 */
public void update(float[] point, long sequenceNum) {
    checkNotNull(point, "point must not be null");
    checkArgument(!internalShinglingEnabled, "cannot be applied with internal shingling");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    updateExecutor.update(point, sequenceNum);
}
/**
 * Update the forest such that each tree caches a fraction of the bounding
 * boxes. This allows for a tradeoff between speed and storage, and can be
 * changed on the fly.
 *
 * @param cacheFraction The (approximate) fraction of bounding boxes used in
 *                      caching; must be in [0, 1].
 */
public void setBoundingBoxCacheFraction(double cacheFraction) {
    checkArgument(0 <= cacheFraction && cacheFraction <= 1, "cacheFraction must be between 0 and 1 (inclusive)");
    // propagate the new setting to every component model
    updateExecutor.getComponents().forEach(c -> c.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, cacheFraction));
}
/**
 * changes the setting of time dependent sampling on the fly
 *
 * @param timeDecay new value of the sampling decay rate; must be
 *                  non-negative
 */
public void setTimeDecay(double timeDecay) {
    checkArgument(0 <= timeDecay, "timeDecay must be greater than or equal to 0");
    this.timeDecay = timeDecay;
    // propagate the new setting to every component model
    updateExecutor.getComponents().forEach(c -> c.setConfig(Config.TIME_DECAY, timeDecay));
}
/**
 * Visit each of the trees in the forest and combine the individual results
 * into an aggregate result. A visitor is constructed for each tree using the
 * visitor factory, and then submitted to
 * {@link RandomCutTree#traverse(float[], IVisitorFactory)}. The results from
 * all the trees are combined using the accumulator and then transformed
 * using the finisher before being returned. Trees are visited in parallel
 * when parallel execution is enabled; otherwise they are visited
 * sequentially.
 *
 * @param point          The point that defines the traversal path.
 * @param visitorFactory A factory method which is invoked for each tree to
 *                       construct a visitor.
 * @param accumulator    A function that combines the results from individual
 *                       trees into an aggregate result.
 * @param finisher       A function called on the aggregate result in order to
 *                       produce the final result.
 * @param <R>            The visitor result type. This is the type that will be
 *                       returned after traversing each individual tree.
 * @param <S>            The final type, after any final normalization at the
 *                       forest level.
 * @return The aggregated and finalized result after sending a visitor through
 *         each tree in the forest.
 */
public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,
        Function<R, S> finisher) {
    checkNotNull(point, "point must not be null");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    checkNotNull(visitorFactory, "visitorFactory must not be null");
    checkNotNull(accumulator, "accumulator must not be null");
    checkNotNull(finisher, "finisher must not be null");
    // the executor (sequential or parallel) implements the actual traversal
    return traversalExecutor.traverseForest(point, visitorFactory, accumulator, finisher);
}
/**
 * Traverse every tree in the forest with a visitor produced by the supplied
 * factory (see {@link RandomCutTree#traverse(float[], IVisitorFactory)}) and
 * gather the per-tree results with the given {@link java.util.stream.Collector}.
 * Trees may be visited in parallel via
 * {@link java.util.Collection#parallelStream()}.
 *
 * @param point          the point defining the traversal path
 * @param visitorFactory invoked once per tree to construct its visitor
 * @param collector      aggregates individual tree results into a final result
 * @param <R>            the per-tree visitor result type
 * @param <S>            the final result type after forest-level normalization
 * @return the collected result of visiting every tree
 */
public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {
    // validate all arguments up front so failures carry a clear message
    checkNotNull(point, "point must not be null");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    checkNotNull(visitorFactory, "visitorFactory must not be null");
    checkNotNull(collector, "collector must not be null");
    return traversalExecutor.traverseForest(point, visitorFactory, collector);
}
/**
 * Sequentially traverse the trees of the forest, feeding each per-tree visitor
 * result (see {@link RandomCutTree#traverse(float[], IVisitorFactory)}) into
 * the {@link ConvergingAccumulator}; traversal stops early once the
 * accumulator reports convergence. The finisher is then applied to the
 * converged aggregate.
 *
 * @param point          the point defining the traversal path
 * @param visitorFactory invoked once per tree to construct its visitor
 * @param accumulator    combines per-tree results and signals when the
 *                       aggregate has converged and no more trees need to be
 *                       visited
 * @param finisher       applied to the aggregate to produce the final answer
 * @param <R>            the per-tree visitor result type
 * @param <S>            the final result type after forest-level normalization
 * @return the aggregated, finished result of the (possibly truncated) traversal
 */
public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,
        ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {
    // validate all arguments up front so failures carry a clear message
    checkNotNull(point, "point must not be null");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    checkNotNull(visitorFactory, "visitorFactory must not be null");
    checkNotNull(accumulator, "accumulator must not be null");
    checkNotNull(finisher, "finisher must not be null");
    return traversalExecutor.traverseForest(point, visitorFactory, accumulator, finisher);
}
/**
 * Traverse every tree in the forest with a multi-visitor produced by the
 * supplied factory (see
 * {@link RandomCutTree#traverseMulti(float[], IMultiVisitorFactory)}),
 * combine the per-tree results with the accumulator, and apply the finisher
 * to the combined value.
 *
 * @param point          the point defining the traversal path
 * @param visitorFactory invoked once per tree to construct its multi-visitor
 * @param accumulator    combines individual tree results into an aggregate
 * @param finisher       applied to the aggregate to produce the final answer
 * @param <R>            the per-tree visitor result type
 * @param <S>            the final result type after forest-level normalization
 * @return the aggregated, finished result of visiting every tree
 */
public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
        BinaryOperator<R> accumulator, Function<R, S> finisher) {
    // validate all arguments up front so failures carry a clear message
    checkNotNull(point, "point must not be null");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    checkNotNull(visitorFactory, "visitorFactory must not be null");
    checkNotNull(accumulator, "accumulator must not be null");
    checkNotNull(finisher, "finisher must not be null");
    return traversalExecutor.traverseForestMulti(point, visitorFactory, accumulator, finisher);
}
/**
 * Traverse every tree in the forest with a multi-visitor produced by the
 * supplied factory (see
 * {@link RandomCutTree#traverseMulti(float[], IMultiVisitorFactory)}) and
 * gather the per-tree results with the given {@link java.util.stream.Collector}.
 * Trees may be visited in parallel via
 * {@link java.util.Collection#parallelStream()}.
 *
 * @param point          the point defining the traversal path
 * @param visitorFactory invoked once per tree to construct its multi-visitor
 * @param collector      aggregates individual tree results into a final result
 * @param <R>            the per-tree visitor result type
 * @param <S>            the final result type after forest-level normalization
 * @return the collected result of visiting every tree
 */
public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,
        Collector<R, ?, S> collector) {
    // validate all arguments up front so failures carry a clear message
    checkNotNull(point, "point must not be null");
    checkArgument(point.length == dimensions, () -> "point.length must equal to " + dimensions);
    checkNotNull(visitorFactory, "visitorFactory must not be null");
    checkNotNull(collector, "collector must not be null");
    return traversalExecutor.traverseForestMulti(point, visitorFactory, collector);
}
/**
 * Compute an anomaly score for the given point. The point being scored is
 * compared with the points in the sample to compute a measure of how anomalous
 * it is. Scores are greater than 0, with higher scores corresponding to being
 * more anomalous. A threshold of 1.0 is commonly used to distinguish anomalous
 * points from non-anomalous ones.
 * <p>
 * See {@link AnomalyScoreVisitor} for more details about the anomaly score
 * algorithm.
 *
 * @param point The point being scored.
 * @return an anomaly score for the given point.
 * @deprecated use {@link #getAnomalyScore(float[])} instead.
 */
@Deprecated
public double getAnomalyScore(double[] point) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return getAnomalyScore(toFloatArray(point));
}
/**
 * Compute the forest-averaged anomaly score for the given point; see
 * {@link AnomalyScoreVisitor} for the per-tree scoring algorithm. Returns 0
 * while the forest has not yet seen enough points to produce output.
 *
 * @param point the point being scored
 * @return the anomaly score averaged over all trees
 */
public double getAnomalyScore(float[] point) {
    if (!isOutputReady()) {
        return 0.0;
    }
    IVisitorFactory<Double> scoreVisitorFactory = (tree, treePoint) -> new AnomalyScoreVisitor(
            tree.projectToTree(treePoint), tree.getMass());
    BinaryOperator<Double> sum = Double::sum;
    Function<Double, Double> average = total -> total / numberOfTrees;
    return traverseForest(transformToShingledPoint(point), scoreVisitorFactory, sum, average);
}
/**
 * Anomaly score evaluated sequentially with option of early stopping the early
 * stopping parameter precision gives an approximate solution in the range
 * (1-precision)*score(q)- precision, (1+precision)*score(q) + precision for the
 * score of a point q. In this function z is hardcoded to 0.1. If this function
 * is used, then not all the trees will be used in evaluation (but they have to
 * be updated anyways, because they may be used for the next q). The advantage
 * is that "almost certainly" anomalies/non-anomalies can be detected easily
 * with few trees.
 *
 * @param point input point q
 * @return anomaly score with early stopping with z=0.1
 * @deprecated use {@link #getApproximateAnomalyScore(float[])} instead.
 */
@Deprecated
public double getApproximateAnomalyScore(double[] point) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return getApproximateAnomalyScore(toFloatArray(point));
}
/**
 * Anomaly score with early stopping: trees are visited sequentially and
 * traversal halts once the one-sided converging accumulator accepts enough
 * values. Returns 0 while the forest has not yet seen enough points to
 * produce output.
 *
 * @param point the point being scored
 * @return the approximate anomaly score, averaged over the trees actually used
 */
public double getApproximateAnomalyScore(float[] point) {
    if (!isOutputReady()) {
        return 0.0;
    }
    IVisitorFactory<Double> scoreVisitorFactory = (tree, treePoint) -> new AnomalyScoreVisitor(
            tree.projectToTree(treePoint), tree.getMass());
    ConvergingAccumulator<Double> convergingAccumulator = new OneSidedConvergingDoubleAccumulator(
            DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,
            DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);
    // normalize by the number of trees that were actually consulted
    Function<Double, Double> average = total -> total / convergingAccumulator.getValuesAccepted();
    return traverseForest(transformToShingledPoint(point), scoreVisitorFactory, convergingAccumulator, average);
}
/**
 * Compute an anomaly score attribution DiVector for the given point. The point
 * being scored is compared with the points in the sample to compute a measure
 * of how anomalous it is. The result DiVector will contain an anomaly score in
 * both the positive and negative directions for each dimension of the data.
 * <p>
 * See {@link AnomalyAttributionVisitor} for more details about the anomaly
 * score algorithm.
 *
 * @param point The point being scored.
 * @return an anomaly score for the given point.
 */
public DiVector getAnomalyAttribution(double[] point) {
    // NOTE(review): unlike the other double[] overloads in this class, this one
    // is not annotated @Deprecated — confirm whether that is intentional
    return getAnomalyAttribution(toFloatArray(point));
}
/**
 * Compute the per-dimension anomaly attribution for the given point, averaged
 * over all trees. The L1 norm of the result equals (up to floating point
 * summation order) the value returned by {@link #getAnomalyScore(float[])}.
 * Returns an all-zero DiVector while the forest is not ready to output.
 *
 * @param point the point being scored
 * @return the directional anomaly attribution
 */
public DiVector getAnomalyAttribution(float[] point) {
    if (!isOutputReady()) {
        return new DiVector(dimensions);
    }
    IVisitorFactory<DiVector> attributionVisitorFactory = new VisitorFactory<>(
            (tree, treePoint) -> new AnomalyAttributionVisitor(tree.projectToTree(treePoint), tree.getMass()),
            (tree, result) -> result.lift(tree::liftFromTree));
    BinaryOperator<DiVector> add = DiVector::addToLeft;
    Function<DiVector, DiVector> average = total -> total.scale(1.0 / numberOfTrees);
    return traverseForest(transformToShingledPoint(point), attributionVisitorFactory, add, average);
}
/**
 * Sequential version of attribution corresponding to getAnomalyScoreSequential;
 * The high-low sum in the result should be the same as the scalar score
 * computed by {@link #getAnomalyScore(double[])}.
 *
 * @param point The point being scored.
 * @return anomaly attribution for the given point.
 */
public DiVector getApproximateAnomalyAttribution(double[] point) {
    // NOTE(review): unlike the other double[] overloads in this class, this one
    // is not annotated @Deprecated — confirm whether that is intentional
    return getApproximateAnomalyAttribution(toFloatArray(point));
}
/**
 * Anomaly attribution with early stopping: trees are consulted until the
 * one-sided converging accumulator accepts enough values, and the aggregate is
 * scaled by the number of trees actually used. Returns an all-zero DiVector
 * while the forest is not ready to output.
 *
 * @param point the point being scored
 * @return the approximate directional anomaly attribution
 */
public DiVector getApproximateAnomalyAttribution(float[] point) {
    if (!isOutputReady()) {
        return new DiVector(dimensions);
    }
    IVisitorFactory<DiVector> attributionVisitorFactory = new VisitorFactory<>(
            (tree, treePoint) -> new AnomalyAttributionVisitor(tree.projectToTree(treePoint), tree.getMass()),
            (tree, result) -> result.lift(tree::liftFromTree));
    ConvergingAccumulator<DiVector> convergingAccumulator = new OneSidedConvergingDiVectorAccumulator(dimensions,
            DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,
            DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);
    Function<DiVector, DiVector> average = total -> total.scale(1.0 / convergingAccumulator.getValuesAccepted());
    return traverseForest(transformToShingledPoint(point), attributionVisitorFactory, convergingAccumulator,
            average);
}
/**
 * Compute a density estimate at the given point.
 * <p>
 * See {@link SimpleInterpolationVisitor} and {@link DensityOutput} for more
 * details about the density computation.
 *
 * @param point The point where the density estimate is made.
 * @return A density estimate.
 * @deprecated use {@link #getSimpleDensity(float[])} instead.
 */
@Deprecated
public DensityOutput getSimpleDensity(double[] point) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return getSimpleDensity(toFloatArray(point));
}
/**
 * Compute a density estimate at the given point by traversing every tree with
 * a {@link SimpleInterpolationVisitor} and collecting the interpolation
 * measures into a {@link DensityOutput}. Returns an empty DensityOutput until
 * all samplers are full, since density estimates need a full sample.
 *
 * @param point the point where the density estimate is made
 * @return a density estimate (empty until the samplers are full)
 */
public DensityOutput getSimpleDensity(float[] point) {
    // density estimation should use sufficiently larger number of samples
    // and only return answers when full
    if (!samplersFull()) {
        return new DensityOutput(dimensions, sampleSize);
    }
    IVisitorFactory<InterpolationMeasure> visitorFactory = new VisitorFactory<>((tree,
            y) -> new SimpleInterpolationVisitor(tree.projectToTree(y), sampleSize, 1.0, centerOfMassEnabled),
            (tree, x) -> x.lift(tree::liftFromTree));
    Collector<InterpolationMeasure, ?, InterpolationMeasure> collector = InterpolationMeasure.collector(dimensions,
            sampleSize, numberOfTrees);
    return new DensityOutput(traverseForest(transformToShingledPoint(point), visitorFactory, collector));
}
/**
 * Given a point with missing values, return a new point with the missing values
 * imputed. Each tree in the forest individual produces an imputed value. For
 * 1-dimensional points, the median imputed value is returned. For points with
 * more than 1 dimension, the imputed point with the 25th percentile anomaly
 * score is returned.
 *
 * The first function exposes the distribution.
 *
 * @param point A point with missing values.
 * @param numberOfMissingValues The number of missing values in the point.
 * @param missingIndexes An array containing the indexes of the missing
 *                              values in the point. The length of the array
 *                              should be greater than or equal to the number of
 *                              missing values.
 * @param centrality a parameter that provides a central estimation
 *                              versus a more random estimation
 * @return A point with the missing values imputed.
 */
public List<ConditionalTreeSample> getConditionalField(float[] point, int numberOfMissingValues,
        int[] missingIndexes, double centrality) {
    checkArgument(numberOfMissingValues > 0, "numberOfMissingValues must be greater than 0");
    checkNotNull(missingIndexes, "missingIndexes must not be null");
    checkArgument(numberOfMissingValues <= missingIndexes.length,
            "numberOfMissingValues must be less than or equal to missingIndexes.length");
    checkArgument(centrality >= 0 && centrality <= 1, "centrality needs to be in range [0,1]");
    // an empty list (rather than an exception) signals "no output yet"
    if (!isOutputReady()) {
        return new ArrayList<>();
    }
    // map input-space indices into the (possibly shingled) internal space
    int[] liftedIndices = transformIndices(missingIndexes, point.length);
    IMultiVisitorFactory<ConditionalTreeSample> visitorFactory = (tree, y) -> new ImputeVisitor(y,
            tree.projectToTree(y), liftedIndices, tree.projectMissingIndices(liftedIndices), centrality,
            tree.getRandomSeed());
    return traverseForestMulti(transformToShingledPoint(point), visitorFactory, ConditionalTreeSample.collector);
}
/**
 * Summarize the conditional distribution of the missing coordinates of a
 * point, as sampled across the trees of the forest via
 * {@link #getConditionalField(float[], int, int[], double)}.
 *
 * @param point                 a point with missing values
 * @param numberOfMissingValues the number of missing values in the point
 * @param missingIndexes        indexes of the missing values; length must be
 *                              at least numberOfMissingValues
 * @param centrality            in [0,1]; trades central estimation (1) against
 *                              a more random estimation (0)
 * @return a summary of the sampled conditional field (empty summary until the
 *         forest is ready to output)
 */
public SampleSummary getConditionalFieldSummary(float[] point, int numberOfMissingValues, int[] missingIndexes,
        double centrality) {
    // NOTE(review): this accepts numberOfMissingValues == 0, but the delegated
    // getConditionalField() requires a value > 0, so 0 fails downstream with a
    // misleading message — confirm the intended contract
    checkArgument(numberOfMissingValues >= 0, "cannot be negative");
    checkNotNull(missingIndexes, "missingIndexes must not be null");
    checkArgument(numberOfMissingValues <= missingIndexes.length,
            "numberOfMissingValues must be less than or equal to missingIndexes.length");
    checkArgument(centrality >= 0 && centrality <= 1, "centrality needs to be in range [0,1]");
    checkArgument(point != null, " cannot be null");
    if (!isOutputReady()) {
        return new SampleSummary(dimensions);
    }
    int[] liftedIndices = transformIndices(missingIndexes, point.length);
    ConditionalSampleSummarizer summarizer = new ConditionalSampleSummarizer(liftedIndices,
            transformToShingledPoint(point), centrality);
    return summarizer.summarize(getConditionalField(point, numberOfMissingValues, missingIndexes, centrality));
}
/**
 * Impute the missing values of a point using the median of the conditional
 * field summary, with centrality fixed at 1.0 (most central estimate).
 *
 * @param point                 a point with missing values
 * @param numberOfMissingValues the number of missing values in the point
 * @param missingIndexes        indexes of the missing values
 * @return the point with its missing values filled in
 */
public float[] imputeMissingValues(float[] point, int numberOfMissingValues, int[] missingIndexes) {
    SampleSummary summary = getConditionalFieldSummary(point, numberOfMissingValues, missingIndexes, 1.0);
    return summary.median;
}
/**
 * Given a point with missing values, return a new point with the missing
 * values imputed. Each tree in the forest individually produces an imputed
 * value; the median of the conditional field summary is returned.
 *
 * @param point                 a point with missing values
 * @param numberOfMissingValues the number of missing values in the point
 * @param missingIndexes        indexes of the missing values; length must be
 *                              at least numberOfMissingValues
 * @return the point with its missing values filled in
 * @deprecated use {@link #imputeMissingValues(float[], int, int[])} instead.
 */
@Deprecated
public double[] imputeMissingValues(double[] point, int numberOfMissingValues, int[] missingIndexes) {
    float[] imputed = imputeMissingValues(toFloatArray(point), numberOfMissingValues, missingIndexes);
    return toDoubleArray(imputed);
}
/**
 * Given an initial shingled point, extrapolate the stream into the future to
 * produce a forecast. This method is intended to be called when the input data
 * is being shingled, and it works by imputing forward one shingle block at a
 * time.
 *
 * @param point The starting point for extrapolation.
 * @param horizon The number of blocks to forecast.
 * @param blockSize The number of entries in a block. This should be the same
 *                     as the size of a single input to the shingle.
 * @param cyclic If true then the shingling is cyclic, otherwise it's a
 *                     sliding shingle.
 * @param shingleIndex If cyclic is true, then this should be the current index
 *                     in the shingle. That is, the index where the next point
 *                     added to the shingle would be written. If cyclic is false
 *                     then this value is not used.
 * @return a forecasted time series.
 * @deprecated use {@link #extrapolateBasic(float[], int, int, boolean, int)} instead.
 */
@Deprecated
public double[] extrapolateBasic(double[] point, int horizon, int blockSize, boolean cyclic, int shingleIndex) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return toDoubleArray(extrapolateBasic(toFloatArray(point), horizon, blockSize, cyclic, shingleIndex));
}
/**
 * Extrapolate the stream forward from a shingled point, returning only the
 * forecast values (centrality fixed at 1.0, i.e. the most central estimate).
 *
 * @param point        the starting shingled point
 * @param horizon      the number of blocks to forecast
 * @param blockSize    the number of entries in a block
 * @param cyclic       true for a cyclic shingle, false for a sliding shingle
 * @param shingleIndex the current write index when cyclic; unused otherwise
 * @return the forecasted time series values
 */
public float[] extrapolateBasic(float[] point, int horizon, int blockSize, boolean cyclic, int shingleIndex) {
    RangeVector forecast = extrapolateWithRanges(point, horizon, blockSize, cyclic, shingleIndex, 1.0);
    return forecast.values;
}
// the following is provided for maximum flexibility from the calling entity;
// but likely use is extrapolateFromShingle(), which abstracts away rotation
// etc.
/**
 * Extrapolate the stream forward one block at a time, returning the forecast
 * values together with lower/upper ranges for each forecasted entry.
 *
 * @param point        the starting shingled point
 * @param horizon      the number of blocks to forecast
 * @param blockSize    the number of entries in a block; must divide dimensions
 * @param cyclic       true for a cyclic shingle, false for a sliding shingle
 * @param shingleIndex the current write index when cyclic; unused otherwise
 * @param centrality   in [0,1]; trades central estimation against randomness
 * @return the forecast with per-entry ranges
 */
public RangeVector extrapolateWithRanges(float[] point, int horizon, int blockSize, boolean cyclic,
        int shingleIndex, double centrality) {
    checkArgument(0 < blockSize && blockSize < dimensions,
            "blockSize must be between 0 and dimensions (exclusive)");
    checkArgument(dimensions % blockSize == 0, "dimensions must be evenly divisible by blockSize");
    checkArgument(0 <= shingleIndex && shingleIndex < dimensions / blockSize,
            "shingleIndex must be between 0 (inclusive) and dimensions / blockSize");
    // one forecast entry per block element per horizon step
    RangeVector forecast = new RangeVector(blockSize * horizon);
    int[] missingIndexes = new int[blockSize];
    // work on a copy so the caller's point is never mutated
    float[] workingPoint = Arrays.copyOf(point, dimensions);
    if (cyclic) {
        extrapolateBasicCyclic(forecast, horizon, blockSize, shingleIndex, workingPoint, missingIndexes,
                centrality);
    } else {
        extrapolateBasicSliding(forecast, horizon, blockSize, workingPoint, missingIndexes, centrality);
    }
    return forecast;
}
// external management of shingle; can function for both internal and external
// shingling
// however blocksize has to be externally managed
/**
 * Extrapolate forward from an externally supplied shingle, deriving the
 * rotation setting and current shingle position from the forest's own state.
 *
 * @param shingle    the current shingle contents
 * @param horizon    the number of blocks to forecast
 * @param blockSize  the number of entries in a block (caller-managed)
 * @param centrality in [0,1]; trades central estimation against randomness
 * @return the forecast with per-entry ranges
 */
public RangeVector extrapolateFromShingle(float[] shingle, int horizon, int blockSize, double centrality) {
    return extrapolateWithRanges(shingle, horizon, blockSize, isRotationEnabled(),
            ((int) nextSequenceIndex()) % shingleSize, centrality);
}
/**
 * Given an initial shingled point, extrapolate the stream into the future to
 * produce a forecast. This method is intended to be called when the input data
 * is being shingled, and it works by imputing forward one shingle block at a
 * time. If the shingle is cyclic, then this method uses 0 as the shingle index.
 *
 * @param point The starting point for extrapolation.
 * @param horizon The number of blocks to forecast.
 * @param blockSize The number of entries in a block. This should be the same as
 *                  the size of a single input to the shingle.
 * @param cyclic If true then the shingling is cyclic, otherwise it's a
 *                  sliding shingle.
 * @return a forecasted time series.
 * @deprecated use {@link #extrapolateBasic(float[], int, int, boolean)} instead.
 */
@Deprecated
public double[] extrapolateBasic(double[] point, int horizon, int blockSize, boolean cyclic) {
    // shingleIndex defaults to 0 for the cyclic case
    return extrapolateBasic(point, horizon, blockSize, cyclic, 0);
}
/**
 * Extrapolate the stream forward from a shingled point, using 0 as the
 * shingle index when the shingle is cyclic.
 *
 * @param point     the starting shingled point
 * @param horizon   the number of blocks to forecast
 * @param blockSize the number of entries in a block
 * @param cyclic    true for a cyclic shingle, false for a sliding shingle
 * @return the forecasted time series values
 */
public float[] extrapolateBasic(float[] point, int horizon, int blockSize, boolean cyclic) {
    return extrapolateBasic(point, horizon, blockSize, cyclic, 0);
}
/**
 * Given a shingle builder, extrapolate the stream into the future to produce a
 * forecast. This method assumes you are passing in the shingle builder used to
 * preprocess points before adding them to this forest.
 *
 * @param builder The shingle builder used to process points before adding them
 *                to the forest.
 * @param horizon The number of blocks to forecast.
 * @return a forecasted time series.
 * @deprecated use the float[]-based extrapolation methods instead.
 */
@Deprecated
public double[] extrapolateBasic(ShingleBuilder builder, int horizon) {
    // all shingle parameters (block size, cyclic flag, index) come from the builder
    return extrapolateBasic(builder.getShingle(), horizon, builder.getInputPointSize(), builder.isCyclic(),
            builder.getShingleIndex());
}
/**
 * Forecast forward using a sliding shingle: at each horizon step the shingle
 * contents are shifted left by one block, the (now vacant) last block is
 * imputed from the forest, and the imputed values are fed back into the query
 * point so the next step conditions on this step's forecast. Writes values and
 * lower/upper ranges into {@code result} in horizon order.
 *
 * @param result         output vector of length blockSize * horizon (mutated)
 * @param horizon        number of blocks to forecast
 * @param blockSize      number of entries per block
 * @param queryPoint     working copy of the shingle (mutated in place)
 * @param missingIndexes scratch array of length blockSize (overwritten)
 * @param centrality     in [0,1]; passed through to the conditional summary
 */
void extrapolateBasicSliding(RangeVector result, int horizon, int blockSize, float[] queryPoint,
        int[] missingIndexes, double centrality) {
    int resultIndex = 0;
    Arrays.fill(missingIndexes, 0);
    // for a sliding shingle the missing entries are always the final block
    for (int y = 0; y < blockSize; y++) {
        missingIndexes[y] = dimensions - blockSize + y;
    }
    for (int k = 0; k < horizon; k++) {
        // shift all entries in the query point left by 1 block
        System.arraycopy(queryPoint, blockSize, queryPoint, 0, dimensions - blockSize);
        SampleSummary imputedSummary = getConditionalFieldSummary(queryPoint, blockSize, missingIndexes,
                centrality);
        // copy the imputed block into both the forecast and the query point
        for (int y = 0; y < blockSize; y++) {
            result.values[resultIndex] = queryPoint[dimensions - blockSize + y] = imputedSummary.median[dimensions
                    - blockSize + y];
            result.lower[resultIndex] = imputedSummary.lower[dimensions - blockSize + y];
            result.upper[resultIndex] = imputedSummary.upper[dimensions - blockSize + y];
            resultIndex++;
        }
    }
}
/**
 * Forecast forward using a cyclic shingle: at each horizon step the block
 * starting at the current rotation position (modulo dimensions) is treated as
 * missing, imputed from the forest, and written back into the query point so
 * the next step conditions on this step's forecast. Writes values and
 * lower/upper ranges into {@code result} in horizon order.
 *
 * @param result         output vector of length blockSize * horizon (mutated)
 * @param horizon        number of blocks to forecast
 * @param blockSize      number of entries per block
 * @param shingleIndex   starting rotation position within the shingle
 * @param queryPoint     working copy of the shingle (mutated in place)
 * @param missingIndexes scratch array of length blockSize (overwritten)
 * @param centrality     in [0,1]; passed through to the conditional summary
 */
void extrapolateBasicCyclic(RangeVector result, int horizon, int blockSize, int shingleIndex, float[] queryPoint,
        int[] missingIndexes, double centrality) {
    int resultIndex = 0;
    int currentPosition = shingleIndex;
    Arrays.fill(missingIndexes, 0);
    for (int k = 0; k < horizon; k++) {
        // the missing block wraps around the end of the shingle
        for (int y = 0; y < blockSize; y++) {
            missingIndexes[y] = (currentPosition + y) % dimensions;
        }
        SampleSummary imputedSummary = getConditionalFieldSummary(queryPoint, blockSize, missingIndexes,
                centrality);
        // copy the imputed block into both the forecast and the query point
        for (int y = 0; y < blockSize; y++) {
            result.values[resultIndex] = queryPoint[(currentPosition + y)
                    % dimensions] = imputedSummary.median[(currentPosition + y) % dimensions];
            result.lower[resultIndex] = imputedSummary.lower[(currentPosition + y) % dimensions];
            result.upper[resultIndex] = imputedSummary.upper[(currentPosition + y) % dimensions];
            resultIndex++;
        }
        // advance the rotation position by one block for the next step
        currentPosition = (currentPosition + blockSize) % dimensions;
    }
}
/**
 * Extrapolate the stream into the future to produce a forecast. This method is
 * intended to be called when the input data is being shingled internally, and
 * it works by imputing forward one shingle block at a time.
 *
 * @param horizon The number of blocks to forecast.
 * @return a forecasted time series.
 */
public double[] extrapolate(int horizon) {
    // double[] convenience wrapper over the float[] forecast
    return toDoubleArray(extrapolateFromCurrentTime(horizon));
}
/**
 * Extrapolate forward from the forest's internally maintained shingle. Only
 * valid when internal shingling is enabled, since the forest must own the
 * current shingle state.
 *
 * @param horizon the number of blocks to forecast
 * @return the forecasted time series values
 * @throws IllegalArgumentException if internal shingling is not enabled
 */
public float[] extrapolateFromCurrentTime(int horizon) {
    // previously the failure message was just "incorrect use", which gave the
    // caller no hint about what was wrong
    checkArgument(internalShinglingEnabled,
            "extrapolateFromCurrentTime requires internal shingling to be enabled");
    IPointStore<?, ?> store = stateCoordinator.getStore();
    return extrapolateBasic(lastShingledPoint(), horizon, inputDimensions, store.isInternalRotationEnabled(),
            ((int) nextSequenceIndex()) % shingleSize);
}
/**
 * For each tree in the forest, follow the tree traversal path and return the
 * leaf node if the standard Euclidean distance between the query point and the
 * leaf point is smaller than the given threshold. Note that this will not
 * necessarily be the nearest point in the tree, because the traversal path is
 * determined by the random cuts in the tree. If the same leaf point is found in
 * multiple trees, those results will be combined into a single Neighbor in the
 * result.
 *
 * If sequence indexes are disabled for this forest, then the list of sequence
 * indexes will be empty in returned Neighbors.
 *
 * @param point A point whose neighbors we want to find.
 * @param distanceThreshold The maximum Euclidean distance for a point to be
 *                          considered a neighbor.
 * @return a list of Neighbors, ordered from closest to furthest.
 * @deprecated use {@link #getNearNeighborsInSample(float[], double)} instead.
 */
@Deprecated
public List<Neighbor> getNearNeighborsInSample(double[] point, double distanceThreshold) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return getNearNeighborsInSample(toFloatArray(point), distanceThreshold);
}
/**
 * Follow the traversal path in each tree and report the leaf point as a
 * neighbor when it lies within the given Euclidean distance of the query;
 * identical leaf points found in multiple trees are merged by the collector.
 * Returns an empty list while the forest is not ready to output.
 *
 * @param point             the query point
 * @param distanceThreshold maximum Euclidean distance for a neighbor; must be
 *                          positive
 * @return neighbors ordered from closest to furthest
 */
public List<Neighbor> getNearNeighborsInSample(float[] point, double distanceThreshold) {
    checkNotNull(point, "point must not be null");
    checkArgument(distanceThreshold > 0, "distanceThreshold must be greater than 0");
    if (!isOutputReady()) {
        return Collections.emptyList();
    }
    IVisitorFactory<Optional<Neighbor>> neighborVisitorFactory = (tree, treePoint) -> new NearNeighborVisitor(
            treePoint, distanceThreshold);
    return traverseForest(transformToShingledPoint(point), neighborVisitorFactory, Neighbor.collector());
}
/**
 * For each tree in the forest, follow the tree traversal path and return the
 * leaf node. Note that this will not necessarily be the nearest point in the
 * tree, because the traversal path is determined by the random cuts in the
 * tree. If the same leaf point is found in multiple trees, those results will
 * be combined into a single Neighbor in the result.
 *
 * If sequence indexes are disabled for this forest, then sequenceIndexes will
 * be empty in the returned Neighbors.
 *
 * @param point A point whose neighbors we want to find.
 * @return a list of Neighbors, ordered from closest to furthest.
 * @deprecated use {@link #getNearNeighborsInSample(float[])} instead.
 */
@Deprecated
public List<Neighbor> getNearNeighborsInSample(double[] point) {
    // delegates to the float[] overload after a lossy double -> float conversion
    return getNearNeighborsInSample(toFloatArray(point));
}
/**
 * Return the leaf points reached by the traversal path in each tree, with no
 * distance cutoff (threshold of positive infinity).
 *
 * @param point the query point
 * @return neighbors ordered from closest to furthest
 */
public List<Neighbor> getNearNeighborsInSample(float[] point) {
    return getNearNeighborsInSample(point, Double.POSITIVE_INFINITY);
}
/**
 * @return true if all samplers are ready to output results.
 */
public boolean isOutputReady() {
    // the assignment inside the || is intentional: once the forest becomes
    // ready the flag is cached and the component scan is skipped on later calls
    return outputReady || (outputReady = stateCoordinator.getTotalUpdates() >= outputAfter
            && components.stream().allMatch(IComponentModel::isOutputReady));
}
/**
 * @return true if all samplers in the forest are full.
 */
public boolean samplersFull() {
    // once the total update count reaches sampleSize every sampler has filled
    return stateCoordinator.getTotalUpdates() >= sampleSize;
}
/**
 * Returns the total number updates to the forest.
 *
 * The count of updates is represented with long type and may overflow.
 *
 * @return the total number of updates to the forest.
 */
public long getTotalUpdates() {
    return stateCoordinator.getTotalUpdates();
}
/**
 * an L1 clustering primitive that shows the aggregation of the points stored in
 * RCF the clustering uses multi-centroid clustering introduced in CURE
 * https://en.wikipedia.org/wiki/CURE_algorithm However CURE also shrunk the
 * well scattered points by a fraction alpha (there by creating new points);
 * while that concept is used herein, the (multi) summarization algorithm
 * changes the distance metric as opposed to creating new points since
 * continuity of values is not a useful assumption in context of RCFs. The
 * usage of distance metric is similar to the discussion in
 * https://en.wikipedia.org/wiki/Data_stream_clustering See the examples package
 * for an example of dynamic summarization.
 *
 * @param maxAllowed              maximum number of clusters one is willing to
 *                                see
 * @param shrinkage               a parameter that controls between spherical
 *                                nature (=1) and MST (=0), this corresponds to
 *                                the parameter alpha in the description above
 * @param numberOfRepresentatives number of centroids used to represent a
 *                                cluster, this is the parameter c in the
 *                                description of CURE
 * @param separationRatio         a parameter in [0,1] that controls how
 *                                zealously should the algorithm reduce the
 *                                number of clusters a default value of 0.8 is a
 *                                reasonable value for many settings. A value
 *                                close to 0 would tend to merge everything into
 *                                a single cluster. The option is provided since
 *                                it can be of use in the future to produce
 *                                dendrograms and similar information.
 * @param distance                a distance function for points
 * @param previous                a (possibly null) list of previous clustering
 *                                obtained. If the list is non-null then the
 *                                representatives of the previous cluster would
 *                                be added as zero weight points, ensuring that
 *                                the summarization is more smooth (in contrast
 *                                to two independent summarizations). The zero
 *                                weight points of the past can serve as
 *                                representatives of the current clustering.
 * @return a list of clusters
 */
public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,
        double separationRatio, BiFunction<float[], float[], Double> distance, List<ICluster<float[]>> previous) {
    // delegates to the point store, which holds the forest's current sample
    return stateCoordinator.getStore().summarize(maxAllowed, shrinkage, numberOfRepresentatives, separationRatio,
            distance, previous);
}
// same as above with default filled in
/**
 * Convenience overload of
 * {@link #summarize(int, double, int, double, BiFunction, List)} using the
 * default separation ratio and the L1 distance.
 */
public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,
        List<ICluster<float[]>> previous) {
    return stateCoordinator.getStore().summarize(maxAllowed, shrinkage, numberOfRepresentatives,
            DEFAULT_SEPARATION_RATIO_FOR_MERGE, Summarizer::L1distance, previous);
}
public static class Builder<T extends Builder<T>> {
// We use Optional types for optional primitive fields when it doesn't make
// sense to use a constant default.
private int dimensions;
private int sampleSize = DEFAULT_SAMPLE_SIZE;
private Optional<Integer> outputAfter = Optional.empty();
private int numberOfTrees = DEFAULT_NUMBER_OF_TREES;
private Optional<Double> timeDecay = Optional.empty();
private Optional<Long> randomSeed = Optional.empty();
private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
private boolean centerOfMassEnabled = DEFAULT_CENTER_OF_MASS_ENABLED;
private boolean parallelExecutionEnabled = DEFAULT_PARALLEL_EXECUTION_ENABLED;
private Optional<Integer> threadPoolSize = Optional.empty();
private boolean directLocationMapEnabled = DEFAULT_DIRECT_LOCATION_MAP;
private double boundingBoxCacheFraction = DEFAULT_BOUNDING_BOX_CACHE_FRACTION;
private int shingleSize = DEFAULT_SHINGLE_SIZE;
protected boolean dynamicResizingEnabled = DEFAULT_DYNAMIC_RESIZING_ENABLED;
private boolean internalShinglingEnabled = DEFAULT_INTERNAL_SHINGLING_ENABLED;
protected boolean internalRotationEnabled = DEFAULT_INTERNAL_ROTATION_ENABLED;
protected Optional<Integer> initialPointStoreSize = Optional.empty();
protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;
    /** Sets the number of dimensions in each (shingled) input point; returns this builder. */
    public T dimensions(int dimensions) {
        this.dimensions = dimensions;
        return (T) this;
    }

    /** Sets the number of points retained by each tree's sampler; returns this builder. */
    public T sampleSize(int sampleSize) {
        this.sampleSize = sampleSize;
        return (T) this;
    }

    /** Sets how many points must be observed before output is produced; returns this builder. */
    public T outputAfter(int outputAfter) {
        this.outputAfter = Optional.of(outputAfter);
        return (T) this;
    }

    /** Sets the number of trees in the forest; returns this builder. */
    public T numberOfTrees(int numberOfTrees) {
        this.numberOfTrees = numberOfTrees;
        return (T) this;
    }

    /** Sets the shingle size; returns this builder. */
    public T shingleSize(int shingleSize) {
        this.shingleSize = shingleSize;
        return (T) this;
    }

    /** Sets the sampler time-decay parameter; returns this builder. */
    public T timeDecay(double timeDecay) {
        this.timeDecay = Optional.of(timeDecay);
        return (T) this;
    }

    /** Sets a random seed so the forest behaves deterministically; returns this builder. */
    public T randomSeed(long randomSeed) {
        this.randomSeed = Optional.of(randomSeed);
        return (T) this;
    }

    /** Enables or disables center-of-mass tracking in tree nodes; returns this builder. */
    public T centerOfMassEnabled(boolean centerOfMassEnabled) {
        this.centerOfMassEnabled = centerOfMassEnabled;
        return (T) this;
    }

    /** Enables or disables parallel execution; returns this builder. */
    public T parallelExecutionEnabled(boolean parallelExecutionEnabled) {
        this.parallelExecutionEnabled = parallelExecutionEnabled;
        return (T) this;
    }

    /** Sets the thread pool size used when parallel execution is enabled; returns this builder. */
    public T threadPoolSize(int threadPoolSize) {
        this.threadPoolSize = Optional.of(threadPoolSize);
        return (T) this;
    }

    /** Sets the initial capacity of the point store; returns this builder. */
    public T initialPointStoreSize(int initialPointStoreSize) {
        this.initialPointStoreSize = Optional.of(initialPointStoreSize);
        return (T) this;
    }

    /** Enables or disables storing sequence indexes of sampled points; returns this builder. */
    public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
        this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
        return (T) this;
    }
    /**
     * Deprecated no-op: the argument is ignored; the method is retained only for
     * source compatibility with older callers.
     */
    @Deprecated
    public T compact(boolean compact) {
        return (T) this;
    }

    /** Enables or disables internal shingling of input points; returns this builder. */
    public T internalShinglingEnabled(boolean internalShinglingEnabled) {
        this.internalShinglingEnabled = internalShinglingEnabled;
        return (T) this;
    }

    /** Enables or disables internal rotation; returns this builder. */
    public T internalRotationEnabled(boolean internalRotationEnabled) {
        this.internalRotationEnabled = internalRotationEnabled;
        return (T) this;
    }

    /** Enables or disables dynamic resizing of internal stores; returns this builder. */
    public T dynamicResizingEnabled(boolean dynamicResizingEnabled) {
        this.dynamicResizingEnabled = dynamicResizingEnabled;
        return (T) this;
    }

    /**
     * Deprecated no-op: the argument is ignored; the method is retained only for
     * source compatibility with older callers.
     */
    @Deprecated
    public T precision(Precision precision) {
        return (T) this;
    }

    /** Sets the fraction of bounding boxes cached per tree; returns this builder. */
    public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {
        this.boundingBoxCacheFraction = boundingBoxCacheFraction;
        return (T) this;
    }

    /** Sets the initial accept fraction of the sampler; returns this builder. */
    public T initialAcceptFraction(double initialAcceptFraction) {
        this.initialAcceptFraction = initialAcceptFraction;
        return (T) this;
    }
    /**
     * Builds a {@link RandomCutForest} from the current builder state.
     *
     * @return a new forest configured with this builder's settings
     */
    public RandomCutForest build() {
        return new RandomCutForest(this);
    }
public Random getRandom() {
// If a random seed was given, use it to create a new Random. Otherwise, call
// the 0-argument constructor
return randomSeed.map(Random::new).orElseGet(Random::new);
}
}
/**
* Score a point using the given scoring functions.
*
* @param point input point being scored
* @param ignoreLeafMassThreshold said threshold
* @param seen the function that applies if input is equal to
* a previously seen sample in a leaf
* @param unseen if the input does not have a match in the
* leaves
* @param damp damping function based on the duplicity of the
* previously seen samples
* @return anomaly score
*/
public double getDynamicScore(float[] point, int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,
BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp) {
checkArgument(ignoreLeafMassThreshold >= 0, "ignoreLeafMassThreshold should be greater than or equal to 0");
if (!isOutputReady()) {
return 0.0;
}
VisitorFactory<Double> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicScoreVisitor(
tree.projectToTree(y), tree.getMass(), ignoreLeafMassThreshold, seen, unseen, damp));
BinaryOperator<Double> accumulator = Double::sum;
Function<Double, Double> finisher = sum -> sum / numberOfTrees;
return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);
}
    /**
     * Similar to {@code getDynamicScore} but the scoring takes in a function of
     * BoundingBox to probabilities (a vector over the dimensions), and produces a
     * score as if the tree were built using that function (when in reality the
     * tree is an RCF). Changing the defaultRCFgVec function to some other function
     * f() provides a mechanism of dynamic scoring for trees that are built using
     * f(), which is the purpose of the TransductiveScalarScore visitor. Note that
     * the answer is an MCMC simulation and is not normalized (because the scoring
     * functions are flexible and unknown), and over a small number of trees the
     * errors can be large, especially if vecSep is very far from defaultRCFgVec.
     *
     * Given the large number of possible sources of distortion,
     * ignoreLeafThreshold is not supported.
     *
     * @param point  point to be scored
     * @param seen   the score function for seen points
     * @param unseen score function for unseen points
     * @param damp   dampening the score for duplicates
     * @param vecSep the function of (BoundingBox) -> array of probabilities
     * @return the simulated score
     */
    public double getDynamicSimulatedScore(float[] point, BiFunction<Double, Double, Double> seen,
            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp,
            Function<IBoundingBoxView, double[]> vecSep) {
        if (!isOutputReady()) {
            return 0.0;
        }
        VisitorFactory<Double> visitorFactory = new VisitorFactory<>(
                (tree, y) -> new SimulatedTransductiveScalarScoreVisitor(tree.projectToTree(y), tree.getMass(), seen,
                        unseen, damp, CommonUtils::defaultRCFgVecFunction, vecSep));
        BinaryOperator<Double> accumulator = Double::sum;
        Function<Double, Double> finisher = sum -> sum / numberOfTrees;
        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);
    }
    /**
     * Score a point using the given scoring functions. This method will
     * short-circuit before visiting all trees if the scores that are returned from
     * a subset of trees appear to be converging to a given value. See
     * {@link OneSidedConvergingDoubleAccumulator} for more about convergence.
     *
     * @param point                   input point
     * @param precision               controls early convergence
     * @param highIsCritical          this is true for the default scoring
     *                                function. If the user wishes to use a
     *                                different scoring function where anomaly
     *                                scores are low values (for example, height in
     *                                tree) then this should be set to false.
     * @param ignoreLeafMassThreshold leaves with mass at or below this threshold
     *                                are ignored
     * @param seen                    scoring function when the input matches some
     *                                tuple in the leaves
     * @param unseen                  scoring function when the input is not found
     * @param damp                    dampening function for duplicates which are
     *                                the same as the input (applies with seen)
     * @return the dynamic score under sequential early stopping
     */
    public double getApproximateDynamicScore(float[] point, double precision, boolean highIsCritical,
            int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,
            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp) {
        checkArgument(ignoreLeafMassThreshold >= 0, "ignoreLeafMassThreshold should be greater than or equal to 0");
        if (!isOutputReady()) {
            return 0.0;
        }
        VisitorFactory<Double> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicScoreVisitor(
                tree.projectToTree(y), tree.getMass(), ignoreLeafMassThreshold, seen, unseen, damp));
        // The accumulator stops early once enough trees agree (one-sided convergence);
        // the average is therefore taken over the number of trees actually visited.
        ConvergingAccumulator<Double> accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision,
                DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);
        Function<Double, Double> finisher = x -> x / accumulator.getValuesAccepted();
        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);
    }
/**
* Same as above, but for dynamic scoring. See the params of
* getDynamicScoreParallel
*
* @param point point to be scored
* @param ignoreLeafMassThreshold said threshold
* @param seen score function for seen points
* @param unseen score function for unseen points
* @param newDamp dampening function for duplicates in the seen
* function
* @return dynamic scoring attribution DiVector
*/
public DiVector getDynamicAttribution(float[] point, int ignoreLeafMassThreshold,
BiFunction<Double, Double, Double> seen, BiFunction<Double, Double, Double> unseen,
BiFunction<Double, Double, Double> newDamp) {
if (!isOutputReady()) {
return new DiVector(dimensions);
}
VisitorFactory<DiVector> visitorFactory = new VisitorFactory<>(
(tree, y) -> new DynamicAttributionVisitor(tree.projectToTree(y), tree.getMass(),
ignoreLeafMassThreshold, seen, unseen, newDamp),
(tree, x) -> x.lift(tree::liftFromTree));
BinaryOperator<DiVector> accumulator = DiVector::addToLeft;
Function<DiVector, DiVector> finisher = x -> x.scale(1.0 / numberOfTrees);
return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);
}
/**
* Atrribution for dynamic sequential scoring; getL1Norm() should agree with
* getDynamicScoringSequential
*
* @param point input
* @param precision parameter to stop early stopping
* @param highIsCritical are high values anomalous (otherwise low
* values are anomalous)
* @param ignoreLeafMassThreshold we ignore leaves with mass equal/below *
* threshold
* @param seen function for scoring points that have been
* seen before
* @param unseen function for scoring points not seen in tree
* @param newDamp dampening function based on duplicates
* @return attribution DiVector of the score
*/
public DiVector getApproximateDynamicAttribution(float[] point, double precision, boolean highIsCritical,
int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,
BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> newDamp) {
if (!isOutputReady()) {
return new DiVector(dimensions);
}
VisitorFactory<DiVector> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicAttributionVisitor(y,
tree.getMass(), ignoreLeafMassThreshold, seen, unseen, newDamp),
(tree, x) -> x.lift(tree::liftFromTree));
ConvergingAccumulator<DiVector> accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions,
highIsCritical, precision, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);
Function<DiVector, DiVector> finisher = vector -> vector.scale(1.0 / accumulator.getValuesAccepted());
return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);
}
}
| 441 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/ComponentList.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import java.util.ArrayList;
import java.util.Collection;
/**
 * A ComponentList is an ArrayList specialized to contain IComponentModel
 * instances. Executor classes operate on ComponentLists.
 *
 * @param <PointReference> The internal point representation expected by the
 *                         component models in this list.
 * @param <Point>          The explicit data type of points being passed
 */
public class ComponentList<PointReference, Point> extends ArrayList<IComponentModel<PointReference, Point>> {

    /** Creates an empty component list. */
    public ComponentList() {
        super();
    }

    /**
     * Creates a component list containing the elements of the given collection, in
     * the order returned by its iterator.
     *
     * @param collection the component models to copy into this list
     */
    public ComponentList(Collection<? extends IComponentModel<PointReference, Point>> collection) {
        super(collection);
    }

    /**
     * Creates an empty component list with the given initial capacity.
     *
     * @param initialCapacity the initial capacity of the backing array
     */
    public ComponentList(int initialCapacity) {
        super(initialCapacity);
    }
}
| 442 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/IVisitorFactory.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import com.amazon.randomcutforest.tree.ITree;
/**
 * Factory for creating {@link Visitor} instances bound to a specific tree and
 * query point, with an optional hook to lift a tree-local result back into the
 * forest's representation.
 *
 * @param <R> the result type produced by the visitors
 */
@FunctionalInterface
public interface IVisitorFactory<R> {
    /**
     * Create a new visitor for one traversal of the given tree.
     *
     * @param tree  the tree about to be traversed
     * @param point the query point
     * @return a fresh visitor for this traversal
     */
    Visitor<R> newVisitor(ITree<?, ?> tree, float[] point);

    /**
     * Transform a tree-local result into the forest's representation; the
     * identity transformation by default.
     *
     * @param tree   the tree that produced the result
     * @param result the tree-local result
     * @return the lifted result
     */
    default R liftResult(ITree<?, ?> tree, R result) {
        return result;
    }
}
| 443 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/MultiVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import com.amazon.randomcutforest.tree.INodeView;
/**
 * This is the interface for a visitor which can be used with
 * {RandomCutTree::traversePathToLeafAndVisitNodesMulti}. In this traversal
 * method, we optionally choose to split the visitor into two copies when
 * visiting nodes. Each copy then visits one of the paths down from that node.
 * The results from both visitors are combined before returning back up the
 * tree.
 *
 * @param <R> the result type produced by the visitor
 */
public interface MultiVisitor<R> extends Visitor<R> {
    /**
     * Returns true if the traversal method should split the visitor (i.e., create
     * a copy) at this node.
     *
     * @param node A node in the tree traversal
     * @return true if the traversal should split the visitor into two copies at
     *         this node, false otherwise.
     */
    boolean trigger(final INodeView node);

    /**
     * Return a copy of this visitor. The original visitor plus the copy will each
     * traverse one branch of the tree.
     *
     * @return a copy of this visitor
     */
    MultiVisitor<R> newCopy();

    /**
     * Combine two visitors. The state of the argument visitor should be combined
     * with the state of this instance. This method is called after both visitors
     * have traversed one branch of the tree.
     *
     * @param other A second visitor
     */
    void combine(MultiVisitor<R> other);
}
| 444 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/Visitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import com.amazon.randomcutforest.tree.INodeView;
/**
 * This is the interface for a visitor which can be used to query a ITraversable
 * to produce a result. A visitor is submitted to
 * ITraversable#traverse(double[], Visitor), and during the traversal the
 * {@link #acceptLeaf} and {@link #accept} methods are invoked on the nodes in
 * the traversal path.
 * <p>
 * See ITraversable#traverse(double[], Visitor) for details about the traversal
 * path.
 *
 * @param <R> the result type computed by the visitor
 */
public interface Visitor<R> {
    /**
     * Visit a node in the traversal path.
     *
     * @param node        the node being visited
     * @param depthOfNode the depth of the node being visited
     */
    void accept(INodeView node, int depthOfNode);

    /**
     * Visit the leaf node in the traversal path. By default, this method proxies
     * to {@link #accept(INodeView, int)}.
     *
     * @param leafNode    the leaf node being visited
     * @param depthOfNode the depth of the leaf node
     */
    default void acceptLeaf(INodeView leafNode, final int depthOfNode) {
        accept(leafNode, depthOfNode);
    }

    /**
     * At the end of the traversal, this method is called to obtain the result
     * computed by the visitor.
     *
     * @return the result value computed by the visitor.
     */
    R getResult();

    /**
     * This method short-circuits the evaluation of the Visitor at nodes on the
     * traversal path. By default, the accept (or acceptLeaf) method will be
     * invoked for each Node in the traversal path. But the NodeView has to prepare
     * information to support that visitor invocation. Before invocation, the value
     * of isConverged will be checked. If it is true, some of that preparation can
     * be skipped -- because the visitor would not be updated. This method can be
     * overwritten to optimize visitors that do not need to visit every node on the
     * root-to-leaf path before returning a value.
     *
     * Note that this convergence applies to a single visitor computation and is
     * expected to be a speedup without any change in the value of the answer. This
     * is different from a converging accumulator, which corresponds to sequential
     * evaluation of different visitors and early stopping.
     **/
    default boolean isConverged() {
        return false;
    }
}
| 445 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/CommonUtils.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import java.util.Objects;
import java.util.function.Supplier;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
/**
* A collection of common utility functions.
*/
/**
 * A collection of common utility functions: argument/state validation,
 * default RCF scoring functions, and primitive array conversions.
 */
public class CommonUtils {

    // Utility class: not instantiable.
    private CommonUtils() {
    }

    /**
     * Throws an {@link IllegalArgumentException} with the specified message if the
     * specified input is false.
     *
     * @param condition A condition to test.
     * @param message   The error message to include in the
     *                  {@code IllegalArgumentException} if {@code condition} is
     *                  false.
     * @throws IllegalArgumentException if {@code condition} is false.
     */
    public static void checkArgument(boolean condition, String message) {
        if (!condition) {
            throw new IllegalArgumentException(message);
        }
    }

    /**
     * A lazy equivalent of {@link #checkArgument(boolean, String)}: the message is
     * only computed when the check fails, avoiding message construction on the hot
     * path.
     *
     * @param condition       A condition to test.
     * @param messageSupplier Supplier of the error message, invoked only on
     *                        failure.
     * @throws IllegalArgumentException if {@code condition} is false.
     */
    public static void checkArgument(boolean condition, Supplier<String> messageSupplier) {
        if (!condition) {
            throw new IllegalArgumentException(messageSupplier.get());
        }
    }

    /**
     * Throws an {@link IllegalStateException} with the specified message if the
     * specified input is false.
     *
     * @param condition A condition to test.
     * @param message   The error message to include in the
     *                  {@code IllegalStateException} if {@code condition} is false.
     * @throws IllegalStateException if {@code condition} is false.
     */
    public static void checkState(boolean condition, String message) {
        if (!condition) {
            throw new IllegalStateException(message);
        }
    }

    /**
     * Throws an {@link IllegalStateException} with the specified message if the
     * specified input is false. This would eventually become asserts.
     *
     * @param condition A condition to test.
     * @param message   The error message to include in the
     *                  {@code IllegalStateException} if {@code condition} is false.
     * @throws IllegalStateException if {@code condition} is false.
     */
    public static void validateInternalState(boolean condition, String message) {
        if (!condition) {
            throw new IllegalStateException(message);
        }
    }

    /**
     * Throws a {@link NullPointerException} with the specified message if the
     * specified input is null.
     *
     * @param <T>     An arbitrary type.
     * @param object  An object reference to test for nullity.
     * @param message The error message to include in the
     *                {@code NullPointerException} if {@code object} is null.
     * @return {@code object} if not null.
     * @throws NullPointerException if the supplied object is null.
     */
    public static <T> T checkNotNull(T object, String message) {
        Objects.requireNonNull(object, message);
        return object;
    }

    /**
     * Compute the probability of separation for a bounding box and a point. This
     * method considers the bounding box created by merging the query point into
     * the existing bounding box, and computes the probability that a random cut
     * would separate the query point from the merged bounding box.
     *
     * @param boundingBox is the bounding box used in RandomCutTree
     * @param queryPoint  is the multidimensional point
     * @return the probability of separation choosing a random cut
     */
    public static double getProbabilityOfSeparation(final IBoundingBoxView boundingBox, float[] queryPoint) {
        double sumOfNewRange = 0d;
        double sumOfDifferenceInRange = 0d;
        for (int i = 0; i < queryPoint.length; ++i) {
            double maxVal = boundingBox.getMaxValue(i);
            double minVal = boundingBox.getMinValue(i);
            double oldRange = maxVal - minVal;
            if (maxVal < queryPoint[i]) {
                maxVal = queryPoint[i];
            } else if (minVal > queryPoint[i]) {
                minVal = queryPoint[i];
            } else {
                // Query point lies inside the box in this dimension; range unchanged.
                sumOfNewRange += oldRange;
                continue;
            }
            double newRange = maxVal - minVal;
            sumOfNewRange += newRange;
            sumOfDifferenceInRange += (newRange - oldRange);
        }
        if (sumOfNewRange <= 0) {
            // Degenerate (zero-volume) merged box: no cut can separate the point.
            return 0;
        }
        return sumOfDifferenceInRange / sumOfNewRange;
    }

    /**
     * The default anomaly scoring function for points that are contained in a
     * tree.
     *
     * @param depth The depth of the leaf node where this method is invoked
     * @param mass  The number of times the point has been seen before
     * @return The score contribution from this previously-seen point
     */
    public static double defaultScoreSeenFunction(double depth, double mass) {
        return 1.0 / (depth + Math.log(mass + 1.0) / Math.log(2.0));
    }

    /**
     * The default anomaly scoring function for points not already contained in a
     * tree.
     *
     * @param depth The depth of the leaf node where this method is invoked
     * @param mass  The number of times the point has been seen before (unused by
     *              this default, kept for interface uniformity)
     * @return The score contribution from this point
     */
    public static double defaultScoreUnseenFunction(double depth, double mass) {
        return 1.0 / (depth + 1);
    }

    /**
     * The default dampening function for duplicate points: the score is scaled
     * down as the leaf's share of the tree mass grows.
     *
     * @param leafMass mass of the leaf holding the duplicates
     * @param treeMass total mass of the tree
     * @return multiplicative damping factor in (0, 1]
     */
    public static double defaultDampFunction(double leafMass, double treeMass) {
        return 1.0 - leafMass / (2 * treeMass);
    }

    /**
     * Some algorithms which return a scalar value need to scale that value by tree
     * mass for consistency. This is the default method for computing the scale
     * factor in these cases. The function has to be associative in its first
     * argument (when the second is fixed). That is, fn(x1, y) + fn(x2, y) =
     * fn(x1 + x2, y).
     *
     * @param scalarValue The value being scaled
     * @param mass        The mass of the tree where this method is invoked
     * @return The original value scaled appropriately for this tree
     */
    public static double defaultScalarNormalizerFunction(double scalarValue, double mass) {
        return scalarValue * Math.log(mass + 1) / Math.log(2.0);
    }

    /**
     * The following function forms the core of RCFs: given a BoundingBox it
     * produces the (unnormalized) probability of cutting in each dimension. While
     * this function is absorbed in the logic of the simpler scoring methods, the
     * more advanced scoring methods (for example, simulating a Transductive
     * Isolation Forest with streaming) require this function explicitly. A
     * different function can be used to simulate non-RCFs.
     *
     * @param boundingBox bounding box of a set of points
     * @return array of (unnormalized) probabilities of cutting in each dimension
     */
    public static double[] defaultRCFgVecFunction(IBoundingBoxView boundingBox) {
        double[] answer = new double[boundingBox.getDimensions()];
        for (int i = 0; i < boundingBox.getDimensions(); ++i) {
            double maxVal = boundingBox.getMaxValue(i);
            double minVal = boundingBox.getMinValue(i);
            double oldRange = maxVal - minVal;
            if (oldRange > 0) {
                answer[i] = oldRange;
            }
        }
        return answer;
    }

    /**
     * Widens a float array to a double array.
     *
     * @param array input array, must not be null
     * @return a new double array with the same values
     */
    public static double[] toDoubleArray(float[] array) {
        checkNotNull(array, "array must not be null");
        double[] result = new double[array.length];
        for (int i = 0; i < array.length; i++) {
            result[i] = array[i];
        }
        return result;
    }

    /**
     * Narrows a double array to a float array, mapping any zero (including -0.0)
     * to +0.0f.
     *
     * @param array input array, must not be null
     * @return a new float array with the narrowed values
     */
    public static float[] toFloatArray(double[] array) {
        checkNotNull(array, "array must not be null");
        float[] result = new float[array.length];
        for (int i = 0; i < array.length; i++) {
            result[i] = (array[i] == 0) ? 0 : (float) array[i];
            // eliminating -0.0 issues
        }
        return result;
    }

    /**
     * Converts bytes to ints, treating each byte as unsigned (0..255).
     *
     * @param values input array, must not be null
     * @return a new int array of the unsigned byte values
     */
    public static int[] toIntArray(byte[] values) {
        checkNotNull(values, "array must not be null");
        int[] result = new int[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = values[i] & 0xff;
        }
        return result;
    }

    /**
     * Converts chars to ints (0..65535).
     *
     * @param values input array, must not be null
     * @return a new int array of the char values
     */
    public static int[] toIntArray(char[] values) {
        checkNotNull(values, "array must not be null");
        int[] result = new int[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = values[i];
        }
        return result;
    }

    /**
     * Converts ints to chars by truncating to the low 16 bits.
     *
     * @param values input array, must not be null
     * @return a new char array of the truncated values
     */
    public static char[] toCharArray(int[] values) {
        checkNotNull(values, "array must not be null");
        char[] result = new char[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = (char) values[i];
        }
        return result;
    }

    /**
     * Converts ints to bytes by truncating to the low 8 bits.
     *
     * @param values input array, must not be null
     * @return a new byte array of the truncated values
     */
    public static byte[] toByteArray(int[] values) {
        checkNotNull(values, "array must not be null");
        byte[] result = new byte[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = (byte) values[i];
        }
        return result;
    }
}
| 446 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/IComponentModel.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import com.amazon.randomcutforest.config.IDynamicConfig;
import com.amazon.randomcutforest.executor.ITraversable;
import com.amazon.randomcutforest.executor.IUpdatable;
/**
 * A component model is one member model of a forest: it can be traversed by
 * visitors, updated with new points, and reconfigured dynamically.
 *
 * @param <PointReference> The internal point representation expected by the
 *                         component models in this list.
 * @param <Point>          The explicit data type of points being passed
 */
public interface IComponentModel<PointReference, Point>
        extends ITraversable, IUpdatable<PointReference>, IDynamicConfig {
}
| 447 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/IMultiVisitorFactory.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import com.amazon.randomcutforest.tree.ITree;
/**
 * Factory for creating {@link MultiVisitor} instances bound to a specific tree
 * and query point, with an optional hook to lift a tree-local result back into
 * the forest's representation.
 *
 * @param <R> the result type produced by the visitors
 */
@FunctionalInterface
public interface IMultiVisitorFactory<R> {
    /**
     * Create a new multi-visitor for one traversal of the given tree.
     *
     * @param tree  the tree about to be traversed
     * @param point the query point
     * @return a fresh multi-visitor for this traversal
     */
    MultiVisitor<R> newVisitor(ITree<?, ?> tree, float[] point);

    /**
     * Transform a tree-local result into the forest's representation; the
     * identity transformation by default.
     *
     * @param tree   the tree that produced the result
     * @param result the tree-local result
     * @return the lifted result
     */
    default R liftResult(ITree<?, ?> tree, R result) {
        return result;
    }
}
| 448 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/MultiVisitorFactory.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.tree.ITree;
/**
 * A lambda-based implementation of {@link IMultiVisitorFactory}: one function
 * creates a new {@link MultiVisitor} for a given tree and query point, and a
 * second (optional) function lifts a tree-local result back into the forest's
 * representation.
 *
 * NOTE(review): the previous class comment was copied from {@link MultiVisitor}
 * and described visitor traversal rather than this factory.
 *
 * @param <R> the result type produced by the visitors
 */
public class MultiVisitorFactory<R> implements IMultiVisitorFactory<R> {

    // Creates a visitor for a (tree, point) pair.
    private final BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor;
    // Lifts a tree-local result into the forest's representation.
    private final BiFunction<ITree<?, ?>, R, R> liftResult;

    /**
     * Creates a factory with an explicit lift function.
     *
     * @param newVisitor creates a visitor for a (tree, point) pair
     * @param liftResult lifts a tree-local result into the forest's representation
     */
    public MultiVisitorFactory(BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor,
            BiFunction<ITree<?, ?>, R, R> liftResult) {
        this.newVisitor = newVisitor;
        this.liftResult = liftResult;
    }

    /**
     * Creates a factory whose lift function is the identity.
     *
     * @param newVisitor creates a visitor for a (tree, point) pair
     */
    public MultiVisitorFactory(BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor) {
        this(newVisitor, (tree, x) -> x);
    }

    @Override
    public MultiVisitor<R> newVisitor(ITree<?, ?> tree, float[] point) {
        return newVisitor.apply(tree, point);
    }

    @Override
    public R liftResult(ITree<?, ?> tree, R result) {
        return liftResult.apply(tree, result);
    }
}
| 449 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/INodeView.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import java.util.HashMap;
/**
 * Read-only view of a node in a random cut tree, exposed to visitors during
 * traversal.
 */
public interface INodeView {
    /** @return true if this node is a leaf. */
    boolean isLeaf();

    /** @return the number of points accounted for by the subtree at this node. */
    int getMass();

    /** @return the bounding box of the points under this node. */
    IBoundingBoxView getBoundingBox();

    /**
     * @param point the point whose traversal path is being followed
     * @return the bounding box of the sibling of the child that {@code point}
     *         descends into
     */
    IBoundingBoxView getSiblingBoundingBox(float[] point);

    /** @return the dimension of this node's random cut. */
    int getCutDimension();

    /** @return the value of this node's random cut. */
    double getCutValue();

    /** @return for a leaf node, the stored point. */
    float[] getLeafPoint();

    /**
     * @return the leaf point lifted to the forest's representation; defaults to
     *         {@link #getLeafPoint()}.
     */
    default float[] getLiftedLeafPoint() {
        return getLeafPoint();
    };

    /**
     * for a leaf node, return the sequence indexes corresponding to the leaf
     * point. If this method is invoked on a non-leaf node then it throws an
     * IllegalStateException.
     *
     * @return map from sequence index to the number of occurrences
     */
    HashMap<Long, Integer> getSequenceIndexes();

    /**
     * provides the probability of separation vis-a-vis the bounding box at the
     * node.
     *
     * NOTE(review): the method name misspells "probability"; it is kept as-is
     * because renaming would break existing implementers and callers.
     *
     * @param point input point being evaluated
     * @return the probability of separation
     */
    double probailityOfSeparation(float[] point);

    /**
     * for a leaf node, return the index in the point store for the leaf point. If
     * this method is invoked on a non-leaf node then it throws an
     * IllegalStateException.
     *
     * @return the point store index of the leaf point
     */
    int getLeafPointIndex();
}
| 450 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreMedium.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toCharArray;
import static com.amazon.randomcutforest.CommonUtils.toIntArray;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Stack;
import com.amazon.randomcutforest.store.IndexIntervalManager;
/**
* A fixed-size buffer for storing interior tree nodes. An interior node is
* defined by its location in the tree (parent and child nodes), its random cut,
* and its bounding box. The NodeStore class uses arrays to store these field
* values for a collection of nodes. An index in the store can be used to look
* up the field values for a particular node.
*
* The internal nodes (handled by this store) corresponds to [0..capacity]. The
* mass of the nodes is cyclic, i.e., mass % (capacity + 1) -- therefore, in
* presence of duplicates there would be nodes which are free, and they would
* have mass 0 == (capacity + 1). But those nodes would not be reachable by the
* code below.
*
*/
public class NodeStoreMedium extends AbstractNodeStore {

    // parent of each internal node, or null when parents are not tracked;
    // the sentinel value `capacity` means "no parent / root"
    private final char[] parentIndex;
    // left child of each internal node; values > capacity encode leaves
    // as (pointIndex + capacity + 1), the value `capacity` means "not yet set"
    private final int[] leftIndex;
    // right child of each internal node, same encoding as leftIndex
    private final int[] rightIndex;
    // cut dimension per internal node; char suffices (dimension <= Character.MAX_VALUE)
    public final char[] cutDimension;
    // subtree mass per internal node, stored modulo (capacity + 1); 0 encodes
    // the full mass (capacity + 1) for reachable nodes -- see getMass()
    private final char[] mass;

    /**
     * Creates a medium node store. When the builder carries existing left/right
     * indices this reconstructs the occupied-slot bitmap and (optionally) the
     * parent pointers from the child arrays; otherwise all slots start free.
     *
     * @param builder the configured builder
     */
    public NodeStoreMedium(AbstractNodeStore.Builder builder) {
        super(builder);
        mass = new char[capacity];
        Arrays.fill(mass, (char) 0);
        if (builder.storeParent) {
            parentIndex = new char[capacity];
            Arrays.fill(parentIndex, (char) capacity);
        } else {
            parentIndex = null;
        }
        if (builder.leftIndex == null) {
            // fresh (empty) store: all children point at the "not yet set" sentinel
            leftIndex = new int[capacity];
            rightIndex = new int[capacity];
            cutDimension = new char[capacity];
            Arrays.fill(leftIndex, capacity);
            Arrays.fill(rightIndex, capacity);
        } else {
            // reconstruction: copy the child arrays and mark every internal node
            // reachable as a child (plus the root) as occupied
            leftIndex = Arrays.copyOf(builder.leftIndex, builder.leftIndex.length);
            rightIndex = Arrays.copyOf(builder.rightIndex, builder.rightIndex.length);
            cutDimension = toCharArray(builder.cutDimension);
            BitSet bits = new BitSet(capacity);
            if (builder.root != Null) {
                bits.set(builder.root);
            }
            for (int i = 0; i < leftIndex.length; i++) {
                if (isInternal(leftIndex[i])) {
                    bits.set(leftIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[leftIndex[i]] = (char) i;
                    }
                }
            }
            for (int i = 0; i < rightIndex.length; i++) {
                if (isInternal(rightIndex[i])) {
                    bits.set(rightIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[rightIndex[i]] = (char) i;
                    }
                }
            }
            // the manager hands out only the slots not marked in `bits`
            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);
            // need to set up parents using the root
        }
    }

    /**
     * Creates a new internal node with the given cut, whose children are the new
     * leaf (encoded as pointIndex + capacity + 1) and the existing child
     * {@code childIndex}; the new node is spliced between the child and its
     * former parent (the top of {@code pathToRoot}).
     *
     * @return the index of the newly created internal node
     */
    @Override
    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,
            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {
        int index = freeNodeManager.takeIndex();
        this.cutValue[index] = cutValue;
        this.cutDimension[index] = (char) cutDimension;
        // the new leaf goes on whichever side of the cut the point falls
        if (leftOf(cutValue, cutDimension, point)) {
            this.leftIndex[index] = (pointIndex + capacity + 1);
            this.rightIndex[index] = childIndex;
        } else {
            this.rightIndex[index] = (pointIndex + capacity + 1);
            this.leftIndex[index] = childIndex;
        }
        // mass of new node = mass of old child + 1 (the new leaf), stored mod (capacity + 1)
        this.mass[index] = (char) ((((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1)
                % (capacity + 1));
        int parentIndex = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];
        if (this.parentIndex != null) {
            this.parentIndex[index] = (char) parentIndex;
            if (!isLeaf(childIndex)) {
                this.parentIndex[childIndex] = (char) (index);
            }
        }
        if (parentIndex != Null) {
            spliceEdge(parentIndex, childIndex, index);
        }
        return index;
    }

    /**
     * During partial-tree construction, attaches {@code childReference} as the
     * child of {@code node} on the side the point falls.
     */
    @Override
    public void assignInPartialTree(int node, float[] point, int childReference) {
        if (leftOf(node, point)) {
            leftIndex[node] = childReference;
        } else {
            rightIndex[node] = childReference;
        }
    }

    public int getLeftIndex(int index) {
        return leftIndex[index];
    }

    public int getRightIndex(int index) {
        return rightIndex[index];
    }

    public int getParentIndex(int index) {
        checkArgument(parentIndex != null, "incorrect call");
        return parentIndex[index];
    }

    /** Marks an internal node as the root by clearing its parent pointer. */
    public void setRoot(int index) {
        if (!isLeaf(index) && parentIndex != null) {
            parentIndex[index] = (char) capacity;
        }
    }

    @Override
    protected void decreaseMassOfInternalNode(int node) {
        // adding `capacity` is subtracting 1 modulo (capacity + 1)
        mass[node] = (char) ((mass[node] + capacity) % (capacity + 1)); // this cannot get to 0
    }

    @Override
    protected void increaseMassOfInternalNode(int node) {
        mass[node] = (char) ((mass[node] + 1) % (capacity + 1));
        // mass of root == 0; note capacity = number_of_leaves - 1
    }

    /** Resets a node's slots to the sentinel and returns the slot to the free pool. */
    public void deleteInternalNode(int index) {
        leftIndex[index] = capacity;
        rightIndex[index] = capacity;
        if (parentIndex != null) {
            parentIndex[index] = (char) capacity;
        }
        freeNodeManager.releaseIndex(index);
    }

    /** Decodes the modular mass: a stored 0 means the full mass (capacity + 1). */
    public int getMass(int index) {
        return mass[index] != 0 ? mass[index] : (capacity + 1);
    }

    /** Replaces the edge parent->node with parent->newNode (newNode adopts node's slot). */
    public void spliceEdge(int parent, int node, int newNode) {
        assert (!isLeaf(newNode));
        if (node == leftIndex[parent]) {
            leftIndex[parent] = newNode;
        } else {
            rightIndex[parent] = newNode;
        }
        if (parentIndex != null && isInternal(node)) {
            parentIndex[node] = (char) newNode;
        }
    }

    /**
     * Removes {@code parent} from the tree by connecting {@code grandParent}
     * directly to the sibling of {@code node}.
     */
    public void replaceParentBySibling(int grandParent, int parent, int node) {
        int sibling = getSibling(node, parent);
        if (parent == leftIndex[grandParent]) {
            leftIndex[grandParent] = sibling;
        } else {
            rightIndex[grandParent] = sibling;
        }
        if (parentIndex != null && isInternal(sibling)) {
            parentIndex[sibling] = (char) grandParent;
        }
    }

    public int getCutDimension(int index) {
        return cutDimension[index];
    }

    /** @return a widened copy of the cut-dimension array (for serialization). */
    public int[] getCutDimension() {
        return toIntArray(cutDimension);
    }

    /** @return a defensive copy of the left-child array. */
    public int[] getLeftIndex() {
        return Arrays.copyOf(leftIndex, leftIndex.length);
    }

    /** @return a defensive copy of the right-child array. */
    public int[] getRightIndex() {
        return Arrays.copyOf(rightIndex, rightIndex.length);
    }
}
| 451 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/Cut.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
/**
* A Cut represents a division of space into two half-spaces. Cuts are used to
* define the tree structure in {@link RandomCutTree}, and they determine the
* standard tree traversal path defined in {@link RandomCutTree#traverse}.
*/
public class Cut {

    // 0-based index of the dimension the cut divides
    private final int dimension;
    // coordinate at which the chosen dimension is divided
    private final double value;

    /**
     * Construct a cut in the given dimension at the given spatial value.
     *
     * @param dimension the 0-based index of the dimension being cut
     * @param value     the coordinate at which space is divided
     */
    public Cut(int dimension, double value) {
        this.dimension = dimension;
        this.value = value;
    }

    /**
     * Decide which side of a cut the given point falls on. A point whose
     * coordinate in the cut dimension is less than or equal to the cut value
     * lies "to the left" -- the mnemonic comes from the one-dimensional case,
     * where such a point sits left of the cut on the standard number line.
     *
     * @param point the point being classified relative to the cut
     * @param cut   the cut being tested against
     * @return true when the point's coordinate in the cut dimension is at most
     *         the cut value, false otherwise
     */
    public static boolean isLeftOf(double[] point, Cut cut) {
        final double coordinate = point[cut.dimension];
        return coordinate <= cut.value;
    }

    /**
     * @return the 0-based index of the dimension this cut divides
     */
    public int getDimension() {
        return dimension;
    }

    /**
     * @return the coordinate separating the two half-spaces: points at or below
     *         this value (in the cut dimension) fall left, points above fall
     *         right
     */
    public double getValue() {
        return value;
    }

    @Override
    public String toString() {
        return String.format("Cut(%d, %f)", dimension, value);
    }
}
| 452 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/BoundingBox.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import java.util.Arrays;
/**
* A single precision implementation of AbstractBoundingBox which also satisfies
* the interface for Visitor classes
*/
public class BoundingBox implements IBoundingBoxView {

    /**
     * An array containing the minimum value corresponding to each dimension.
     */
    protected final float[] minValues;

    /**
     * An array containing the maximum value corresponding to each dimensions
     */
    protected final float[] maxValues;

    /**
     * The sum of side lengths defined by this bounding box.
     */
    protected double rangeSum;

    /**
     * Creates a degenerate (zero-volume) box at the given point. The point array
     * is intentionally aliased as both min and max; such a box is immutable
     * (addPoint/addBox reject it) and merge operations create copies.
     */
    public BoundingBox(float[] point) {
        minValues = maxValues = point;
        // a copy in not needed because mergedBox would create a copy
        // addPoint, addBox would also create copies
        rangeSum = 0.0;
    }

    /**
     * Create a new BoundingBox with the given minimum values and maximum values.
     * The arrays are adopted, not copied; the caller must not mutate them.
     *
     * @param minValues The minimum values for each coordinate.
     * @param maxValues The maximum values for each coordinate
     * @param sum       the precomputed sum of side lengths
     */
    public BoundingBox(final float[] minValues, final float[] maxValues, double sum) {
        this.minValues = minValues;
        this.maxValues = maxValues;
        rangeSum = sum;
    }

    /**
     * Creates the smallest box containing both points.
     *
     * @param first  one corner point
     * @param second another point of the same dimension
     * @throws IllegalArgumentException if the lengths differ
     */
    public BoundingBox(final float[] first, final float[] second) {
        checkArgument(first.length == second.length, " incorrect lengths in box");
        minValues = new float[first.length];
        maxValues = new float[first.length];
        rangeSum = 0;
        for (int i = 0; i < minValues.length; ++i) {
            minValues[i] = Math.min(first[i], second[i]);
            maxValues[i] = Math.max(first[i], second[i]);
            rangeSum += maxValues[i] - minValues[i];
        }
    }

    /** @return a deep, mutable copy of this box. */
    public BoundingBox copy() {
        return new BoundingBox(Arrays.copyOf(minValues, minValues.length), Arrays.copyOf(maxValues, maxValues.length),
                rangeSum);
    }

    /**
     * Returns a new box covering both this box and {@code otherBox}; neither
     * input is modified.
     */
    public BoundingBox getMergedBox(IBoundingBoxView otherBox) {
        float[] minValuesMerged = new float[minValues.length];
        float[] maxValuesMerged = new float[minValues.length];
        double sum = 0.0;

        for (int i = 0; i < minValues.length; ++i) {
            minValuesMerged[i] = Math.min(minValues[i], (float) otherBox.getMinValue(i));
            maxValuesMerged[i] = Math.max(maxValues[i], (float) otherBox.getMaxValue(i));
            sum += maxValuesMerged[i] - minValuesMerged[i];
        }
        return new BoundingBox(minValuesMerged, maxValuesMerged, sum);
    }

    /**
     * Probability that a random cut of the box extended to include {@code point}
     * would separate the point from this box: (added range) / (total range).
     * Returns 0 when the point is inside the box and 1 when this box is a single
     * point distinct from {@code point}.
     *
     * @param point the candidate point
     * @return the separation probability in [0, 1]
     */
    public double probabilityOfCut(float[] point) {
        double range = 0;
        for (int i = 0; i < point.length; i++) {
            range += Math.max(minValues[i] - point[i], 0);
        }
        for (int i = 0; i < point.length; i++) {
            range += Math.max(point[i] - maxValues[i], 0);
        }
        if (range == 0) {
            return 0;
        } else if (rangeSum == 0) {
            return 1;
        } else {
            return range / (range + rangeSum);
        }
    }

    /**
     * Returns a new box covering both this box and the point; this box is
     * unchanged.
     *
     * @throws IllegalArgumentException if the point has the wrong dimension
     */
    public BoundingBox getMergedBox(float[] point) {
        checkArgument(point.length == minValues.length, "incorrect length");
        return copy().addPoint(point);
    }

    /** @return the internal max-values array (not a copy; do not mutate). */
    public float[] getMaxValues() {
        return maxValues;
    }

    /** @return the internal min-values array (not a copy; do not mutate). */
    public float[] getMinValues() {
        return minValues;
    }

    /**
     * Grows this box in place to include {@code point} and recomputes rangeSum.
     * Rejected for the single-point (aliased-array) box.
     *
     * @return this box, for chaining
     */
    public BoundingBox addPoint(float[] point) {
        checkArgument(minValues.length == point.length, "incorrect length");
        checkArgument(minValues != maxValues, "not a mutable box");
        rangeSum = 0;
        for (int i = 0; i < point.length; ++i) {
            minValues[i] = Math.min(minValues[i], point[i]);
        }
        for (int i = 0; i < point.length; ++i) {
            maxValues[i] = Math.max(maxValues[i], point[i]);
        }
        for (int i = 0; i < point.length; ++i) {
            rangeSum += maxValues[i] - minValues[i];
        }
        return this;
    }

    /**
     * Grows this box in place to include {@code otherBox} and recomputes
     * rangeSum. Rejected for the single-point (aliased-array) box.
     *
     * @return this box, for chaining
     */
    public BoundingBox addBox(BoundingBox otherBox) {
        checkState(minValues != maxValues, "not a mutable box");
        rangeSum = 0;
        for (int i = 0; i < minValues.length; ++i) {
            minValues[i] = Math.min(minValues[i], otherBox.minValues[i]);
        }
        for (int i = 0; i < minValues.length; ++i) {
            maxValues[i] = Math.max(maxValues[i], otherBox.maxValues[i]);
        }
        for (int i = 0; i < minValues.length; ++i) {
            rangeSum += maxValues[i] - minValues[i];
        }
        return this;
    }

    public int getDimensions() {
        return minValues.length;
    }

    /**
     * @return the sum of side lengths for this BoundingBox.
     */
    public double getRangeSum() {
        return rangeSum;
    }

    /**
     * Gets the max value of the specified dimension.
     *
     * @param dimension the dimension for which we need the max value
     * @return the max value of the specified dimension
     */
    public double getMaxValue(final int dimension) {
        return maxValues[dimension];
    }

    /**
     * Gets the min value of the specified dimension.
     *
     * @param dimension the dimension for which we need the min value
     * @return the min value of the specified dimension
     */
    public double getMinValue(final int dimension) {
        return minValues[dimension];
    }

    /**
     * Returns true if the given point is contained in this bounding box. This is
     * equivalent to the point being a member of the set defined by this bounding
     * box.
     *
     * @param point with which we're performing the comparison
     * @return whether the point is contained by the bounding box
     */
    public boolean contains(float[] point) {
        checkArgument(point.length == minValues.length, " incorrect lengths");
        for (int i = 0; i < minValues.length; i++) {
            if (minValues[i] > point[i] || maxValues[i] < point[i]) {
                return false;
            }
        }
        return true;
    }

    /** @return true if {@code otherBox} lies entirely within this box. */
    public boolean contains(BoundingBox otherBox) {
        checkArgument(otherBox.minValues.length == minValues.length, " incorrect lengths");
        return contains(otherBox.minValues) && contains(otherBox.maxValues);
    }

    /** @return the side length of this box in the given dimension. */
    public double getRange(final int dimension) {
        return maxValues[dimension] - minValues[dimension];
    }

    @Override
    public String toString() {
        return String.format("BoundingBox(%s, %s)", Arrays.toString(minValues), Arrays.toString(maxValues));
    }

    /**
     * Two bounding boxes are considered equal if they have the same dimensions and
     * all their min values and max values are the same. Min and max values are
     * compared as primitive doubles using ==, so two bounding boxes are not equal
     * if their min and max values are merely very close.
     *
     * @param other An object to test for equality
     * @return true if other is a bounding box with the same min and max values
     */
    @Override
    public boolean equals(Object other) {
        if (!(other instanceof BoundingBox)) {
            return false;
        }

        BoundingBox otherBox = (BoundingBox) other;
        return Arrays.equals(minValues, otherBox.minValues) && Arrays.equals(maxValues, otherBox.maxValues);
    }

    /**
     * Consistent with {@link #equals(Object)}: derived from the same min and max
     * arrays. Previously missing, which broke the equals/hashCode contract for
     * boxes used as hash keys.
     */
    @Override
    public int hashCode() {
        return 31 * Arrays.hashCode(minValues) + Arrays.hashCode(maxValues);
    }
}
| 453 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/ITree.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import com.amazon.randomcutforest.config.IDynamicConfig;
import com.amazon.randomcutforest.executor.ITraversable;
/**
* A tree that can potentially interact with a coordinator
*
* @param <PointReference> The internal point representation expected by the
* component models in this list.
* @param <Point> The explicit data type of points being passed
*/
public interface ITree<PointReference, Point> extends ITraversable, IDynamicConfig {

    /** @return the mass (number of points, counting duplicates) stored in the tree. */
    int getMass();

    /** Maps a point from input space into the tree's internal coordinates. */
    float[] projectToTree(float[] point);

    /** Maps a result from the tree's internal coordinates back to input space. */
    float[] liftFromTree(float[] result);

    /** Double-precision variant of {@link #liftFromTree(float[])}. */
    double[] liftFromTree(double[] result);

    // NOTE(review): presumably maps input-space indices of missing values to
    // tree-space indices -- confirm against implementations
    int[] projectMissingIndices(int[] list);

    /**
     * Adds a point to the tree.
     *
     * @param point         reference to the point being added
     * @param sequenceIndex the sequence number of the update
     * @return the reference actually stored (may differ from the input, e.g. on
     *         duplicates -- confirm in implementation)
     */
    PointReference addPoint(PointReference point, long sequenceIndex);

    /** Adds a point to a tree whose structure is already (partially) fixed. */
    void addPointToPartialTree(PointReference point, long sequenceIndex);

    /** Checks internal invariants and rebuilds derived state if needed. */
    void validateAndReconstruct();

    /**
     * Deletes a point from the tree.
     *
     * @param point         reference to the point being removed
     * @param sequenceIndex the sequence number of the original insertion
     * @return the reference that was removed
     */
    PointReference deletePoint(PointReference point, long sequenceIndex);

    /** @return the random seed of the tree; 0 by default (no seed tracked). */
    default long getRandomSeed() {
        return 0L;
    }
}
| 454 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/HyperTree.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.function.Function;
public class HyperTree extends RandomCutTree {

    // user-supplied function that turns a bounding box into per-dimension
    // weights used to bias cut-dimension selection
    private final Function<IBoundingBoxView, double[]> gVecBuild;

    public Function<IBoundingBoxView, double[]> getgVec() {
        return gVecBuild;
    }

    public static Builder builder() {
        return new Builder();
    }

    protected HyperTree(HyperTree.Builder builder) {
        super(builder);
        this.gVecBuild = builder.gVec;
    }

    /**
     * Builds the tree over the given point-store indices: first the cut
     * structure is computed recursively (makeTreeInt), then the node store is
     * created from the resulting arrays, and finally the points are inserted
     * into the now-fixed structure.
     *
     * @param list point-store indices to build the tree from
     * @param seed seed for the random cuts
     */
    public void makeTree(List<Integer> list, int seed) {
        // this function allows a public call, which may be useful someday
        if (list.size() > 0 && list.size() < numberOfLeaves + 1) {
            int[] leftIndex = new int[numberOfLeaves - 1];
            int[] rightIndex = new int[numberOfLeaves - 1];
            // numberOfLeaves - 1 is the "not yet set" sentinel for child slots
            Arrays.fill(leftIndex, numberOfLeaves - 1);
            Arrays.fill(rightIndex, numberOfLeaves - 1);
            int[] cutDimension = new int[numberOfLeaves - 1];
            float[] cutValue = new float[numberOfLeaves - 1];
            root = makeTreeInt(list, seed, 0, this.gVecBuild, leftIndex, rightIndex, cutDimension, cutValue);
            nodeStore = AbstractNodeStore.builder().dimension(dimension).capacity(numberOfLeaves - 1)
                    .leftIndex(leftIndex).rightIndex(rightIndex).cutDimension(cutDimension).cutValues(cutValue).build();
            // the cuts are specified; now build tree
            for (int i = 0; i < list.size(); i++) {
                addPointToPartialTree(list.get(i), 0L);
            }
        } else {
            root = Null;
        }
    }

    /**
     * Recursively chooses cuts for the given points, writing the tree structure
     * into the passed arrays. Returns the node index for this subtree: an
     * internal node index, or a leaf encoded as pointIndex + capacity + 1 when
     * all points coincide (zero-range box), or Null for an empty list.
     */
    private int makeTreeInt(List<Integer> pointList, int seed, int firstFree,
            Function<IBoundingBoxView, double[]> vecBuild, int[] left, int[] right, int[] cutDimension,
            float[] cutValue) {
        if (pointList.size() == 0)
            return Null;
        // bounding box of all points in this subtree
        BoundingBox thisBox = new BoundingBox(pointStoreView.getNumericVector(pointList.get(0)));
        for (int i = 1; i < pointList.size(); i++) {
            thisBox = (BoundingBox) thisBox.getMergedBox(pointStoreView.getNumericVector(pointList.get(i)));
        }
        if (thisBox.getRangeSum() <= 0) {
            // degenerate box: all points identical, emit a leaf for the first one
            return pointList.get(0) + nodeStore.getCapacity() + 1;
        }
        // derive independent seeds for the two subtrees before drawing the cut
        Random ring = new Random(seed);
        int leftSeed = ring.nextInt();
        int rightSeed = ring.nextInt();
        Cut cut = getCut(thisBox, ring, vecBuild);
        List<Integer> leftList = new ArrayList<>();
        List<Integer> rightList = new ArrayList<>();
        for (int j = 0; j < pointList.size(); j++) {
            if (nodeStore.leftOf((float) cut.getValue(), cut.getDimension(),
                    pointStoreView.getNumericVector(pointList.get(j)))) {
                leftList.add(pointList.get(j));
            } else
                rightList.add(pointList.get(j));
        }
        int leftIndex = makeTreeInt(leftList, leftSeed, firstFree + 1, vecBuild, left, right, cutDimension, cutValue);
        int rightIndex = makeTreeInt(rightList, rightSeed, firstFree + leftList.size(), vecBuild, left, right,
                cutDimension, cutValue);
        // clamp leaf encodings to the sentinel; actual leaves attach later via
        // addPointToPartialTree
        left[firstFree] = Math.min(leftIndex, numberOfLeaves - 1);
        right[firstFree] = Math.min(rightIndex, numberOfLeaves - 1);
        cutDimension[firstFree] = cut.getDimension();
        cutValue[firstFree] = (float) cut.getValue();
        return firstFree;
    }

    /**
     * Draws a random cut: the dimension is sampled proportionally to the weights
     * from {@code vecSeparation}, the cut value uniformly within that
     * dimension's range (snapped off the max endpoint).
     *
     * @throws IllegalArgumentException if no dimension could be selected
     */
    private Cut getCut(IBoundingBoxView bb, Random ring, Function<IBoundingBoxView, double[]> vecSeparation) {
        Random rng = new Random(ring.nextInt());
        double cutf = rng.nextDouble();
        double dimf = rng.nextDouble();
        int td = -1;
        double rangeSum = 0;
        double[] vector = vecSeparation.apply(bb);
        for (int i = 0; i < bb.getDimensions(); i++) {
            // truncate to float precision so weights match float cut arithmetic
            vector[i] = (float) vector[i];
            rangeSum += vector[i];
        }
        double breakPoint = dimf * rangeSum;
        float cutValue = 0;
        for (int i = 0; i < bb.getDimensions(); i++) {
            double range = vector[i];
            if (range > 0) {
                if ((breakPoint > 0) && (breakPoint <= range)) {
                    td = i;
                    cutValue = (float) (bb.getMinValue(td) + bb.getRange(td) * cutf);
                    // never place the cut exactly at the max endpoint: everything
                    // would fall left and the split would be empty on one side
                    if (cutValue == bb.getMaxValue(td)) {
                        cutValue = (float) bb.getMinValue(td);
                    }
                }
                breakPoint -= range;
            }
        }
        checkArgument(td != -1, "Pivot selection failed.");
        return new Cut(td, cutValue);
    }

    public static class Builder extends RandomCutTree.Builder<Builder> {
        // weight function applied to bounding boxes during construction
        private Function<IBoundingBoxView, double[]> gVec;

        public Builder buildGVec(Function<IBoundingBoxView, double[]> gVec) {
            this.gVec = gVec;
            return this;
        }

        public HyperTree build() {
            return new HyperTree(this);
        }
    }
}
| 455 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeView.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import java.util.HashMap;
import com.amazon.randomcutforest.store.IPointStoreView;
public class NodeView implements INodeView {

    // below this bounding-box cache fraction, the view maintains its own box
    // incrementally instead of asking the tree to compute it
    public static double SWITCH_FRACTION = 0.499;

    RandomCutTree tree;
    // offset of the node currently in view, in the tree's node store
    int currentNodeOffset;
    // the leaf point, populated once a leaf is reached via setCurrentNode
    float[] leafPoint;
    // locally maintained bounding box, or null when the tree's cache is used
    BoundingBox currentBox;

    // NOTE(review): the pointStoreView parameter is unused here; reads go
    // through tree.pointStoreView -- confirm whether it can be dropped
    public NodeView(RandomCutTree tree, IPointStoreView<float[]> pointStoreView, int root) {
        this.currentNodeOffset = root;
        this.tree = tree;
    }

    public int getMass() {
        return tree.getMass(currentNodeOffset);
    }

    public IBoundingBoxView getBoundingBox() {
        if (currentBox == null) {
            // no local box maintained: delegate to the tree (and its cache)
            return tree.getBox(currentNodeOffset);
        }
        return currentBox;
    }

    public IBoundingBoxView getSiblingBoundingBox(float[] point) {
        // the sibling is the child on the opposite side of the cut from `point`
        return (toLeft(point)) ? tree.getBox(tree.nodeStore.getRightIndex(currentNodeOffset))
                : tree.getBox(tree.nodeStore.getLeftIndex(currentNodeOffset));
    }

    public int getCutDimension() {
        return tree.nodeStore.getCutDimension(currentNodeOffset);
    }

    @Override
    public double getCutValue() {
        return tree.nodeStore.getCutValue(currentNodeOffset);
    }

    public float[] getLeafPoint() {
        return leafPoint;
    }

    public HashMap<Long, Integer> getSequenceIndexes() {
        checkState(isLeaf(), "can only be invoked for a leaf");
        if (tree.storeSequenceIndexesEnabled) {
            return tree.getSequenceMap(tree.getPointIndex(currentNodeOffset));
        } else {
            // sequence indexes not tracked: return an empty map rather than null
            return new HashMap<>();
        }
    }

    @Override
    public double probailityOfSeparation(float[] point) {
        return tree.probabilityOfCut(currentNodeOffset, point, currentBox);
    }

    @Override
    public int getLeafPointIndex() {
        return tree.getPointIndex(currentNodeOffset);
    }

    public boolean isLeaf() {
        return tree.nodeStore.isLeaf(currentNodeOffset);
    }

    /**
     * Positions the view on a leaf: records the node offset, fetches the leaf
     * point, and (when maintaining a local box) starts a degenerate box at it.
     */
    protected void setCurrentNode(int newNode, int index, boolean setBox) {
        currentNodeOffset = newNode;
        leafPoint = tree.pointStoreView.getNumericVector(index);
        if (setBox && tree.boundingBoxCacheFraction < SWITCH_FRACTION) {
            currentBox = new BoundingBox(leafPoint, leafPoint);
        }
    }

    protected void setCurrentNodeOnly(int newNode) {
        currentNodeOffset = newNode;
    }

    /**
     * Moves the view one level up; when maintaining a local box, grows it to
     * cover the sibling subtree just left behind.
     */
    public void updateToParent(int parent, int currentSibling, boolean updateBox) {
        currentNodeOffset = parent;
        if (updateBox && tree.boundingBoxCacheFraction < SWITCH_FRACTION) {
            tree.growNodeBox(currentBox, tree.pointStoreView, parent, currentSibling);
        }
    }

    // this function exists for matching the behavior of RCF2.0 and will be replaced
    // this function explicitly uses the encoding of the new nodestore
    protected boolean toLeft(float[] point) {
        return point[tree.nodeStore.getCutDimension(currentNodeOffset)] <= tree.nodeStore
                .getCutValue(currentNodeOffset);
    }
}
| 456 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/IBoundingBoxView.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
public interface IBoundingBoxView {

    /** @return the sum of side lengths over all dimensions. */
    double getRangeSum();

    /** @return the number of dimensions of the box. */
    int getDimensions();

    /** @return the side length (max - min) in dimension {@code i}. */
    double getRange(int i);

    /** @return the minimum coordinate in dimension {@code i}. */
    double getMinValue(int i);

    /** @return the maximum coordinate in dimension {@code i}. */
    double getMaxValue(int i);

    // duplicates
    IBoundingBoxView copy();

    // below keeps the older box unchanged
    IBoundingBoxView getMergedBox(float[] point);

    // merges and keeps the older box unchanged
    IBoundingBoxView getMergedBox(IBoundingBoxView otherBox);
}
| 457 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/AbstractNodeStore.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Stack;
import com.amazon.randomcutforest.store.IndexIntervalManager;
/**
* A fixed-size buffer for storing interior tree nodes. An interior node is
* defined by its location in the tree (parent and child nodes), its random cut,
* and its bounding box. The NodeStore class uses arrays to store these field
* values for a collection of nodes. An index in the store can be used to look
* up the field values for a particular node.
*
* The internal nodes (handled by this store) corresponds to
* [0..upperRangeLimit]
*
* If we think of an array of Node objects as being row-oriented (where each row
* is a Node), then this class is analogous to a column-oriented database of
* Nodes.
*
*/
public abstract class AbstractNodeStore {
public static int Null = -1;
public static boolean DEFAULT_STORE_PARENT = false;
/**
* the number of internal nodes; the nodes will range from 0..capacity-1 the
* value capacity would correspond to "not yet set" the values Y= capacity+1+X
* correspond to pointstore index X note that capacity + 1 + X =
* number_of_leaves + X
*/
protected final int capacity;
protected final float[] cutValue;
protected IndexIntervalManager freeNodeManager;
public AbstractNodeStore(AbstractNodeStore.Builder<?> builder) {
this.capacity = builder.capacity;
if ((builder.leftIndex == null)) {
freeNodeManager = new IndexIntervalManager(capacity);
}
cutValue = (builder.cutValues != null) ? builder.cutValues : new float[capacity];
}
protected abstract int addNode(Stack<int[]> pathToRoot, float[] point, long sendex, int pointIndex, int childIndex,
int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box);
public boolean isLeaf(int index) {
return index > capacity;
}
public boolean isInternal(int index) {
return index < capacity && index >= 0;
}
public abstract void assignInPartialTree(int savedParent, float[] point, int childReference);
public abstract int getLeftIndex(int index);
public abstract int getRightIndex(int index);
public abstract int getParentIndex(int index);
public abstract void setRoot(int index);
protected abstract void decreaseMassOfInternalNode(int node);
protected abstract void increaseMassOfInternalNode(int node);
protected void manageInternalNodesPartial(Stack<int[]> path) {
while (!path.isEmpty()) {
int index = path.pop()[0];
increaseMassOfInternalNode(index);
}
}
public Stack<int[]> getPath(int root, float[] point, boolean verbose) {
int node = root;
Stack<int[]> answer = new Stack<>();
answer.push(new int[] { root, capacity });
while (isInternal(node)) {
double y = getCutValue(node);
if (leftOf(node, point)) {
answer.push(new int[] { getLeftIndex(node), getRightIndex(node) });
node = getLeftIndex(node);
} else { // this would push potential Null, of node == capacity
// that would be used for tree reconstruction
answer.push(new int[] { getRightIndex(node), getLeftIndex(node) });
node = getRightIndex(node);
}
}
return answer;
}
public abstract void deleteInternalNode(int index);
public abstract int getMass(int index);
protected boolean leftOf(float cutValue, int cutDimension, float[] point) {
return point[cutDimension] <= cutValue;
}
public boolean leftOf(int node, float[] point) {
int cutDimension = getCutDimension(node);
return leftOf(cutValue[node], cutDimension, point);
}
public int getSibling(int node, int parent) {
int sibling = getLeftIndex(parent);
if (node == sibling) {
sibling = getRightIndex(parent);
}
return sibling;
}
public abstract void spliceEdge(int parent, int node, int newNode);
public abstract void replaceParentBySibling(int grandParent, int parent, int node);
public abstract int getCutDimension(int index);
public double getCutValue(int index) {
return cutValue[index];
}
protected boolean toLeft(float[] point, int currentNodeOffset) {
return point[getCutDimension(currentNodeOffset)] <= cutValue[currentNodeOffset];
}
public abstract int[] getCutDimension();
public abstract int[] getRightIndex();
public abstract int[] getLeftIndex();
public float[] getCutValues() {
return cutValue;
}
public int getCapacity() {
return capacity;
}
public int size() {
return capacity - freeNodeManager.size();
}
/**
* a builder
*/
public static class Builder<T extends Builder<T>> {
protected int capacity;
protected int[] leftIndex;
protected int[] rightIndex;
protected int[] cutDimension;
protected float[] cutValues;
protected boolean storeParent = DEFAULT_STORE_PARENT;
protected int dimension;
protected int root;
// maximum number of points in the store
public T capacity(int capacity) {
this.capacity = capacity;
return (T) this;
}
public T dimension(int dimension) {
this.dimension = dimension;
return (T) this;
}
public T useRoot(int root) {
this.root = root;
return (T) this;
}
public T leftIndex(int[] leftIndex) {
this.leftIndex = leftIndex;
return (T) this;
}
public T rightIndex(int[] rightIndex) {
this.rightIndex = rightIndex;
return (T) this;
}
public T cutDimension(int[] cutDimension) {
this.cutDimension = cutDimension;
return (T) this;
}
public T cutValues(float[] cutValues) {
this.cutValues = cutValues;
return (T) this;
}
public T storeParent(boolean storeParent) {
this.storeParent = storeParent;
return (T) this;
}
public AbstractNodeStore build() {
if (leftIndex == null) {
checkArgument(rightIndex == null, " incorrect option of right indices");
checkArgument(cutValues == null, "incorrect option of cut values");
checkArgument(cutDimension == null, " incorrect option of cut dimensions");
} else {
checkArgument(rightIndex.length == capacity, " incorrect length of right indices");
checkArgument(cutValues.length == capacity, "incorrect length of cut values");
checkArgument(cutDimension.length == capacity, " incorrect length of cut dimensions");
}
// capacity is numbner of internal nodes
if (capacity < 256 && dimension <= 256) {
return new NodeStoreSmall(this);
} else if (capacity < Character.MAX_VALUE && dimension <= Character.MAX_VALUE) {
return new NodeStoreMedium(this);
} else {
return new NodeStoreLarge(this);
}
}
}
    /** Returns a fresh {@link Builder}; the raw type is kept for existing call sites. */
    public static Builder builder() {
        return new Builder();
    }
}
| 458 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/RandomCutTree.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static com.amazon.randomcutforest.CommonUtils.checkState;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.DEFAULT_STORE_PARENT;
import static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;
import static java.lang.Math.max;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.Stack;
import com.amazon.randomcutforest.IMultiVisitorFactory;
import com.amazon.randomcutforest.IVisitorFactory;
import com.amazon.randomcutforest.MultiVisitor;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.config.Config;
import com.amazon.randomcutforest.store.IPointStoreView;
/**
* A Compact Random Cut Tree is a tree data structure whose leaves represent
* points inserted into the tree and whose interior nodes represent regions of
* space defined by Bounding Boxes and Cuts. New nodes and leaves are added to
* the tree by making random cuts.
*
 * The offsets are encoded as follows: an offset greater than or equal to
 * maxSize corresponds to a leaf node at (offset - maxSize); otherwise the
 * offset corresponds to an internal node.
*
* The main use of this class is to be updated with points sampled from a
* stream, and to define traversal methods. Users can then implement a
* {@link Visitor} which can be submitted to a traversal method in order to
* compute a statistic from the tree.
*/
public class RandomCutTree implements ITree<Integer, float[]> {
/**
* The index value used to represent the absence of a node. For example, when
* the tree is created the root node index will be NULL. After a point is added
* and a root node is created, the root node's parent will be NULL, and so on.
*/
    // fixed source of randomness used by tests; when null, randomSeed drives insertions
    private Random testRandom;
    // when true, sequence indexes of points are tracked in sequenceMap
    protected boolean storeSequenceIndexesEnabled;
    // when true, per-node point sums are maintained in pointSum
    protected boolean centerOfMassEnabled;
    // seed advanced on every insertion that draws random cuts
    private long randomSeed;
    // index of the root node; Null for an empty tree
    protected int root;
    // read-only view of the backing point store
    protected IPointStoreView<float[]> pointStoreView;
    // maximum number of leaves (sample size); also the leaf/internal index boundary
    protected int numberOfLeaves;
    // storage of internal nodes (children, cut dimension/value, mass)
    protected AbstractNodeStore nodeStore;
    // fraction of internal nodes whose bounding boxes are cached
    protected double boundingBoxCacheFraction;
    // number of samples to observe before inference output is meaningful
    protected int outputAfter;
    // dimension of the points stored in this tree
    protected int dimension;
    // maps point index -> (mass - 1); an absent key means mass 1
    protected final HashMap<Integer, Integer> leafMass;
    // cached range-sums of bounding boxes; 0 doubles as the "not cached" sentinel
    protected double[] rangeSumData;
    // flattened cached boxes: per node, dimension mins followed by dimension maxes
    protected float[] boundingBoxData;
    // per internal node, sum of the points in its subtree (center-of-mass support)
    protected float[] pointSum;
    // maps point index -> sequence indexes of the stored copies
    protected HashMap<Integer, List<Long>> sequenceMap;
    /**
     * Constructs a tree from builder settings; allocates the bounding-box cache
     * (a fraction of the internal-node slots) and, when enabled, the per-node
     * point sums and the sequence-index map.
     *
     * @param builder the configured builder
     */
    protected RandomCutTree(Builder<?> builder) {
        pointStoreView = builder.pointStoreView;
        numberOfLeaves = builder.capacity;
        randomSeed = builder.randomSeed;
        testRandom = builder.random;
        outputAfter = builder.outputAfter.orElse(max(1, numberOfLeaves / 4));
        dimension = (builder.dimension != 0) ? builder.dimension : pointStoreView.getDimensions();
        // a tree over numberOfLeaves leaves has numberOfLeaves - 1 internal nodes
        nodeStore = (builder.nodeStore != null) ? builder.nodeStore
                : AbstractNodeStore.builder().capacity(numberOfLeaves - 1).storeParent(builder.storeParent)
                        .dimension(dimension).build();
        this.boundingBoxCacheFraction = builder.boundingBoxCacheFraction;
        this.storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;
        this.centerOfMassEnabled = builder.centerOfMassEnabled;
        this.root = builder.root;
        leafMass = new HashMap<>();
        // only the first cache_limit internal nodes cache bounding boxes
        int cache_limit = (int) Math.floor(boundingBoxCacheFraction * (numberOfLeaves - 1));
        rangeSumData = new double[cache_limit];
        boundingBoxData = new float[2 * dimension * cache_limit];
        if (this.centerOfMassEnabled) {
            pointSum = new float[(numberOfLeaves - 1) * dimension];
        }
        if (this.storeSequenceIndexesEnabled) {
            sequenceMap = new HashMap<>();
        }
    }
@Override
public <T> void setConfig(String name, T value, Class<T> clazz) {
if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {
checkArgument(Double.class.isAssignableFrom(clazz),
() -> String.format("Setting '%s' must be a double value", name));
setBoundingBoxCacheFraction((Double) value);
} else {
throw new IllegalArgumentException("Unsupported configuration setting: " + name);
}
}
@Override
public <T> T getConfig(String name, Class<T> clazz) {
checkNotNull(clazz, "clazz must not be null");
if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {
checkArgument(clazz.isAssignableFrom(Double.class),
() -> String.format("Setting '%s' must be a double value", name));
return clazz.cast(boundingBoxCacheFraction);
} else {
throw new IllegalArgumentException("Unsupported configuration setting: " + name);
}
}
// dynamically change the fraction of the new nodes which caches their bounding
// boxes
// 0 would mean less space usage, but slower throughput
// 1 would imply larger space but better throughput
public void setBoundingBoxCacheFraction(double fraction) {
checkArgument(0 <= fraction && fraction <= 1, "incorrect parameter");
boundingBoxCacheFraction = fraction;
resizeCache(fraction);
}
/**
* Return a new {@link Cut}, which is chosen uniformly over the space of
* possible cuts for a bounding box and its union with a point. The cut must
* exist unless the union box is a single point. There are floating point issues
 * -- even though the original values are in float and the calculations are in
 * double, issues can show up with a large number of dimensions (each triggering
 * an addition/subtraction).
*
* @param factor A random cut
* @param point the point whose union is taken with the box
* @param box A bounding box that we want to find a random cut for.
* @return A new Cut corresponding to a random cut in the bounding box.
*/
    protected Cut randomCut(double factor, float[] point, BoundingBox box) {
        // total side length of the union of box and point; the cut position is factor * range
        double range = 0.0;
        for (int i = 0; i < point.length; i++) {
            float minValue = (float) box.getMinValue(i);
            float maxValue = (float) box.getMaxValue(i);
            if (point[i] < minValue) {
                minValue = point[i];
            } else if (point[i] > maxValue) {
                maxValue = point[i];
            }
            range += maxValue - minValue;
        }
        checkArgument(range > 0, () -> " the union is a single point " + Arrays.toString(point)
                + "or the box is inappropriate, box" + box.toString() + "factor =" + factor);
        double breakPoint = factor * range;
        // walk the dimensions until the remaining breakPoint falls inside a dimension's extent
        for (int i = 0; i < box.getDimensions(); i++) {
            float minValue = (float) box.getMinValue(i);
            float maxValue = (float) box.getMaxValue(i);
            if (point[i] < minValue) {
                minValue = point[i];
            } else if (point[i] > maxValue) {
                maxValue = point[i];
            }
            double gap = maxValue - minValue;
            if (breakPoint <= gap && gap > 0) {
                float cutValue = (float) (minValue + breakPoint);
                // Random cuts have to take a value in the half-open interval [minValue,
                // maxValue) to ensure that a
                // Node has a valid left child and right child.
                if (cutValue >= maxValue) {
                    cutValue = Math.nextAfter((float) maxValue, minValue);
                }
                return new Cut(i, cutValue);
            }
            breakPoint -= gap;
        }
        // if we are here then factor is likely almost 1 and we have floating point
        // issues
        // we will randomize between the first and the last non-zero ranges and choose
        // the
        // same cutValue as using nextAfter above -- we will use the factor as a seed
        // and
        // not be optimizing this sequel (either in execution or code) to ensure easier
        // debugging
        // this should be an anomaly - no pun intended.
        Random rng = new Random((long) (factor * Long.MAX_VALUE / 2));
        if (rng.nextDouble() < 0.5) {
            // scan forward for the first dimension with non-zero extent
            for (int i = 0; i < box.getDimensions(); i++) {
                float minValue = (float) box.getMinValue(i);
                float maxValue = (float) box.getMaxValue(i);
                if (point[i] < minValue) {
                    minValue = point[i];
                } else if (point[i] > maxValue) {
                    maxValue = point[i];
                }
                if (maxValue > minValue) {
                    double cutValue = Math.nextAfter((float) maxValue, minValue);
                    return new Cut(i, cutValue);
                }
            }
        } else {
            // scan backward for the last dimension with non-zero extent
            for (int i = box.getDimensions() - 1; i >= 0; i--) {
                float minValue = (float) box.getMinValue(i);
                float maxValue = (float) box.getMaxValue(i);
                if (point[i] < minValue) {
                    minValue = point[i];
                } else if (point[i] > maxValue) {
                    maxValue = point[i];
                }
                if (maxValue > minValue) {
                    double cutValue = Math.nextAfter((float) maxValue, minValue);
                    return new Cut(i, cutValue);
                }
            }
        }
        throw new IllegalStateException("The break point did not lie inside the expected range; factor " + factor
                + ", point " + Arrays.toString(point) + " box " + box.toString());
    }
/**
* the following function adds a point to the tree
*
* @param pointIndex the number corresponding to the point
* @param sequenceIndex sequence index of the point
* @return the value of the point index where the point was added; this is
* pointIndex if there are no duplicates; otherwise it is the value of
* the point being duplicated.
*/
    public Integer addPoint(Integer pointIndex, long sequenceIndex) {
        if (root == Null) {
            // empty tree: the new point becomes the root leaf
            root = convertToLeaf(pointIndex);
            addLeaf(pointIndex, sequenceIndex);
            return pointIndex;
        } else {
            float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));
            checkArgument(point.length == dimension, () -> " mismatch in dimensions for " + pointIndex);
            // pathToRoot holds (node, sibling) pairs; root at the bottom, leaf at the top
            Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);
            int[] first = pathToRoot.pop();
            int leafNode = first[0];
            int savedParent = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];
            int leafSavedSibling = first[1];
            int sibling = leafSavedSibling;
            int leafPointIndex = getPointIndex(leafNode);
            float[] oldPoint = projectToTree(pointStoreView.getNumericVector(leafPointIndex));
            checkArgument(oldPoint.length == dimension, () -> " mismatch in dimensions for " + pointIndex);
            Stack<int[]> parentPath = new Stack<>();
            if (Arrays.equals(point, oldPoint)) {
                // duplicate point: bump the leaf mass and reuse the existing point index
                increaseLeafMass(leafNode);
                manageAncestorsAdd(pathToRoot, point);
                addLeaf(leafPointIndex, sequenceIndex);
                return leafPointIndex;
            } else {
                int node = leafNode;
                int savedNode = node;
                int parent = savedParent;
                float savedCutValue = (float) 0.0;
                BoundingBox currentBox = new BoundingBox(oldPoint, oldPoint);
                BoundingBox savedBox = currentBox.copy();
                int savedDim = Integer.MAX_VALUE;
                Random rng;
                if (testRandom == null) {
                    rng = new Random(randomSeed);
                    randomSeed = rng.nextLong();
                } else {
                    rng = testRandom;
                }
                // walk up from the leaf: at each level draw a random cut between the
                // new point and the current subtree box; remember the last level where
                // the cut cleanly separates the point from the box
                while (true) {
                    double factor = rng.nextDouble();
                    Cut cut = randomCut(factor, point, currentBox);
                    int dim = cut.getDimension();
                    float value = (float) cut.getValue();
                    boolean separation = ((point[dim] <= value && value < currentBox.getMinValue(dim)
                            || point[dim] > value && value >= currentBox.getMaxValue(dim)));
                    if (separation) {
                        savedCutValue = value;
                        savedDim = dim;
                        savedParent = parent;
                        savedNode = node;
                        savedBox = currentBox.copy();
                        parentPath.clear();
                    } else {
                        parentPath.push(new int[] { node, sibling });
                    }
                    if (currentBox.contains(point) || parent == Null) {
                        break;
                    } else {
                        growNodeBox(currentBox, pointStoreView, parent, sibling);
                        int[] next = pathToRoot.pop();
                        node = next[0];
                        sibling = next[1];
                        if (pathToRoot.size() != 0) {
                            parent = pathToRoot.lastElement()[0];
                        } else {
                            parent = Null;
                        }
                    }
                }
                // restore the part of the path below the chosen splice location
                if (savedParent != Null) {
                    while (!parentPath.isEmpty()) {
                        pathToRoot.push(parentPath.pop());
                    }
                }
                int childMassIfLeaf = isLeaf(savedNode) ? getLeafMass(savedNode) : 0;
                // splice a new internal node (with the saved cut) above savedNode
                int mergedNode = nodeStore.addNode(pathToRoot, point, sequenceIndex, pointIndex, savedNode,
                        childMassIfLeaf, savedDim, savedCutValue, savedBox);
                addLeaf(pointIndex, sequenceIndex);
                addBox(mergedNode, point, savedBox);
                manageAncestorsAdd(pathToRoot, point);
                if (pointSum != null) {
                    recomputePointSum(mergedNode);
                }
                if (savedParent == Null) {
                    root = mergedNode;
                }
            }
            return pointIndex;
        }
    }
protected void manageAncestorsAdd(Stack<int[]> path, float[] point) {
while (!path.isEmpty()) {
int index = path.pop()[0];
nodeStore.increaseMassOfInternalNode(index);
if (pointSum != null) {
recomputePointSum(index);
}
if (boundingBoxCacheFraction > 0.0) {
checkContainsAndRebuildBox(index, point, pointStoreView);
addPointInPlace(index, point);
}
}
}
/**
* the following is the same as in addPoint() except this function is used to
* rebuild the tree structure. This function does not create auxiliary arrays,
* which should be performed using validateAndReconstruct()
*
* @param pointIndex index of point (in point store)
* @param sequenceIndex sequence index (stored in sampler)
*/
    public void addPointToPartialTree(Integer pointIndex, long sequenceIndex) {
        checkArgument(root != Null, " a null root is not a partial tree");
        float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));
        checkArgument(point.length == dimension, () -> " incorrect projection at index " + pointIndex);
        Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);
        int[] first = pathToRoot.pop();
        int leafNode = first[0];
        int savedParent = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];
        if (!isLeaf(leafNode)) {
            // path ended at an unassigned slot of the partial tree; attach the point there
            if (savedParent == Null) {
                root = convertToLeaf(pointIndex);
            } else {
                nodeStore.assignInPartialTree(savedParent, point, convertToLeaf(pointIndex));
                nodeStore.manageInternalNodesPartial(pathToRoot);
                addLeaf(pointIndex, sequenceIndex);
            }
            return;
        }
        // otherwise the point must be a duplicate of the leaf already present
        int leafPointIndex = getPointIndex(leafNode);
        float[] oldPoint = projectToTree(pointStoreView.getNumericVector(leafPointIndex));
        checkArgument(oldPoint.length == dimension && Arrays.equals(point, oldPoint),
                () -> "incorrect state on adding " + pointIndex);
        increaseLeafMass(leafNode);
        nodeStore.manageInternalNodesPartial(pathToRoot);
        addLeaf(leafPointIndex, sequenceIndex);
        return;
    }
    /**
     * Deletes one copy of a point from the tree; when the leaf's mass drops to
     * zero the leaf and its parent are removed and the sibling is promoted.
     *
     * @param pointIndex    index of the point (in the point store)
     * @param sequenceIndex sequence index of the copy being removed
     * @return the point index stored at the deleted/decremented leaf
     */
    public Integer deletePoint(Integer pointIndex, long sequenceIndex) {
        checkArgument(root != Null, " deleting from an empty tree");
        float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));
        checkArgument(point.length == dimension, () -> " incorrect projection at index " + pointIndex);
        Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);
        int[] first = pathToRoot.pop();
        int leafSavedSibling = first[1];
        int leafNode = first[0];
        int leafPointIndex = getPointIndex(leafNode);
        checkArgument(leafPointIndex == pointIndex,
                () -> " deleting wrong node " + leafPointIndex + " instead of " + pointIndex);
        removeLeaf(leafPointIndex, sequenceIndex);
        if (decreaseLeafMass(leafNode) == 0) {
            // the leaf held a single copy: detach it
            if (pathToRoot.size() == 0) {
                root = Null;
            } else {
                int parent = pathToRoot.pop()[0];
                if (pathToRoot.size() == 0) {
                    root = leafSavedSibling;
                } else {
                    int grandParent = pathToRoot.lastElement()[0];
                    nodeStore.replaceParentBySibling(grandParent, parent, leafNode);
                    manageAncestorsDelete(pathToRoot, point);
                }
                nodeStore.deleteInternalNode(parent);
                if (pointSum != null) {
                    invalidatePointSum(parent);
                }
                // zero the cached range-sum so a reused slot is not mistaken for a valid box
                int idx = translate(parent);
                if (idx != Integer.MAX_VALUE) {
                    rangeSumData[idx] = 0.0;
                }
            }
        } else {
            manageAncestorsDelete(pathToRoot, point);
        }
        return leafPointIndex;
    }
protected void manageAncestorsDelete(Stack<int[]> path, float[] point) {
boolean resolved = false;
while (!path.isEmpty()) {
int index = path.pop()[0];
nodeStore.decreaseMassOfInternalNode(index);
if (pointSum != null) {
recomputePointSum(index);
}
if (boundingBoxCacheFraction > 0.0 && !resolved) {
resolved = checkContainsAndRebuildBox(index, point, pointStoreView);
}
}
}
//// leaf, nonleaf representations
public boolean isLeaf(int index) {
// note that numberOfLeaves - 1 corresponds to an unspefied leaf in partial tree
// 0 .. numberOfLeaves - 2 corresponds to internal nodes
return index >= numberOfLeaves;
}
public boolean isInternal(int index) {
// note that numberOfLeaves - 1 corresponds to an unspefied leaf in partial tree
// 0 .. numberOfLeaves - 2 corresponds to internal nodes
return index < numberOfLeaves - 1 && index >= 0;
}
public int convertToLeaf(int pointIndex) {
return pointIndex + numberOfLeaves;
}
public int getPointIndex(int index) {
checkArgument(index >= numberOfLeaves, () -> " does not have a point associated " + index);
return index - numberOfLeaves;
}
    /** Returns the left child of internal node {@code index}. */
    public int getLeftChild(int index) {
        checkArgument(isInternal(index), () -> "incorrect call to get left Index " + index);
        return nodeStore.getLeftIndex(index);
    }

    /** Returns the right child of internal node {@code index}. */
    public int getRightChild(int index) {
        checkArgument(isInternal(index), () -> "incorrect call to get right child " + index);
        return nodeStore.getRightIndex(index);
    }

    /** Returns the cut dimension of internal node {@code index}. */
    public int getCutDimension(int index) {
        checkArgument(isInternal(index), () -> "incorrect call to get cut dimension " + index);
        return nodeStore.getCutDimension(index);
    }

    /** Returns the cut value of internal node {@code index}. */
    public double getCutValue(int index) {
        checkArgument(isInternal(index), () -> "incorrect call to get cut value " + index);
        return nodeStore.getCutValue(index);
    }
///// mass assignments; separating leafs and internal nodes
    /** Returns the mass of any node: leaf masses are tracked here, internal masses in the node store. */
    protected int getMass(int index) {
        return (isLeaf(index)) ? getLeafMass(index) : nodeStore.getMass(index);
    }

    /**
     * Returns the mass of leaf {@code index}. The leafMass map stores
     * (mass - 1) keyed by point index, so an absent entry means mass 1.
     */
    protected int getLeafMass(int index) {
        int y = (index - numberOfLeaves);
        Integer value = leafMass.get(y);
        return (value != null) ? value + 1 : 1;
    }

    /** Increments the mass of leaf {@code index} (stored as mass - 1). */
    protected void increaseLeafMass(int index) {
        int y = (index - numberOfLeaves);
        leafMass.merge(y, 1, Integer::sum);
    }

    /**
     * Decrements the mass of leaf {@code index}.
     *
     * @return the mass after the decrement; 0 indicates the leaf held a single
     *         copy and should be removed from the tree
     */
    protected int decreaseLeafMass(int index) {
        int y = (index - numberOfLeaves);
        Integer value = leafMass.remove(y);
        if (value != null) {
            if (value > 1) {
                // stored value is mass - 1, so re-storing (value - 1) and returning
                // value yields the new mass
                leafMass.put(y, (value - 1));
                return value;
            } else {
                // mass was 2; the entry stays removed and the new mass is 1
                return 1;
            }
        } else {
            // mass was 1; the leaf is now empty
            return 0;
        }
    }

    /** Returns the total mass of the tree (mass of the root, or 0 when empty). */
    @Override
    public int getMass() {
        return root == Null ? 0 : isLeaf(root) ? getLeafMass(root) : nodeStore.getMass(root);
    }
/////// Bounding box
    /**
     * Resizes (or discards, when fraction is 0) the bounding-box cache so that it
     * covers the given fraction of internal nodes, and records the new fraction.
     *
     * @param fraction the desired cache fraction
     */
    public void resizeCache(double fraction) {
        if (fraction == 0) {
            rangeSumData = null;
            boundingBoxData = null;
        } else {
            int limit = (int) Math.floor(fraction * (numberOfLeaves - 1));
            rangeSumData = (rangeSumData == null) ? new double[limit] : Arrays.copyOf(rangeSumData, limit);
            boundingBoxData = (boundingBoxData == null) ? new float[limit * 2 * dimension]
                    : Arrays.copyOf(boundingBoxData, limit * 2 * dimension);
        }
        boundingBoxCacheFraction = fraction;
    }
protected int translate(int index) {
if (rangeSumData == null || rangeSumData.length <= index) {
return Integer.MAX_VALUE;
} else {
return index;
}
}
void copyBoxToData(int idx, BoundingBox box) {
int base = 2 * idx * dimension;
int mid = base + dimension;
System.arraycopy(box.getMinValues(), 0, boundingBoxData, base, dimension);
System.arraycopy(box.getMaxValues(), 0, boundingBoxData, mid, dimension);
rangeSumData[idx] = box.getRangeSum();
}
void addPointInPlace(int index, float[] point) {
int idx = translate(index);
if (idx != Integer.MAX_VALUE) {
int base = 2 * idx * dimension;
int mid = base + dimension;
double rangeSum = 0;
for (int i = 0; i < dimension; i++) {
boundingBoxData[base + i] = Math.min(boundingBoxData[base + i], point[i]);
}
for (int i = 0; i < dimension; i++) {
boundingBoxData[mid + i] = max(boundingBoxData[mid + i], point[i]);
}
for (int i = 0; i < dimension; i++) {
rangeSum += boundingBoxData[mid + i] - boundingBoxData[base + i];
}
rangeSumData[idx] = rangeSum;
}
}
    /**
     * Returns the bounding box of the subtree rooted at {@code index}: a
     * degenerate box for leaves; for internal nodes the cached box when present,
     * otherwise a freshly reconstructed one (cached as a side effect when the
     * slot exists). Note a zero range-sum is treated as "not cached", so a
     * genuinely degenerate internal box is rebuilt on each call.
     */
    public BoundingBox getBox(int index) {
        if (isLeaf(index)) {
            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(index)));
            checkArgument(point.length == dimension, () -> "failure in projection at index " + index);
            return new BoundingBox(point, point);
        } else {
            checkState(isInternal(index), " incomplete state");
            int idx = translate(index);
            if (idx != Integer.MAX_VALUE) {
                if (rangeSumData[idx] != 0) {
                    // return non-trivial boxes
                    return getBoxFromData(idx);
                } else {
                    BoundingBox box = reconstructBox(index, pointStoreView);
                    copyBoxToData(idx, box);
                    return box;
                }
            }
            return reconstructBox(index, pointStoreView);
        }
    }
BoundingBox reconstructBox(int index, IPointStoreView<float[]> pointStoreView) {
BoundingBox mutatedBoundingBox = getBox(nodeStore.getLeftIndex(index));
growNodeBox(mutatedBoundingBox, pointStoreView, index, nodeStore.getRightIndex(index));
return mutatedBoundingBox;
}
boolean checkStrictlyContains(int index, float[] point) {
int idx = translate(index);
if (idx != Integer.MAX_VALUE) {
int base = 2 * idx * dimension;
int mid = base + dimension;
boolean isInside = true;
for (int i = 0; i < dimension && isInside; i++) {
if (point[i] >= boundingBoxData[mid + i] || boundingBoxData[base + i] >= point[i]) {
isInside = false;
}
}
return isInside;
}
return false;
}
boolean checkContainsAndRebuildBox(int index, float[] point, IPointStoreView<float[]> pointStoreView) {
int idx = translate(index);
if (idx != Integer.MAX_VALUE) {
if (!checkStrictlyContains(index, point)) {
BoundingBox mutatedBoundingBox = reconstructBox(index, pointStoreView);
copyBoxToData(idx, mutatedBoundingBox);
return false;
}
return true;
}
return false;
}
BoundingBox getBoxFromData(int idx) {
int base = 2 * idx * dimension;
int mid = base + dimension;
return new BoundingBox(Arrays.copyOfRange(boundingBoxData, base, base + dimension),
Arrays.copyOfRange(boundingBoxData, mid, mid + dimension));
}
void addBox(int index, float[] point, BoundingBox box) {
int idx = translate(index);
if (idx != Integer.MAX_VALUE) { // always add irrespective of rangesum
copyBoxToData(idx, box);
addPointInPlace(index, point);
}
}
    /**
     * Grows {@code box} to include the subtree rooted at {@code sibling}: a
     * single point for a leaf, the cached (or recursively computed) box for an
     * internal node.
     */
    void growNodeBox(BoundingBox box, IPointStoreView<float[]> pointStoreView, int node, int sibling) {
        if (isLeaf(sibling)) {
            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(sibling)));
            checkArgument(point.length == dimension, () -> " incorrect projection at index " + sibling);
            box.addPoint(point);
        } else {
            if (!isInternal(sibling)) {
                throw new IllegalStateException(" incomplete state " + sibling);
            }
            int siblingIdx = translate(sibling);
            if (siblingIdx != Integer.MAX_VALUE) {
                if (rangeSumData[siblingIdx] != 0) {
                    box.addBox(getBoxFromData(siblingIdx));
                } else {
                    // translate() returns the index unchanged for cached nodes, so
                    // siblingIdx == sibling here and getBox is called on the node
                    BoundingBox newBox = getBox(siblingIdx);
                    copyBoxToData(siblingIdx, newBox);
                    box.addBox(newBox);
                }
                return;
            }
            // uncached: recurse into both children of the sibling
            growNodeBox(box, pointStoreView, sibling, nodeStore.getLeftIndex(sibling));
            growNodeBox(box, pointStoreView, sibling, nodeStore.getRightIndex(sibling));
            return;
        }
    }
    /**
     * Returns the probability that a random cut over the union of the node's
     * bounding box and {@code point} separates the point from the box; uses the
     * cached box when available, then {@code otherBox}, then a reconstruction.
     */
    public double probabilityOfCut(int node, float[] point, BoundingBox otherBox) {
        int nodeIdx = translate(node);
        if (nodeIdx != Integer.MAX_VALUE && rangeSumData[nodeIdx] != 0) {
            int base = 2 * nodeIdx * dimension;
            int mid = base + dimension;
            // sum of the amounts by which the point extends the box below the minima
            double minsum = 0;
            double maxsum = 0;
            for (int i = 0; i < dimension; i++) {
                minsum += max(boundingBoxData[base + i] - point[i], 0);
            }
            // ... and above the maxima
            for (int i = 0; i < dimension; i++) {
                maxsum += max(point[i] - boundingBoxData[mid + i], 0);
            }
            double sum = maxsum + minsum;
            if (sum == 0.0) {
                return 0.0;
            }
            return sum / (rangeSumData[nodeIdx] + sum);
        } else if (otherBox != null) {
            return otherBox.probabilityOfCut(point);
        } else {
            BoundingBox box = getBox(node);
            return box.probabilityOfCut(point);
        }
    }
/// additional information at nodes
    /**
     * Returns the sum of the points in the subtree rooted at {@code index}: for
     * a leaf, its (copied) point scaled by the leaf mass; for an internal node,
     * a copy of the maintained pointSum slice. Requires center of mass enabled.
     */
    public float[] getPointSum(int index) {
        checkArgument(centerOfMassEnabled, " enable center of mass");
        if (isLeaf(index)) {
            // projectToTree returns a copy, so scaling in place is safe
            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(index)));
            checkArgument(point.length == dimension, () -> " incorrect projection");
            int mass = getMass(index);
            for (int i = 0; i < point.length; i++) {
                point[i] *= mass;
            }
            return point;
        } else {
            return Arrays.copyOfRange(pointSum, index * dimension, (index + 1) * dimension);
        }
    }
public void invalidatePointSum(int index) {
for (int i = 0; i < dimension; i++) {
pointSum[index * dimension + i] = 0;
}
}
public void recomputePointSum(int index) {
float[] left = getPointSum(nodeStore.getLeftIndex(index));
float[] right = getPointSum(nodeStore.getRightIndex(index));
for (int i = 0; i < dimension; i++) {
pointSum[index * dimension + i] = left[i] + right[i];
}
}
public HashMap<Long, Integer> getSequenceMap(int index) {
HashMap<Long, Integer> hashMap = new HashMap<>();
List<Long> list = getSequenceList(index);
for (Long e : list) {
hashMap.merge(e, 1, Integer::sum);
}
return hashMap;
}
    /**
     * Returns the sequence indexes stored for point index {@code index}; may be
     * null if none are stored. NOTE(review): throws NPE when sequence-index
     * storage is disabled (sequenceMap is null) — confirm callers guard on
     * storeSequenceIndexesEnabled.
     */
    public List<Long> getSequenceList(int index) {
        return sequenceMap.get(index);
    }
protected void addLeaf(int pointIndex, long sequenceIndex) {
if (storeSequenceIndexesEnabled) {
List<Long> leafList = sequenceMap.remove(pointIndex);
if (leafList == null) {
leafList = new ArrayList<>(1);
}
leafList.add(sequenceIndex);
sequenceMap.put(pointIndex, leafList);
}
}
    /**
     * Removes one occurrence of {@code sequenceIndex} from the list stored for
     * {@code leafPointIndex}; the map entry is dropped entirely when the last
     * occurrence is removed. No-op when sequence-index storage is disabled.
     */
    public void removeLeaf(int leafPointIndex, long sequenceIndex) {
        if (storeSequenceIndexesEnabled) {
            List<Long> leafList = sequenceMap.remove(leafPointIndex);
            checkArgument(leafList != null, " leaf index not found in tree");
            // autoboxing selects List.remove(Object): removes by value, not position
            checkArgument(leafList.remove(sequenceIndex), " sequence index not found in leaf");
            if (!leafList.isEmpty()) {
                sequenceMap.put(leafPointIndex, leafList);
            }
        }
    }
//// validations
    /** Validates the whole tree and rebuilds auxiliary structures; no-op for an empty tree. */
    public void validateAndReconstruct() {
        if (root != Null) {
            validateAndReconstruct(root);
        }
    }
/**
* This function is supposed to validate the integrity of the tree and rebuild
* internal data structures. At this moment the only internal structure is the
* pointsum.
*
* @param index the node of a tree
* @return a bounding box of the points
*/
    public BoundingBox validateAndReconstruct(int index) {
        if (isLeaf(index)) {
            return getBox(index);
        } else {
            checkState(isInternal(index), "illegal state");
            BoundingBox leftBox = validateAndReconstruct(getLeftChild(index));
            BoundingBox rightBox = validateAndReconstruct(getRightChild(index));
            // every left descendant must lie at or below the cut, every right one above
            if (leftBox.maxValues[getCutDimension(index)] > getCutValue(index)
                    || rightBox.minValues[getCutDimension(index)] <= getCutValue(index)) {
                throw new IllegalStateException(" incorrect bounding state at index " + index + " cut value "
                        + getCutValue(index) + "cut dimension " + getCutDimension(index) + " left Box "
                        + leftBox.toString() + " right box " + rightBox.toString());
            }
            if (centerOfMassEnabled) {
                recomputePointSum(index);
            }
            // merge the child boxes in place and refresh the cache entry
            rightBox.addBox(leftBox);
            int idx = translate(index);
            if (idx != Integer.MAX_VALUE) { // always add irrespective of rangesum
                copyBoxToData(idx, rightBox);
            }
            return rightBox;
        }
    }
//// traversals
/**
* Starting from the root, traverse the canonical path to a leaf node and visit
* the nodes along the path. The canonical path is determined by the input
* point: at each interior node, we select the child node by comparing the
* node's {@link Cut} to the corresponding coordinate value in the input point.
* The method recursively traverses to the leaf node first and then invokes the
* visitor on each node in reverse order. That is, if the path to the leaf node
* determined by the input point is root, node1, node2, ..., node(N-1), nodeN,
* leaf; then we will first invoke visitor::acceptLeaf on the leaf node, and
* then we will invoke visitor::accept on the remaining nodes in the following
* order: nodeN, node(N-1), ..., node2, node1, and root.
*
* @param point A point which determines the traversal path from the
* root to a leaf node.
* @param visitorFactory A visitor that will be invoked for each node on the
* path.
* @param <R> The return type of the Visitor.
* @return the value of {@link Visitor#getResult()}} after the traversal.
*/
    @Override
    public <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory) {
        checkArgument(root != Null, "this tree doesn't contain any nodes");
        checkNotNull(point, "point must not be null");
        checkNotNull(visitorFactory, "visitor must not be null");
        Visitor<R> visitor = visitorFactory.newVisitor(this, point);
        // a single mutable view is reused for every node on the path
        NodeView currentNodeView = new NodeView(this, pointStoreView, root);
        traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, root, 0);
        return visitorFactory.liftResult(this, visitor.getResult());
    }
    /**
     * Recursive helper for {@link #traverse}: descends along the cut-determined
     * path, then invokes the visitor on the way back up (leaf first, root last).
     */
    protected <R> void traversePathToLeafAndVisitNodes(float[] point, Visitor<R> visitor, NodeView currentNodeView,
            int node, int depthOfNode) {
        if (isLeaf(node)) {
            currentNodeView.setCurrentNode(node, getPointIndex(node), true);
            visitor.acceptLeaf(currentNodeView, depthOfNode);
        } else {
            checkState(isInternal(node), " incomplete state ");
            if (nodeStore.toLeft(point, node)) {
                traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, nodeStore.getLeftIndex(node),
                        depthOfNode + 1);
                // sibling box updates can be skipped once the visitor has converged
                currentNodeView.updateToParent(node, nodeStore.getRightIndex(node), !visitor.isConverged());
            } else {
                traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, nodeStore.getRightIndex(node),
                        depthOfNode + 1);
                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), !visitor.isConverged());
            }
            visitor.accept(currentNodeView, depthOfNode);
        }
    }
/**
* This is a traversal method which follows the standard traversal path (defined
* in {@link #traverse(float[], IVisitorFactory)}) but at Node in checks to see
* whether the visitor should split. If a split is triggered, then independent
* copies of the visitor are sent down each branch of the tree and then merged
* before propagating the result.
*
* @param point A point which determines the traversal path from the
* root to a leaf node.
* @param visitorFactory A visitor that will be invoked for each node on the
* path.
* @param <R> The return type of the Visitor.
* @return the value of {@link Visitor#getResult()}} after the traversal.
*/
    @Override
    public <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory) {
        checkArgument(root != Null, "this tree doesn't contain any nodes");
        checkNotNull(point, "point must not be null");
        checkNotNull(visitorFactory, "visitor must not be null");
        MultiVisitor<R> visitor = visitorFactory.newVisitor(this, point);
        // a single mutable view is reused for every node on the path
        NodeView currentNodeView = new NodeView(this, pointStoreView, root);
        traverseTreeMulti(point, visitor, currentNodeView, root, 0);
        return visitorFactory.liftResult(this, visitor.getResult());
    }
    /**
     * Recursive helper for {@link #traverseMulti}: when the visitor triggers a
     * split, an independent copy is sent down each branch and the results are
     * combined; otherwise the standard cut-determined path is followed.
     */
    protected <R> void traverseTreeMulti(float[] point, MultiVisitor<R> visitor, NodeView currentNodeView, int node,
            int depthOfNode) {
        if (isLeaf(node)) {
            currentNodeView.setCurrentNode(node, getPointIndex(node), false);
            visitor.acceptLeaf(currentNodeView, depthOfNode);
        } else {
            checkState(isInternal(node), " incomplete state");
            currentNodeView.setCurrentNodeOnly(node);
            if (visitor.trigger(currentNodeView)) {
                // split: original visitor goes left, a fresh copy goes right
                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getLeftIndex(node), depthOfNode + 1);
                MultiVisitor<R> newVisitor = visitor.newCopy();
                currentNodeView.setCurrentNodeOnly(nodeStore.getRightIndex(node));
                traverseTreeMulti(point, newVisitor, currentNodeView, nodeStore.getRightIndex(node), depthOfNode + 1);
                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), false);
                visitor.combine(newVisitor);
            } else if (nodeStore.toLeft(point, node)) {
                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getLeftIndex(node), depthOfNode + 1);
                currentNodeView.updateToParent(node, nodeStore.getRightIndex(node), false);
            } else {
                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getRightIndex(node), depthOfNode + 1);
                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), false);
            }
            visitor.accept(currentNodeView, depthOfNode);
        }
    }
/**
 * @return the number of leaves this tree is configured for — presumably the
 *         sampler capacity (TODO confirm against the tree constructor).
 */
public int getNumberOfLeaves() {
    return this.numberOfLeaves;
}
/**
 * @return true if center-of-mass bookkeeping is enabled for this tree.
 */
public boolean isCenterOfMassEnabled() {
    return this.centerOfMassEnabled;
}
/**
 * @return true if sequence indexes are stored alongside points in this tree.
 */
public boolean isStoreSequenceIndexesEnabled() {
    return this.storeSequenceIndexesEnabled;
}
/**
 * @return the fraction of bounding boxes that are cached (0 = none, 1 = all).
 */
public double getBoundingBoxCacheFraction() {
    return this.boundingBoxCacheFraction;
}
/**
 * @return the dimensionality of the points stored in this tree.
 */
public int getDimension() {
    return this.dimension;
}
/**
 * @return the index of the root of the tree (boxed).
 */
public Integer getRoot() {
    // root is a primitive int; the (int) cast is a no-op kept for clarity and
    // the result is autoboxed to satisfy the Integer return type.
    return (int) root;
}
/**
 * @return the number of samples that must be observed before the tree produces
 *         meaningful inference results.
 */
public int getOutputAfter() {
    return this.outputAfter;
}
/**
 * @return true once the tree's mass has reached the outputAfter threshold.
 */
@Override
public boolean isOutputReady() {
    return this.getMass() >= this.outputAfter;
}
/**
 * Identity projection into tree space; returns a defensive copy of the input.
 *
 * @param point the input point
 * @return a fresh copy of {@code point}
 */
public float[] projectToTree(float[] point) {
    return point.clone();
}
/**
 * Identity lift from tree space; returns a defensive copy of the result.
 *
 * @param result the tree-space result
 * @return a fresh copy of {@code result}
 */
public float[] liftFromTree(float[] result) {
    return result.clone();
}
/**
 * Identity lift from tree space (double precision); returns a defensive copy.
 *
 * @param result the tree-space result
 * @return a fresh copy of {@code result}
 */
public double[] liftFromTree(double[] result) {
    return result.clone();
}
/**
 * Identity projection of missing-value indices; returns a defensive copy.
 *
 * @param list indices of missing values
 * @return a fresh copy of {@code list}
 */
public int[] projectMissingIndices(int[] list) {
    return list.clone();
}
/**
 * @return the random seed used by this tree.
 */
public long getRandomSeed() {
    return this.randomSeed;
}
/**
 * @return the backing node store of this tree (the live instance, not a copy).
 */
public AbstractNodeStore getNodeStore() {
    return this.nodeStore;
}
/**
 * Fluent builder for {@code RandomCutTree}. The self-referential type parameter
 * {@code T extends Builder<T>} (simulated self-type / CRTP) lets subclasses of
 * this builder chain the setters while preserving their own type.
 */
public static class Builder<T extends Builder<T>> {
    protected boolean storeSequenceIndexesEnabled = RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;
    protected boolean centerOfMassEnabled = RandomCutForest.DEFAULT_CENTER_OF_MASS_ENABLED;
    protected double boundingBoxCacheFraction = RandomCutForest.DEFAULT_BOUNDING_BOX_CACHE_FRACTION;
    protected long randomSeed = new Random().nextLong();
    protected Random random = null;
    protected int capacity = RandomCutForest.DEFAULT_SAMPLE_SIZE;
    protected Optional<Integer> outputAfter = Optional.empty();
    protected int dimension;
    protected IPointStoreView<float[]> pointStoreView;
    protected AbstractNodeStore nodeStore;
    protected int root = Null;
    protected boolean storeParent = DEFAULT_STORE_PARENT;

    /**
     * Single place where the unchecked self-cast happens; every setter returns
     * {@code self()} so the cast (and its warning suppression) is not repeated
     * throughout the class.
     *
     * @return this builder, typed as T
     */
    @SuppressWarnings("unchecked")
    protected T self() {
        return (T) this;
    }

    /** Sets the maximum number of leaves (sample size) of the tree. */
    public T capacity(int capacity) {
        this.capacity = capacity;
        return self();
    }

    /** Sets the fraction of bounding boxes to cache (0 = none, 1 = all). */
    public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {
        this.boundingBoxCacheFraction = boundingBoxCacheFraction;
        return self();
    }

    /** Sets the point store view backing the tree's leaves. */
    public T pointStoreView(IPointStoreView<float[]> pointStoreView) {
        this.pointStoreView = pointStoreView;
        return self();
    }

    /** Sets a pre-built node store (used when restoring a serialized tree). */
    public T nodeStore(AbstractNodeStore nodeStore) {
        this.nodeStore = nodeStore;
        return self();
    }

    /** Sets the random seed. */
    public T randomSeed(long randomSeed) {
        this.randomSeed = randomSeed;
        return self();
    }

    /** Sets an explicit random source, overriding the seed when non-null. */
    public T random(Random random) {
        this.random = random;
        return self();
    }

    /** Sets how many samples must be seen before output is considered ready. */
    public T outputAfter(int outputAfter) {
        this.outputAfter = Optional.of(outputAfter);
        return self();
    }

    /** Sets the dimensionality of the points. */
    public T dimension(int dimension) {
        this.dimension = dimension;
        return self();
    }

    /** Sets the root node index (used when restoring a serialized tree). */
    public T setRoot(int root) {
        this.root = root;
        return self();
    }

    /** Enables or disables parent-pointer bookkeeping in the node store. */
    public T storeParent(boolean storeParent) {
        this.storeParent = storeParent;
        return self();
    }

    /** Enables or disables storing sequence indexes alongside points. */
    public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {
        this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;
        return self();
    }

    /** Enables or disables center-of-mass bookkeeping. */
    public T centerOfMassEnabled(boolean centerOfMassEnabled) {
        this.centerOfMassEnabled = centerOfMassEnabled;
        return self();
    }

    /**
     * @return a new RandomCutTree configured from this builder.
     */
    public RandomCutTree build() {
        return new RandomCutTree(this);
    }
}
// Returns a builder with default settings. The raw Builder type is retained
// for backward compatibility with existing callers that chain on it.
public static Builder builder() {
    return new Builder();
}
}
| 459 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreLarge.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Stack;
import com.amazon.randomcutforest.store.IndexIntervalManager;
/**
* A fixed-size buffer for storing interior tree nodes. An interior node is
* defined by its location in the tree (parent and child nodes), its random cut,
* and its bounding box. The NodeStore class uses arrays to store these field
* values for a collection of nodes. An index in the store can be used to look
* up the field values for a particular node.
*
* The internal nodes (handled by this store) corresponds to
* [0..upperRangeLimit]
*
* If we think of an array of Node objects as being row-oriented (where each row
* is a Node), then this class is analogous to a column-oriented database of
* Nodes.
*
*/
public class NodeStoreLarge extends AbstractNodeStore {

    // parent of each internal node; null when parent links are not maintained
    private final int[] parentIndex;
    // child indices; a value of `capacity` is the "no node" sentinel and values
    // above capacity encode leaf (point) references
    private final int[] leftIndex;
    private final int[] rightIndex;
    // cut dimension per internal node; kept as int (not byte) because this
    // store exists precisely to support dimensions beyond byte range
    public final int[] cutDimension;
    // mass stored modulo (capacity + 1); a stored 0 encodes the full mass
    private final int[] mass;

    /**
     * Builds the store, either empty or reconstituted from the topology arrays
     * supplied on the builder (e.g. when deserializing a tree).
     *
     * @param builder configuration, possibly carrying existing topology arrays
     */
    public NodeStoreLarge(AbstractNodeStore.Builder builder) {
        super(builder);
        mass = new int[capacity];
        Arrays.fill(mass, 0);
        if (builder.storeParent) {
            parentIndex = new int[capacity];
            Arrays.fill(parentIndex, capacity); // capacity == "no parent" sentinel
        } else {
            parentIndex = null;
        }
        if (builder.leftIndex == null) {
            leftIndex = new int[capacity];
            rightIndex = new int[capacity];
            cutDimension = new int[capacity];
            Arrays.fill(leftIndex, capacity);
            Arrays.fill(rightIndex, capacity);
        } else {
            // validate the supplied topology, consistent with NodeStoreSmall
            checkArgument(builder.leftIndex.length == capacity, " incorrect length");
            checkArgument(builder.rightIndex.length == capacity, " incorrect length");
            leftIndex = Arrays.copyOf(builder.leftIndex, builder.leftIndex.length);
            rightIndex = Arrays.copyOf(builder.rightIndex, builder.rightIndex.length);
            cutDimension = Arrays.copyOf(builder.cutDimension, builder.cutDimension.length);
            // mark every occupied internal node so the free-index manager only
            // hands out genuinely unused slots; rebuild parent links on the way
            BitSet bits = new BitSet(capacity);
            if (builder.root != Null) {
                bits.set(builder.root);
            }
            for (int i = 0; i < leftIndex.length; i++) {
                if (isInternal(leftIndex[i])) {
                    bits.set(leftIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[leftIndex[i]] = i;
                    }
                }
            }
            for (int i = 0; i < rightIndex.length; i++) {
                if (isInternal(rightIndex[i])) {
                    bits.set(rightIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[rightIndex[i]] = i;
                    }
                }
            }
            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);
        }
    }

    /**
     * Creates a new internal node separating {@code point} from the subtree at
     * {@code childIndex}, splicing it into the path to the root.
     *
     * @return the index of the newly created internal node
     */
    @Override
    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,
            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {
        int index = freeNodeManager.takeIndex();
        this.cutValue[index] = cutValue;
        // BUG FIX: the previous code cast to byte before storing into this int[]
        // array, truncating/sign-mangling cut dimensions >= 128. No narrowing is
        // needed (or correct) in the large store.
        this.cutDimension[index] = cutDimension;
        if (leftOf(cutValue, cutDimension, point)) {
            this.leftIndex[index] = (pointIndex + capacity + 1);
            this.rightIndex[index] = childIndex;
        } else {
            this.rightIndex[index] = (pointIndex + capacity + 1);
            this.leftIndex[index] = childIndex;
        }
        // mass is stored modulo (capacity + 1); 0 encodes "full"
        this.mass[index] = (((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1) % (capacity + 1);
        int parentIndex = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];
        if (this.parentIndex != null) {
            this.parentIndex[index] = parentIndex;
            if (!isLeaf(childIndex)) {
                this.parentIndex[childIndex] = (index);
            }
        }
        if (parentIndex != Null) {
            spliceEdge(parentIndex, childIndex, index);
        }
        return index;
    }

    public int getLeftIndex(int index) {
        return leftIndex[index];
    }

    public int getRightIndex(int index) {
        return rightIndex[index];
    }

    /** Marks {@code index} as the root by clearing its parent link. */
    public void setRoot(int index) {
        if (!isLeaf(index) && parentIndex != null) {
            parentIndex[index] = capacity;
        }
    }

    @Override
    protected void decreaseMassOfInternalNode(int node) {
        // adding capacity modulo (capacity + 1) is a decrement that never hits 0
        mass[node] = (mass[node] + capacity) % (capacity + 1);
    }

    @Override
    protected void increaseMassOfInternalNode(int node) {
        mass[node] = (mass[node] + 1) % (capacity + 1);
    }

    /** Resets {@code index} to the "no node" sentinel and releases the slot. */
    public void deleteInternalNode(int index) {
        leftIndex[index] = capacity;
        rightIndex[index] = capacity;
        if (parentIndex != null) {
            parentIndex[index] = capacity;
        }
        freeNodeManager.releaseIndex(index);
    }

    /** @return the mass of {@code index}; a stored 0 decodes to (capacity + 1). */
    public int getMass(int index) {
        return mass[index] != 0 ? mass[index] : (capacity + 1);
    }

    @Override
    public void assignInPartialTree(int node, float[] point, int childReference) {
        if (leftOf(node, point)) {
            leftIndex[node] = childReference;
        } else {
            rightIndex[node] = childReference;
        }
    }

    /** Replaces the edge parent -> node with parent -> newNode. */
    public void spliceEdge(int parent, int node, int newNode) {
        assert (!isLeaf(newNode));
        if (node == leftIndex[parent]) {
            leftIndex[parent] = newNode;
        } else {
            rightIndex[parent] = newNode;
        }
        if (parentIndex != null && isInternal(node)) {
            parentIndex[node] = newNode;
        }
    }

    /** Removes {@code parent} by attaching {@code node}'s sibling to the grandparent. */
    public void replaceParentBySibling(int grandParent, int parent, int node) {
        int sibling = getSibling(node, parent);
        if (parent == leftIndex[grandParent]) {
            leftIndex[grandParent] = sibling;
        } else {
            rightIndex[grandParent] = sibling;
        }
        if (parentIndex != null && isInternal(sibling)) {
            parentIndex[sibling] = grandParent;
        }
    }

    public int getCutDimension(int index) {
        return cutDimension[index];
    }

    public int[] getCutDimension() {
        return Arrays.copyOf(cutDimension, cutDimension.length);
    }

    public int[] getLeftIndex() {
        return Arrays.copyOf(leftIndex, leftIndex.length);
    }

    public int[] getRightIndex() {
        return Arrays.copyOf(rightIndex, rightIndex.length);
    }

    public int getParentIndex(int index) {
        checkArgument(parentIndex != null, "incorrect call");
        return parentIndex[index];
    }
}
| 460 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreSmall.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.tree;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.toByteArray;
import static com.amazon.randomcutforest.CommonUtils.toCharArray;
import static com.amazon.randomcutforest.CommonUtils.toIntArray;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Stack;
import com.amazon.randomcutforest.store.IndexIntervalManager;
/**
* A fixed-size buffer for storing interior tree nodes. An interior node is
* defined by its location in the tree (parent and child nodes), its random cut,
* and its bounding box. The NodeStore class uses arrays to store these field
* values for a collection of nodes. An index in the store can be used to look
* up the field values for a particular node.
*
* The internal nodes (handled by this store) corresponds to
* [0..upperRangeLimit]
*
* If we think of an array of Node objects as being row-oriented (where each row
* is a Node), then this class is analogous to a column-oriented database of
* Nodes.
*/
public class NodeStoreSmall extends AbstractNodeStore {
    // parent of each internal node (byte-sized; valid because capacity fits in a
    // byte for the "small" store); null when parent links are not maintained
    private final byte[] parentIndex;
    // child indices stored as unsigned 16-bit chars; `capacity` is the "no node"
    // sentinel, values above capacity encode leaf (point) references
    private final char[] leftIndex;
    private final char[] rightIndex;
    // cut dimension per internal node, read back with & 0xff (range 0..255)
    public final byte[] cutDimension;
    // mass stored modulo (capacity + 1) in a byte; a stored 0 encodes full mass
    private final byte[] mass;
    // Builds the store, either empty or reconstituted from the topology arrays
    // supplied on the builder (e.g. when deserializing a tree).
    public NodeStoreSmall(AbstractNodeStore.Builder builder) {
        super(builder);
        mass = new byte[capacity];
        Arrays.fill(mass, (byte) 0);
        if (builder.storeParent) {
            parentIndex = new byte[capacity];
            // capacity is the "no parent" sentinel
            Arrays.fill(parentIndex, (byte) capacity);
        } else {
            parentIndex = null;
        }
        if (builder.leftIndex == null) {
            leftIndex = new char[capacity];
            rightIndex = new char[capacity];
            cutDimension = new byte[capacity];
            Arrays.fill(leftIndex, (char) capacity);
            Arrays.fill(rightIndex, (char) capacity);
        } else {
            checkArgument(builder.leftIndex.length == capacity, " incorrect length");
            checkArgument(builder.rightIndex.length == capacity, " incorrect length");
            leftIndex = toCharArray(builder.leftIndex);
            rightIndex = toCharArray(builder.rightIndex);
            cutDimension = toByteArray(builder.cutDimension);
            // mark every occupied internal node so the free-index manager only
            // hands out genuinely unused slots; rebuild parent links on the way
            BitSet bits = new BitSet(capacity);
            if (builder.root != Null) {
                bits.set(builder.root);
            }
            for (int i = 0; i < leftIndex.length; i++) {
                if (isInternal(leftIndex[i])) {
                    bits.set(leftIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[leftIndex[i]] = (byte) i;
                    }
                }
            }
            for (int i = 0; i < rightIndex.length; i++) {
                if (isInternal(rightIndex[i])) {
                    bits.set(rightIndex[i]);
                    if (parentIndex != null) {
                        parentIndex[rightIndex[i]] = (byte) i;
                    }
                }
            }
            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);
            // need to set up parents using the root
        }
    }
    // Creates a new internal node separating `point` from the subtree at
    // `childIndex`, splicing it into the path to the root; returns its index.
    @Override
    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,
            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {
        int index = freeNodeManager.takeIndex();
        this.cutValue[index] = cutValue;
        // narrowing to byte is safe here: the small store only supports
        // dimensions that fit in the 0..255 range (read back with & 0xff)
        this.cutDimension[index] = (byte) cutDimension;
        if (leftOf(cutValue, cutDimension, point)) {
            // the new point becomes the left leaf: leaf references are offset
            // by (capacity + 1)
            this.leftIndex[index] = (char) (pointIndex + capacity + 1);
            this.rightIndex[index] = (char) childIndex;
        } else {
            this.rightIndex[index] = (char) (pointIndex + capacity + 1);
            this.leftIndex[index] = (char) childIndex;
        }
        // mass stored modulo (capacity + 1); a stored 0 encodes the full mass
        this.mass[index] = (byte) ((((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1)
                % (capacity + 1));
        int parentIndex = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];
        if (this.parentIndex != null) {
            this.parentIndex[index] = (byte) parentIndex;
            if (!isLeaf(childIndex)) {
                this.parentIndex[childIndex] = (byte) (index);
            }
        }
        if (parentIndex != Null) {
            spliceEdge(parentIndex, childIndex, index);
        }
        return index;
    }
    @Override
    public void assignInPartialTree(int node, float[] point, int childReference) {
        if (leftOf(node, point)) {
            leftIndex[node] = (char) childReference;
        } else {
            rightIndex[node] = (char) childReference;
        }
    }
    public int getLeftIndex(int index) {
        return leftIndex[index];
    }
    public int getRightIndex(int index) {
        return rightIndex[index];
    }
    public int getParentIndex(int index) {
        checkArgument(parentIndex != null, "incorrect call");
        return parentIndex[index];
    }
    // Marks `index` as the root by clearing its parent link.
    public void setRoot(int index) {
        if (!isLeaf(index) && parentIndex != null) {
            parentIndex[index] = (byte) capacity;
        }
    }
    @Override
    protected void decreaseMassOfInternalNode(int node) {
        // unsigned decode (& 0xff), then adding capacity modulo (capacity + 1)
        // acts as a decrement that never reaches 0
        mass[node] = (byte) (((mass[node] & 0xff) + capacity) % (capacity + 1)); // this cannot get to 0
    }
    @Override
    protected void increaseMassOfInternalNode(int node) {
        mass[node] = (byte) (((mass[node] & 0xff) + 1) % (capacity + 1));
        // mass of root == 0; note capacity = number_of_leaves - 1
    }
    // Resets `index` to the "no node" sentinel and releases the slot.
    public void deleteInternalNode(int index) {
        leftIndex[index] = (char) capacity;
        rightIndex[index] = (char) capacity;
        if (parentIndex != null) {
            parentIndex[index] = (byte) capacity;
        }
        freeNodeManager.releaseIndex(index);
    }
    // Decodes the stored mass: a stored 0 means the full mass (capacity + 1).
    public int getMass(int index) {
        return mass[index] != 0 ? (mass[index] & 0xff) : (capacity + 1);
    }
    // Replaces the edge parent -> node with parent -> newNode.
    public void spliceEdge(int parent, int node, int newNode) {
        assert (!isLeaf(newNode));
        if (node == leftIndex[parent]) {
            leftIndex[parent] = (char) newNode;
        } else {
            rightIndex[parent] = (char) newNode;
        }
        if (parentIndex != null && isInternal(node)) {
            parentIndex[node] = (byte) newNode;
        }
    }
    // Removes `parent` by attaching `node`'s sibling directly to the grandparent.
    public void replaceParentBySibling(int grandParent, int parent, int node) {
        int sibling = getSibling(node, parent);
        if (parent == leftIndex[grandParent]) {
            leftIndex[grandParent] = (char) sibling;
        } else {
            rightIndex[grandParent] = (char) sibling;
        }
        if (parentIndex != null && isInternal(sibling)) {
            parentIndex[sibling] = (byte) grandParent;
        }
    }
    public int getCutDimension(int index) {
        // unsigned decode of the byte-packed dimension
        return cutDimension[index] & 0xff;
    }
    public int[] getCutDimension() {
        return toIntArray(cutDimension);
    }
    public int[] getLeftIndex() {
        return toIntArray(leftIndex);
    }
    public int[] getRightIndex() {
        return toIntArray(rightIndex);
    }
}
| 461 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AnomalyScoreVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import com.amazon.randomcutforest.CommonUtils;
/**
* This visitor computes a scalar anomaly score for a specified point. The basic
* score computation is defined by {@link AbstractScalarScoreVisitor}, and this
* class overrides the scoring functions so that input points that are more
* likely to separated from in-sample points by a random cut receive a higher
* anomaly score.
*
* While this basic algorithm produces good results when all the points in the
* sample are distinct, it can produce unexpected results when a significant
* portion of the points in the sample are duplicates. Therefore this class
* supports different optional features for modifying the score produced when
* the point being scored is equal to the leaf node in the traversal.
*/
public class AnomalyScoreVisitor extends AbstractScalarScoreVisitor {

    /**
     * Creates a visitor with default leaf handling (leaf-equal points are never
     * ignored); equivalent to the threshold constructor with the default value.
     *
     * @param pointToScore the query point being scored
     * @param treeMass     total mass of the RandomCutTree performing the scoring
     */
    public AnomalyScoreVisitor(float[] pointToScore, int treeMass) {
        this(pointToScore, treeMass, DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);
    }

    /**
     * Creates a visitor that treats sufficiently light leaves as unseen.
     *
     * @param pointToScore            the query point being scored
     * @param treeMass                total mass of the RandomCutTree performing
     *                                the scoring
     * @param ignoreLeafMassThreshold maximum leaf mass that may be ignored when
     *                                the query point equals the leaf point
     */
    public AnomalyScoreVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {
        super(pointToScore, treeMass, ignoreLeafMassThreshold);
    }

    /** Delegates to the library default for scoring a previously seen point. */
    @Override
    protected double scoreSeen(int depth, int mass) {
        return CommonUtils.defaultScoreSeenFunction(depth, mass);
    }

    /** Delegates to the library default for scoring an unseen point. */
    @Override
    protected double scoreUnseen(int depth, int mass) {
        return CommonUtils.defaultScoreUnseenFunction(depth, mass);
    }

    /** Delegates to the library default dampening of duplicate-heavy leaves. */
    @Override
    protected double damp(int leafMass, int treeMass) {
        return CommonUtils.defaultDampFunction(leafMass, treeMass);
    }
}
| 462 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/TransductiveScalarScoreVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
public class TransductiveScalarScoreVisitor extends DynamicScoreVisitor {
    /*
     * The goal of this visitor is to allow transductive inference: during
     * scoring we make adjustments so that it appears (to the best of simulation
     * ability) that the tree was built using knowledge of the point being
     * scored.
     */
    // probability weights over dimensions, as a function of a bounding box;
    // assumed identical to the weights used when the tree was built
    protected final Function<IBoundingBoxView, double[]> vecSepScore;
    /**
     * Construct a new TransductiveScalarScoreVisitor.
     *
     * @param pointToScore The point whose anomaly score we are computing
     * @param treeMass The total mass of the RandomCutTree that is scoring the
     * point
     * @param scoreSeen is the part of the score function when the point has been
     * seen
     * @param scoreUnseen is the part of the score when the point has not been seen
     * @param damp corresponds to the dampening of the effect of the seen
     * points
     * @param vecSep A function that provides the probabilities of choosing
     * different dimensions given a BoundingBox when the tree
     * was built. This must be the same as the probabilities used
     * for transductive inference during scoring. For extensions
     * where these differ, see
     * SimulatedTransductiveScalarScoreVisitor.
     *
     * Note that scores are not normalized because the function
     * ranges are unknown, as is the case with
     * DynamicScoreVisitor.
     */
    public TransductiveScalarScoreVisitor(float[] pointToScore, int treeMass,
            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,
            BiFunction<Double, Double, Double> damp, Function<IBoundingBoxView, double[]> vecSep) {
        super(pointToScore, treeMass, 0, scoreSeen, scoreUnseen, damp);
        this.vecSepScore = vecSep;
        // build function is the same as scoring function
    }
    /**
     * Update the anomaly score based on the next step of the tree traversal.
     *
     * @param node The current node in the tree traversal
     * @param depthOfNode The depth of the current node in the tree
     */
    @Override
    public void accept(INodeView node, int depthOfNode) {
        if (pointInsideBox) {
            return;
        }
        // note that score was unchanged before the return
        // this is only reasonable if the scoring was done using the same
        // probability function used to build the trees.
        double probabilityOfSeparation = getProbabilityOfSeparation(node.getBoundingBox());
        // weight replaces the usual (1 - probabilityOfSeparation) factor to
        // account for the dimension actually cut at this node (see getWeight)
        double weight = getWeight(node.getCutDimension(), vecSepScore, node.getBoundingBox());
        if (probabilityOfSeparation == 0) {
            // the point is inside this box, so it is inside every ancestor box;
            // no further updates are possible
            pointInsideBox = true;
            return;
        }
        score = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass()) + weight * score;
    }
    /**
     * Compute the probability that a random cut would separate the point from the
     * rest of the bounding box, with dimensions weighted by {@code vecSepScore}.
     * This method is intended to compute the probability for a non-leaf Node, and
     * will throw an exception if a leaf-node bounding box is detected.
     *
     * @param boundingBox The bounding box that we are computing the probability of
     * separation from.
     * @return is the probability
     * @throws IllegalStateException if the weighted ranges sum to zero (which
     * should only happen at a leaf)
     */
    @Override
    protected double getProbabilityOfSeparation(final IBoundingBoxView boundingBox) {
        double sumOfDenominator = 0d;
        double sumOfNumerator = 0d;
        // weights evaluated on the box merged with the query point
        double[] vec = vecSepScore.apply(boundingBox.getMergedBox(pointToScore));
        for (int i = 0; i < pointToScore.length; ++i) {
            double maxVal = boundingBox.getMaxValue(i);
            double minVal = boundingBox.getMinValue(i);
            double oldRange = maxVal - minVal;
            sumOfDenominator += vec[i];
            if (!coordInsideBox[i]) {
                // extend the box to include the query point in this dimension
                if (maxVal < pointToScore[i]) {
                    maxVal = pointToScore[i];
                } else if (minVal > pointToScore[i]) {
                    minVal = pointToScore[i];
                }
                double newRange = maxVal - minVal;
                if (newRange > oldRange) {
                    sumOfNumerator += vec[i] * (newRange - oldRange) / newRange;
                } else
                    // the coordinate lies inside the box; remember that so later
                    // (smaller) boxes on the path can skip this dimension
                    coordInsideBox[i] = true;
            }
        }
        if (sumOfDenominator <= 0) {
            // Sum of range across dimensions should only be 0 at leaf nodes as non-leaf
            // nodes always contain
            // more than one distinct point
            throw new IllegalStateException("Incorrect State");
        }
        return sumOfNumerator / sumOfDenominator;
        // for RCFs vec[i] = newRange (for dimension i) and therefore the
        // sumOfNumerator is the sum of the difference (after and before
        // merging the point to the box) of ranges
        // sum of denominator is the sum the ranges in each dimension
    }
    // For this visitor class the assumption is that the trees are built using the
    // same probabilities as are used in scoring. In the application herein
    // vecSepBuild is the same as vecSepScore as in the accept(node) above;
    // however the function is written in the more general form so that it can be
    // used for the Simulated version as well without any changes.
    //
    // Returns the multiplier applied to the child's score at this node: the
    // probability-weighted chance that the cut at dimension `dim` would have been
    // chosen and would not separate the query point. Note this can exceed 1.
    protected double getWeight(int dim, Function<IBoundingBoxView, double[]> vecSepBuild,
            final IBoundingBoxView boundingBox) {
        double[] vecSmall = vecSepBuild.apply(boundingBox);
        // the smaller box was built!
        IBoundingBoxView largeBox = boundingBox.getMergedBox(pointToScore);
        double[] vecLarge = vecSepScore.apply(largeBox);
        // the larger box is only scored!
        double sumSmall = 0;
        double sumLarge = 0;
        for (int i = 0; i < pointToScore.length; i++) {
            sumSmall += vecSmall[i];
            sumLarge += vecLarge[i];
        }
        return (boundingBox.getRange(dim) / largeBox.getRange(dim)) * (sumSmall / sumLarge)
                * (vecLarge[dim] / vecSmall[dim]);
        // this can be larger than 1
        // For RCFs vecLarge[dim] = largeBox.getRange(dim) and
        // vecSmall[dim] = smallBox.getRange(dim)
        // sumSmall/sumLarge is the probability of non-separation
    }
}
| 463 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AbstractScalarScoreVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.Arrays;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
/**
* This abstract visitor encodes a standard method for computing a scalar result
* value. The basic computation is as follows:
*
* <ol>
* <li>After following the traversal path to a leaf, compute a base score at the
* leaf node.</li>
* <li>For each node in the traversal path from the leaf to the root, compute
* the probability that a random cut would separate the query point from the
* node. The updated score uses this probability to create a weighted
* combination between the current score and a score contribution from the
* current node.</li>
* </ol>
* <p>
* While this basic algorithm produces good results when all the points in the
* sample are distinct, it can produce unexpected results when a significant
* portion of the points in the sample are duplicates. Therefore this class
* supports different optional features for modifying the score produced when
* the point being scored is equal to the leaf node in the traversal.
*/
public abstract class AbstractScalarScoreVisitor implements Visitor<Double> {
public static final int DEFAULT_IGNORE_LEAF_MASS_THRESHOLD = 0;
/**
* The point whose anomaly score is being computed.
*/
protected final float[] pointToScore;
/**
* The mass of the tree being visited. This value is used to normalize the final
* result.
*/
protected final int treeMass;
/**
* This flag is set to 'true' if the point being scored is found to be contained
* by a bounding box in the traversal path, allowing us to short-circuit further
* computation.
*/
protected boolean pointInsideBox;
/**
* Similar to pointInsideBox, the array coordInsideBox keeps track of whether
* each coordinate is contained in the corresponding bounding box projection for
* a bounding box in the traversal path. This field is used to skip unnecessary
* steps in the probability computation.
*/
protected boolean[] coordInsideBox;
/**
* shadowbox used in attribution and ignoring the leaf to simulate a deletion
*/
protected IBoundingBoxView shadowBox = null;
/**
* The function used to compute the base score in the case where the point being
* scored is equal to the leaf point (provided the ignoreLeafEquals and
* ignoreLeafMassThreshold variables indicate that we should use this method).
*
* Function arguments: leaf depth, leaf mass
*/
protected double score;
/**
* If true, then the scoreUnseen method will be used to score a point equal to a
* leaf point in {@link #acceptLeaf(INodeView, int)}.
*/
protected boolean ignoreLeafEquals;
/**
* If the point being scored is equal to the leaf point but the leaf mass is
* smaller than this value, then the scoreUnseen method will be used to score
* the point in {@link #accept(INodeView, int)}.
*/
protected int ignoreLeafMassThreshold;
/**
 * Construct a new AbstractScalarScoreVisitor.
 *
 * @param pointToScore            the point whose anomaly score we are computing
 * @param treeMass                the total mass of the RandomCutTree that is
 *                                scoring the point
 * @param ignoreLeafMassThreshold the maximum mass of a leaf that may be ignored
 *                                when the scored point equals the leaf point
 */
public AbstractScalarScoreVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {
    this.pointToScore = pointToScore.clone(); // defensive copy
    this.treeMass = treeMass;
    this.ignoreLeafMassThreshold = ignoreLeafMassThreshold;
    // leaf-ignoring kicks in only for thresholds above the default (0)
    this.ignoreLeafEquals = ignoreLeafMassThreshold > DEFAULT_IGNORE_LEAF_MASS_THRESHOLD;
    this.pointInsideBox = false;
    this.score = 0.0;
    // all entries start false: no coordinate is yet known to be inside a box
    this.coordInsideBox = new boolean[pointToScore.length];
}
/**
 * Construct a new AbstractScalarScoreVisitor using the default leaf option
 * ({@code DEFAULT_IGNORE_LEAF_MASS_THRESHOLD}, i.e. leaves are never ignored).
 *
 * @param pointToScore The point whose anomaly score we are computing
 * @param treeMass     The total mass of the RandomCutTree that is scoring the
 *                     point
 */
public AbstractScalarScoreVisitor(float[] pointToScore, int treeMass) {
    this(pointToScore, treeMass, DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);
}
/**
 * @return the score accumulated so far, normalized by the tree mass via the
 *         library's default scalar normalizer.
 */
@Override
public Double getResult() {
    return CommonUtils.defaultScalarNormalizerFunction(score, treeMass);
}
/**
 * Update the anomaly score based on the next step of the tree traversal. The
 * score becomes a weighted combination of the contribution at this node and
 * the score carried up from below, weighted by the probability that a random
 * cut separates the query point from this node's bounding box.
 *
 * @param node The current node in the tree traversal
 * @param depthOfNode The depth of the current node in the tree
 */
@Override
public void accept(INodeView node, int depthOfNode) {
    if (pointInsideBox) {
        // once inside a box, the point is inside every ancestor box; the score
        // can no longer change
        return;
    }
    double probabilityOfSeparation;
    if (!ignoreLeafEquals) {
        // (sic) "probailityOfSeparation" is the method name as declared on the
        // INodeView interface; it cannot be renamed here
        probabilityOfSeparation = node.probailityOfSeparation(pointToScore);
        if (probabilityOfSeparation <= 0) {
            pointInsideBox = true;
            return;
        }
    } else {
        // simulate deletion of the ignored leaf: accumulate the union of the
        // sibling bounding boxes seen along the path ("shadow box") and compute
        // the separation probability against it instead of the node's own box
        shadowBox = shadowBox == null ? node.getSiblingBoundingBox(pointToScore)
                : shadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));
        probabilityOfSeparation = (shadowBox.getRangeSum() <= 0) ? 1.0 : getProbabilityOfSeparation(shadowBox);
    }
    score = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass())
            + (1 - probabilityOfSeparation) * score;
}
/**
 * Update the anomaly score with the given leaf node. If the query point equals
 * the leaf point (and the leaf is not being ignored per the mass threshold),
 * the dampened "seen" score is used and further updates are short-circuited;
 * otherwise the leaf is scored as unseen.
 *
 * @param leafNode The leaf node that was reached by traversing the tree
 * @param depthOfNode The depth of the leaf node
 */
@Override
public void acceptLeaf(INodeView leafNode, int depthOfNode) {
    if (Arrays.equals(leafNode.getLeafPoint(), pointToScore)
            && (!ignoreLeafEquals || (leafNode.getMass() > ignoreLeafMassThreshold))) {
        // exact duplicate of an in-sample point: short-circuit the ancestors
        pointInsideBox = true;
        score = damp(leafNode.getMass(), treeMass) * scoreSeen(depthOfNode, leafNode.getMass());
    } else {
        score = scoreUnseen(depthOfNode, leafNode.getMass());
    }
}
/**
 * A scoring function which is applied when the leaf node visited is equal to
 * the point being scored (i.e. the point is "in sample").
 *
 * @param depth The depth of the node being visited
 * @param mass The mass of the node being visited
 * @return an anomaly score contribution for a given node
 */
protected abstract double scoreSeen(int depth, int mass);
/**
* A scoring function which is applied when the leaf node visited is not equal
* to the point being scored. This function is also used to compute the
* contribution to the anomaly score from non-leaf nodes.
*
* @param depth The depth of the node being visited.
* @param mass The mass of the node being visited.
* @return an anomaly score contribution for a given node.
*/
protected abstract double scoreUnseen(int depth, int mass);
/**
* This function produces a scaling factor which can be used to reduce the
* influence of leaf nodes with mass greater than 1.
*
* @param leafMass The mass of the leaf node visited
* @param treeMass The mass of the tree being visited
* @return a scaling factor to apply to the result from
* {@link #scoreSeen(int, int)}.
*/
protected abstract double damp(int leafMass, int treeMass);
    /**
     * Compute the probability that a random cut would separate the point from the
     * rest of the bounding box. This method is intended to compute the probability
     * for a non-leaf Node, and will throw an exception if a leaf-node bounding box
     * is detected (total range 0).
     *
     * The probability is (sum of range growth caused by adding the point) divided
     * by (total range of the grown box), summed over all dimensions.
     *
     * @param boundingBox The bounding box that we are computing the probability of
     *                    separation from.
     * @return is the probability
     */
    protected double getProbabilityOfSeparation(final IBoundingBoxView boundingBox) {
        double sumOfNewRange = 0d;
        double sumOfDifferenceInRange = 0d;

        for (int i = 0; i < pointToScore.length; ++i) {
            double maxVal = boundingBox.getMaxValue(i);
            double minVal = boundingBox.getMinValue(i);
            double oldRange = maxVal - minVal;

            if (!coordInsideBox[i]) {
                if (maxVal < pointToScore[i]) {
                    maxVal = pointToScore[i];
                } else if (minVal > pointToScore[i]) {
                    minVal = pointToScore[i];
                } else if (!ignoreLeafEquals) {
                    // coordinate i lies inside [min, max]; remember that so later
                    // (larger) boxes can skip the comparison entirely
                    // optimization turned on for ignoreLeafEquals==false
                    sumOfNewRange += oldRange;
                    coordInsideBox[i] = true;
                    continue;
                }

                double newRange = maxVal - minVal;
                sumOfNewRange += newRange;
                sumOfDifferenceInRange += (newRange - oldRange);
            } else {
                // coordinate already known to be inside; the point cannot grow the
                // box in this dimension
                sumOfNewRange += oldRange;
            }
        }

        if (sumOfNewRange <= 0) {
            // Sum of range across dimensions should only be 0 at leaf nodes as non-leaf
            // nodes always contain
            // more than one distinct point
            throw new IllegalStateException("Sum of new range of merged box in scoring function is smaller than 0 "
                    + "for a non-leaf node. The sum of range of new bounding box is: " + sumOfNewRange);
        }

        return sumOfDifferenceInRange / sumOfNewRange;
    }
    /**
     * @return true when the point is known to be inside the current bounding box,
     *         at which point the score can no longer change and the traversal may
     *         short-circuit.
     */
    public boolean isConverged() {
        return pointInsideBox;
    }
}
| 464 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AbstractAttributionVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.Arrays;
import com.amazon.randomcutforest.CommonUtils;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.returntypes.DiVector;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
/**
 * Attribution exposes the attribution of scores produced by ScalarScoreVisitor
 * corresponding to different attributes. It allows a boolean
 * ignoreClosestCandidate; which when true will compute the attribution as if
 * that near neighbor was not present in RCF. This is turned on by default for
 * duplicate points seen by the forest, so that the attribution does not change
 * if a sequence of duplicate points are seen. For non-duplicate points, if the
 * boolean turned on, reduces effects of masking (when anomalous points are
 * included in the forest -- which will be true with a few samples or when the
 * samples are not refreshed appropriately). It is worth remembering that
 * disallowing anomalous points from being included in the forest explicitly
 * will render the algorithm incapable of adjusting to a new normal -- which is
 * a strength of this algorithm.
 **/
public abstract class AbstractAttributionVisitor implements Visitor<DiVector> {

    public static final int DEFAULT_IGNORE_LEAF_MASS_THRESHOLD = 0;

    // per-dimension range growth; entry 2*i holds growth above the box (high
    // side), entry 2*i + 1 holds growth below the box (low side)
    protected final double[] differenceInRangeVector;
    protected final float[] pointToScore;
    protected final int treeMass;
    // running attribution, split into high (spike) and low (dip) contributions
    protected final DiVector directionalAttribution;
    // true once a leaf equal to pointToScore has been visited
    protected boolean hitDuplicates;
    // scalar score carried alongside the attribution; used for renormalization
    protected double savedScore;
    protected double sumOfNewRange;
    protected double sumOfDifferenceInRange;
    protected boolean ignoreLeaf;
    protected int ignoreLeafMassThreshold;

    /**
     * A flag that states whether the point to score is known to be contained inside
     * the bounding box of Nodes being accepted. Assumes nodes are accepted in
     * leaf-to-root order.
     */
    protected boolean pointInsideBox;

    /**
     * An array that keeps track of whether each margin of the point being scored is
     * outside inside the box considered during the recursive call to compute the
     * score. Assumes nodes are accepted in leaf-to-root order.
     */
    protected boolean[] coordInsideBox;

    // merged sibling boxes used for counterfactual ("shadow") scoring
    protected IBoundingBoxView shadowBox;

    /**
     * Construct a new attribution visitor.
     *
     * @param pointToScore            the point whose attribution is being computed;
     *                                a defensive copy is taken
     * @param treeMass                the total mass of the tree being visited
     * @param ignoreLeafMassThreshold leaves with mass at or below this threshold
     *                                are treated as "unseen"; a positive value
     *                                enables counterfactual scoring
     */
    public AbstractAttributionVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {
        this.pointToScore = Arrays.copyOf(pointToScore, pointToScore.length);
        this.treeMass = treeMass;
        this.ignoreLeaf = ignoreLeafMassThreshold > DEFAULT_IGNORE_LEAF_MASS_THRESHOLD;
        this.ignoreLeafMassThreshold = ignoreLeafMassThreshold;
        hitDuplicates = false;
        pointInsideBox = false;
        savedScore = 0;
        directionalAttribution = new DiVector(pointToScore.length);
        shadowBox = null;
        coordInsideBox = new boolean[pointToScore.length];
        // array is twice as long as pointToScore because we store
        // positive and negative differences separately
        differenceInRangeVector = new double[2 * pointToScore.length];
    }

    /**
     * Construct a visitor with the default ignore-leaf mass threshold (0).
     *
     * @param pointToScore the point whose attribution is being computed
     * @param treeMass     the total mass of the tree being visited
     */
    public AbstractAttributionVisitor(float[] pointToScore, int treeMass) {
        this(pointToScore, treeMass, DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);
    }

    /**
     * Take the normalization function applied to the corresponding scoring visitor
     * and apply that to each coordinate of the DiVector to modify the data in
     * place. The function has to be associative in its first parameter; that is, fn
     * (x1, y) + fn (x2, y) = fn (x1 + x2, y)
     *
     * @return The modified data.
     */
    @Override
    public DiVector getResult() {
        DiVector result = new DiVector(directionalAttribution);
        result.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, treeMass));
        return result;
    }

    /**
     * Update the anomaly score based on the next step of the tree traversal.
     *
     * @param node        The current node in the tree traversal
     * @param depthOfNode The depth of the current node in the tree
     */
    @Override
    public void accept(INodeView node, int depthOfNode) {
        if (pointInsideBox) {
            return;
        }

        IBoundingBoxView smallBox;
        if (hitDuplicates || ignoreLeaf) {
            // use the sibling bounding box to represent counterfactual "what if point & the
            // candidate near neighbor
            // had not been inserted in the tree"
            shadowBox = shadowBox == null ? node.getSiblingBoundingBox(pointToScore)
                    : shadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));
            smallBox = shadowBox;
        } else {
            smallBox = node.getBoundingBox();
        }
        IBoundingBoxView largeBox = smallBox.getMergedBox(pointToScore);
        updateRangesForScoring(smallBox, largeBox);
        double probOfCut = sumOfDifferenceInRange / sumOfNewRange;

        // if leaves were ignored we need to keep accounting for the score
        if (ignoreLeaf) {
            savedScore = probOfCut * scoreUnseen(depthOfNode, node.getMass()) + (1 - probOfCut) * savedScore;
        }

        if (probOfCut <= 0) {
            pointInsideBox = true;
        } else {
            double newScore = scoreUnseen(depthOfNode, node.getMass());
            // split the cut probability into per-dimension high/low contributions
            for (int i = 0; i < pointToScore.length; i++) {
                double probOfCutInSpikeDirection = differenceInRangeVector[2 * i] / sumOfNewRange;
                directionalAttribution.high[i] = probOfCutInSpikeDirection * newScore
                        + (1 - probOfCut) * directionalAttribution.high[i];
                double probOfCutInDipDirection = differenceInRangeVector[2 * i + 1] / sumOfNewRange;
                directionalAttribution.low[i] = probOfCutInDipDirection * newScore
                        + (1 - probOfCut) * directionalAttribution.low[i];
            }
        }

        boolean capture = (pointInsideBox || depthOfNode == 0);
        if ((hitDuplicates || ignoreLeaf) && capture) {
            // final rescaling; this ensures agreement with the ScalarScoreVector
            // the scoreUnseen/scoreSeen should be the same as scoring; other uses need
            // caution.
            directionalAttribution.renormalize(savedScore);
        }
    }

    /**
     * Initialize the attribution at the leaf node reached by the traversal.
     *
     * @param leafNode    the leaf node being visited
     * @param depthOfNode the depth of the leaf node
     */
    @Override
    public void acceptLeaf(INodeView leafNode, int depthOfNode) {
        updateRangesForScoring(leafNode.getBoundingBox(), leafNode.getBoundingBox().getMergedBox(pointToScore));
        // sumOfNewRange == 0 corresponds to equality of points and is faster than
        // Arrays.equals
        if (sumOfNewRange <= 0) {
            hitDuplicates = true;
        }
        if ((hitDuplicates) && ((!ignoreLeaf) || (leafNode.getMass() > ignoreLeafMassThreshold))) {
            savedScore = damp(leafNode.getMass(), treeMass) * scoreSeen(depthOfNode, leafNode.getMass());
        } else {
            savedScore = scoreUnseen(depthOfNode, leafNode.getMass());
        }
        if ((hitDuplicates) || ((ignoreLeaf) && (leafNode.getMass() <= ignoreLeafMassThreshold))) {
            Arrays.fill(directionalAttribution.high, savedScore / (2 * pointToScore.length));
            Arrays.fill(directionalAttribution.low, savedScore / (2 * pointToScore.length));
            /* in this case do not have a better option than an equal attribution */
            Arrays.fill(coordInsideBox, false);
        } else {
            for (int i = 0; i < pointToScore.length; i++) {
                directionalAttribution.high[i] = savedScore * differenceInRangeVector[2 * i] / sumOfNewRange;
                directionalAttribution.low[i] = savedScore * differenceInRangeVector[2 * i + 1] / sumOfNewRange;
            }
        }
    }

    /**
     * A scoring function which is applied when the leaf node visited is equal to
     * the point being scored.
     *
     * @param depth The depth of the node being visited
     * @param mass  The mass of the node being visited
     * @return an anomaly score contribution for a given node
     */
    protected abstract double scoreSeen(int depth, int mass);

    /**
     * A scoring function which is applied when the leaf node visited is not equal
     * to the point being scored. This function is also used to compute the
     * contribution to the anomaly score from non-leaf nodes.
     *
     * @param depth The depth of the node being visited.
     * @param mass  The mass of the node being visited.
     * @return an anomaly score contribution for a given node.
     */
    protected abstract double scoreUnseen(int depth, int mass);

    /**
     * This function produces a scaling factor which can be used to reduce the
     * influence of leaf nodes with mass greater than 1.
     *
     * @param leafMass The mass of the leaf node visited
     * @param treeMass The mass of the tree being visited
     * @return a scaling factor to apply to the result from
     *         {@link #scoreSeen(int, int)}.
     */
    protected abstract double damp(int leafMass, int treeMass);

    /**
     * When updating the score for a node, we compare the node's bounding box to the
     * merged bounding box that would be created by adding the point to be scored.
     * This method updates local instance variables sumOfDifferenceInRange and
     * differenceInRange vector to reflect the total difference in side length and
     * the difference in side length in each dimension, respectively.
     *
     * @param smallBox The bounding box corresponding to a Node being visited.
     * @param largeBox The merged bounding box containing smallBox and the point
     *                 being scored.
     */
    protected void updateRangesForScoring(IBoundingBoxView smallBox, IBoundingBoxView largeBox) {
        sumOfDifferenceInRange = 0.0;
        sumOfNewRange = 0.0;
        Arrays.fill(differenceInRangeVector, 0.0);

        for (int i = 0; i < pointToScore.length; i++) {
            sumOfNewRange += largeBox.getRange(i);

            // optimization turned off for ignoreLeaf
            if (coordInsideBox[i] && !ignoreLeaf) {
                continue;
            }

            // growth of the box above (maxGap) and below (minGap) in dimension i
            double maxGap = Math.max(largeBox.getMaxValue(i) - smallBox.getMaxValue(i), 0.0);
            double minGap = Math.max(smallBox.getMinValue(i) - largeBox.getMinValue(i), 0.0);

            if (maxGap + minGap > 0.0) {
                sumOfDifferenceInRange += (minGap + maxGap);
                differenceInRangeVector[2 * i] = maxGap;
                differenceInRangeVector[2 * i + 1] = minGap;
            } else {
                coordInsideBox[i] = true;
            }
        }
    }

    @Override
    public boolean isConverged() {
        return pointInsideBox;
    }
}
| 465 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/DynamicAttributionVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.function.BiFunction;
import com.amazon.randomcutforest.returntypes.DiVector;
/**
 * An attribution visitor whose three scoring components are supplied by the
 * caller as functions, allowing the scoring behavior to be chosen at runtime.
 * Unlike the base class, {@link #getResult()} performs no normalization because
 * the ranges of the supplied functions are unknown.
 */
public class DynamicAttributionVisitor extends AbstractAttributionVisitor {

    /**
     * Base-score function used when the visited leaf equals the point being scored
     * (subject to the ignoreLeafEquals / ignoreLeafMassThreshold settings).
     * Arguments: leaf depth, leaf mass.
     */
    private final BiFunction<Double, Double, Double> seenScoreFunction;

    /**
     * Score function used when the visited leaf differs from the point being
     * scored, and for contributions from interior nodes. Arguments: leaf depth,
     * leaf mass.
     */
    private final BiFunction<Double, Double, Double> unseenScoreFunction;

    /**
     * Damping function that reduces the influence of a heavily duplicated point on
     * the base score. Arguments: leaf mass, tree mass.
     */
    private final BiFunction<Double, Double, Double> dampFunction;

    /**
     * Create a visitor from caller-supplied scoring components.
     *
     * @param point                   to be scored
     * @param treeMass                mass of the tree
     * @param ignoreLeafMassThreshold threshold of mass for leaves to be ignored
     * @param scoreSeen               part of the score when point has been seen
     * @param scoreUnseen             part of the score for unseen point
     * @param damp                    dampening function for seen points
     */
    public DynamicAttributionVisitor(float[] point, int treeMass, int ignoreLeafMassThreshold,
            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,
            BiFunction<Double, Double, Double> damp) {
        super(point, treeMass, ignoreLeafMassThreshold);
        seenScoreFunction = scoreSeen;
        unseenScoreFunction = scoreUnseen;
        dampFunction = damp;
    }

    @Override
    protected double scoreSeen(int depth, int leafMass) {
        return seenScoreFunction.apply((double) depth, (double) leafMass);
    }

    @Override
    protected double scoreUnseen(int depth, int leafMass) {
        return unseenScoreFunction.apply((double) depth, (double) leafMass);
    }

    @Override
    protected double damp(int leafMass, int treeMass) {
        return dampFunction.apply((double) leafMass, (double) treeMass);
    }

    /**
     * @return a copy of the raw directional attribution; normalization is
     *         deliberately skipped for dynamic scoring.
     */
    @Override
    public DiVector getResult() {
        return new DiVector(directionalAttribution);
    }
}
| 466 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/DynamicScoreVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.function.BiFunction;
/**
 * A scalar score visitor whose scoring components are supplied by the caller as
 * functions, allowing the scoring behavior to be chosen at runtime. Because the
 * ranges of the supplied functions are unknown, {@link #getResult()} skips the
 * normalization performed by the base class.
 */
public class DynamicScoreVisitor extends AbstractScalarScoreVisitor {

    /**
     * The function used to compute the base score in the case where the point being
     * scored is equal to the leaf point (provided the ignoreLeafEquals and
     * ignoreLeafMassThreshold variables indicate that we should use this method).
     * <p>
     * Function arguments: leaf depth, leaf mass
     */
    protected final BiFunction<Double, Double, Double> scoreSeen;

    /**
     * A damping function used to dilute the impact of a point with a large number
     * of duplicates on the base score.
     * <p>
     * Function arguments: leaf mass, tree mass
     */
    protected final BiFunction<Double, Double, Double> damp;

    /**
     * The scoring function to use when the point being scored is not equal to the
     * leaf point, or when the points are equal but the ignoreLeafEquals or
     * ignoreLeafMassThreshold variable indicates that we should use the scoreUnseen
     * method.
     * <p>
     * Function arguments: leaf depth, leaf mass
     */
    protected final BiFunction<Double, Double, Double> scoreUnseen;

    /**
     * Constructor
     *
     * @param point                   being scored
     * @param treeMass                mass of the tree
     * @param ignoreLeafMassThreshold the threshold for ignoring leaf nodes
     * @param scoreSeen               the part of score function for previously seen
     *                                values
     * @param scoreUnseen             part of the score for unseen values
     * @param damp                    dampening function for seen points
     */
    public DynamicScoreVisitor(float[] point, int treeMass, int ignoreLeafMassThreshold,
            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,
            BiFunction<Double, Double, Double> damp) {
        super(point, treeMass, ignoreLeafMassThreshold);
        this.scoreSeen = scoreSeen;
        this.scoreUnseen = scoreUnseen;
        this.damp = damp;
    }

    @Override
    protected double scoreSeen(int depth, int leafMass) {
        return scoreSeen.apply((double) depth, (double) leafMass);
    }

    @Override
    protected double scoreUnseen(int depth, int leafMass) {
        return scoreUnseen.apply((double) depth, (double) leafMass);
    }

    @Override
    protected double damp(int leafMass, int treeMass) {
        return damp.apply((double) leafMass, (double) treeMass);
    }

    /**
     * normalization is turned off for dynamic scoring because the function ranges
     * are unknown
     */
    @Override
    public Double getResult() {
        return score;
    }
}
| 467 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/SimulatedTransductiveScalarScoreVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.INodeView;
public class SimulatedTransductiveScalarScoreVisitor extends TransductiveScalarScoreVisitor {

    // dimension-selection probabilities used when the tree was built; the weight
    // per node is derived from this, while vecSepScore (in the superclass)
    // controls the separation probability during scoring
    private final Function<IBoundingBoxView, double[]> vecSepBuild;

    /**
     * Construct a new SimulatedTransductiveScalarScoreVisitor
     *
     * @param pointToScore The point whose anomaly score we are computing
     * @param treeMass     The total mass of the RandomCutTree that is scoring the
     *                     point
     * @param scoreSeen    is the part of the score function when the point has been
     *                     seen
     * @param scoreUnseen  is the part of the score when the point has not been seen
     * @param damp         corresponds to the dampening of the effect of the seen
     *                     points
     * @param vecSepBuild  A function that provides the probabilities of choosing
     *                     different dimensions given a BoundingBox when the tree
     *                     was built.
     * @param vecSepScore  A function that corresponds to importance of dimensions
     *                     during scoring
     */
    public SimulatedTransductiveScalarScoreVisitor(float[] pointToScore, int treeMass,
            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,
            BiFunction<Double, Double, Double> damp, Function<IBoundingBoxView, double[]> vecSepBuild,
            Function<IBoundingBoxView, double[]> vecSepScore) {
        super(pointToScore, treeMass, scoreSeen, scoreUnseen, damp, vecSepScore);
        this.vecSepBuild = vecSepBuild;
    }

    /**
     * Update the anomaly score based on the next step of the tree traversal.
     *
     * @param node        The current node in the tree traversal
     * @param depthOfNode The depth of the current node in the tree
     */
    @Override
    public void accept(INodeView node, int depthOfNode) {
        double weight = getWeight(node.getCutDimension(), vecSepBuild, node.getBoundingBox());
        if (pointInsideBox) {
            // unlike the superclass, the score keeps being reweighted even after
            // the point is inside the box
            score *= weight;
            return;
        }

        double probabilityOfSeparation = getProbabilityOfSeparation(node.getBoundingBox());
        // NOTE(review): uses == 0 here where related visitors test <= 0 -- confirm
        // that the probability can never be negative at this call site
        if (probabilityOfSeparation == 0) {
            pointInsideBox = true;
        }
        score = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass()) + weight * score;
    }

    // The above function differs from TransductiveScalarScoreVisitor only in the
    // weight
    // computation and when the weight function is used.

    /**
     * @return false always; convergence cannot short-circuit this visitor because
     *         the weight continues to scale the score above the box.
     */
    @Override
    public boolean isConverged() {
        return false;
    }
}
| 468 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AnomalyAttributionVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.anomalydetection;
import com.amazon.randomcutforest.CommonUtils;
/**
 * Attribution exposes the attribution of scores produced by ScalarScoreVisitor
 * corresponding to different attributes. It allows a boolean
 * ignoreClosestCandidate; which when true will compute the attribution as if
 * that near neighbor was not present in RCF. This is turned on by default for
 * duplicate points seen by the forest, so that the attribution does not change
 * if a sequence of duplicate points are seen. For non-duplicate points, if the
 * boolean turned on, reduces effects of masking (when anomalous points are
 * included in the forest, which will be true with a few samples or when the
 * samples are not refreshed appropriately). It is worth remembering that
 * disallowing anomalous points from being included in the forest explicitly
 * will render the algorithm incapable of adjusting to a new normal -- which is
 * a strength of this algorithm.
 **/
public class AnomalyAttributionVisitor extends AbstractAttributionVisitor {

    /**
     * @param pointToScore    the point whose attribution is being computed
     * @param treeMass        the total mass of the tree being visited
     * @param ignoreThreshold leaves with mass at or below this threshold are
     *                        treated as unseen
     */
    public AnomalyAttributionVisitor(float[] pointToScore, int treeMass, int ignoreThreshold) {
        super(pointToScore, treeMass, ignoreThreshold);
    }

    public AnomalyAttributionVisitor(float[] pointToScore, int treeMass) {
        super(pointToScore, treeMass);
    }

    // the three components below use the library's default scoring, so the
    // attribution sums to the standard anomaly score

    @Override
    protected double scoreSeen(int depth, int mass) {
        return CommonUtils.defaultScoreSeenFunction(depth, mass);
    }

    @Override
    protected double scoreUnseen(int depth, int mass) {
        return CommonUtils.defaultScoreUnseenFunction(depth, mass);
    }

    @Override
    protected double damp(int leafMass, int treeMass) {
        return CommonUtils.defaultDampFunction(leafMass, treeMass);
    }
}
| 469 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/inspect/NearNeighborVisitor.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.inspect;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import com.amazon.randomcutforest.Visitor;
import com.amazon.randomcutforest.returntypes.Neighbor;
import com.amazon.randomcutforest.tree.INodeView;
/**
 * A visitor that returns the leaf node in a traversal if the distance between
 * the leaf point and the query point is less than a given threshold.
 */
public class NearNeighborVisitor implements Visitor<Optional<Neighbor>> {

    private final float[] queryPoint;
    private final double distanceThreshold;
    // the accepted leaf, or null if no leaf was within the threshold
    private Neighbor neighbor;

    /**
     * Create a NearNeighborVisitor for the given query point.
     *
     * @param queryPoint        The point whose neighbors we are looking for.
     * @param distanceThreshold Leaf points whose distance from the query point is
     *                          less than this value are considered near neighbors.
     */
    public NearNeighborVisitor(float[] queryPoint, double distanceThreshold) {
        this.queryPoint = queryPoint;
        this.distanceThreshold = distanceThreshold;
        neighbor = null;
    }

    /**
     * Create a NearNeighborVisitor which always returns the leaf point in the
     * traversal. The distance threshold is set to positive infinity.
     *
     * @param queryPoint The point whose neighbors we are looking for.
     */
    public NearNeighborVisitor(float[] queryPoint) {
        this(queryPoint, Double.POSITIVE_INFINITY);
    }

    /**
     * Near neighbors are identified in the {@link #acceptLeaf} method, hence this
     * method does nothing.
     *
     * @param node        the node being visited
     * @param depthOfNode the depth of the node being visited
     */
    @Override
    public void accept(INodeView node, int depthOfNode) {
    }

    /**
     * Check to see whether the Euclidean distance between the leaf point and the
     * query point is less than the distance threshold. If it is, then this visitor
     * will return an {@link java.util.Optional} containing this leaf point
     * (converted to a {@link Neighbor} object). Otherwise, this visitor will return
     * an empty Optional.
     *
     * @param leafNode    the leaf node being visited
     * @param depthOfNode the depth of the leaf node
     */
    @Override
    public void acceptLeaf(INodeView leafNode, int depthOfNode) {
        // assumes the lifted leaf point has the same length as queryPoint --
        // guaranteed by the tree's dimension invariant, TODO confirm for callers
        float[] leafPoint = leafNode.getLiftedLeafPoint();
        double distanceSquared = 0.0;
        for (int i = 0; i < leafPoint.length; i++) {
            double diff = queryPoint[i] - leafPoint[i];
            distanceSquared += diff * diff;
        }
        // compute the square root once; the original computed it twice
        double distance = Math.sqrt(distanceSquared);
        if (distance < distanceThreshold) {
            List<Long> sequenceIndexes = new ArrayList<>(leafNode.getSequenceIndexes().keySet());
            neighbor = new Neighbor(leafPoint, distance, sequenceIndexes);
        }
    }

    /**
     * @return an {@link Optional} containing the leaf point (converted to a
     *         {@link Neighbor} if the Euclidean distance between the leaf point and
     *         the query point is less than the distance threshold. Otherwise return
     *         an empty Optional.
     */
    @Override
    public Optional<Neighbor> getResult() {
        return Optional.ofNullable(neighbor);
    }

    @Override
    public boolean isConverged() {
        return true;
    }
}
| 470 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/util/ArrayPacking.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import static java.lang.Math.min;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class ArrayPacking {
    /**
     * For a given base value, return the smallest int value {@code p} so that
     * {@code base^(p + 1) >= Integer.MAX_VALUE}. This is the number of base-
     * {@code base} digits that can always be packed into a single non-negative
     * int. If {@code base >= Integer.MAX_VALUE}, return 1.
     *
     * @param base Compute the approximate log of {@code Integer.MAX_VALUE} in this
     *             base. Must be greater than 1.
     * @return the smallest int value {@code p} so that
     *         {@code base^(p + 1) >= Integer.MAX_VALUE}, or 1 if
     *         {@code base >= Integer.MAX_VALUE}.
     */
    public static int logMax(long base) {
        checkArgument(base > 1, "Absolute value of base must be greater than 1");
        int pack = 0;
        long num = base;
        while (num < Integer.MAX_VALUE) {
            num = num * base;
            ++pack;
        }
        return Math.max(pack, 1); // pack can be 0 for max - min being more than Integer.MaxValue
    }
    /**
     * Pack an array of ints. If {@code compress} is true, then this method will
     * apply arithmetic compression to the inputs, otherwise it returns a copy of
     * the input. Equivalent to {@code pack(inputArray, inputArray.length, compress)}.
     *
     * @param inputArray An array of ints to pack.
     * @param compress   A flag indicating whether to apply arithmetic compression.
     * @return an array of packed ints.
     */
    public static int[] pack(int[] inputArray, boolean compress) {
        return pack(inputArray, inputArray.length, compress);
    }
    /**
     * Pack an array of ints. If {@code compress} is true, then this method will
     * apply arithmetic compression to the inputs, otherwise it returns a copy of
     * the input.
     *
     * The compressed layout is a 3-int header {@code [min, max, length]} followed
     * by code words, each of which packs up to {@code logMax(max - min + 1)}
     * values (offset by {@code min}) in base {@code max - min + 1}, least
     * significant value first. When all values are equal only the header is
     * emitted.
     *
     * @param inputArray An array of ints to pack.
     * @param length     The length of the output array. Only the first
     *                   {@code length} values in {@code inputArray} will be packed.
     * @param compress   A flag indicating whether to apply arithmetic compression.
     * @return an array of packed ints.
     */
    public static int[] pack(int[] inputArray, int length, boolean compress) {
        checkNotNull(inputArray, "inputArray must not be null");
        checkArgument(0 <= length && length <= inputArray.length,
                "length must be between 0 and inputArray.length (inclusive)");

        // compression cannot win for fewer than 3 values (the header alone is 3)
        if (!compress || length < 3) {
            return Arrays.copyOf(inputArray, length);
        }

        int min = inputArray[0];
        int max = inputArray[0];
        for (int i = 1; i < length; i++) {
            min = min(min, inputArray[i]);
            max = Math.max(max, inputArray[i]);
        }

        // long arithmetic: max - min + 1 can overflow int
        long base = (long) max - min + 1;
        if (base == 1) {
            // all values equal; the header fully describes the array
            return new int[] { min, max, length };
        } else {
            int packNum = logMax(base);
            int[] output = new int[3 + (int) Math.ceil(1.0 * length / packNum)];
            output[0] = min;
            output[1] = max;
            output[2] = length;
            int len = 0;
            int used = 0;
            while (len < length) {
                long code = 0;
                int reach = min(len + packNum - 1, length - 1);
                // iterate backwards so inputArray[len] ends up least significant
                for (int i = reach; i >= len; i--) {
                    code = base * code + (inputArray[i] - min);
                }
                output[3 + used++] = (int) code;
                len += packNum;
            }
            // uncomment for debug; should be always true
            // checkArgument(used + 3 == output.length, "incorrect state");
            return output;
        }
    }
    /**
     * Pack an entire array of shorts. If {@code compress} is true, then this
     * method will apply arithmetic compression to the inputs, otherwise it returns
     * a (widened) copy of the input. Delegates to
     * {@link #pack(short[], int, boolean)} with {@code length = inputArray.length}.
     *
     * @param inputArray An array of shorts to pack.
     * @param compress   A flag indicating whether to apply arithmetic compression.
     * @return an array of packed ints.
     */
    public static int[] pack(short[] inputArray, boolean compress) {
        return pack(inputArray, inputArray.length, compress);
    }
    /**
     * Pack an array of shorts. If {@code compress} is true, then this method will
     * apply arithmetic compression to the inputs, otherwise it returns a (widened)
     * copy of the input.
     *
     * <p>
     * Compressed layout is identical to {@link #pack(int[], int, boolean)}:
     * {@code [min, max, length, code...]}, each code word packing up to
     * {@code packNum} values in base {@code (max - min + 1)}, least-significant
     * value first.
     *
     * @param inputArray An array of shorts to pack.
     * @param length     The length of the output array. Only the first
     *                   {@code length} values in {@code inputArray} will be packed.
     * @param compress   A flag indicating whether to apply arithmetic compression.
     * @return an array of packed ints.
     */
    public static int[] pack(short[] inputArray, int length, boolean compress) {
        checkNotNull(inputArray, "inputArray must not be null");
        checkArgument(0 <= length && length <= inputArray.length,
                "length must be between 0 and inputArray.length (inclusive)");
        if (!compress || length < 3) {
            // widen each short to an int; Arrays.copyOf cannot change element type
            int[] ret = new int[length];
            for (int i = 0; i < length; i++) {
                ret[i] = inputArray[i];
            }
            return ret;
        }
        int min = inputArray[0];
        int max = inputArray[0];
        for (int i = 1; i < length; i++) {
            min = min(min, inputArray[i]);
            max = Math.max(max, inputArray[i]);
        }
        long base = (long) max - min + 1;
        if (base == 1) {
            // all values equal: the 3-int header alone encodes the whole array
            return new int[] { min, max, length };
        } else {
            int packNum = logMax(base);
            int[] output = new int[3 + (int) Math.ceil(1.0 * length / packNum)];
            output[0] = min;
            output[1] = max;
            output[2] = length;
            int len = 0;
            int used = 0;
            while (len < length) {
                long code = 0;
                int reach = min(len + packNum - 1, length - 1);
                // iterate high-to-low so inputArray[len] becomes the least-significant digit
                for (int i = reach; i >= len; i--) {
                    code = base * code + (inputArray[i] - min);
                }
                output[3 + used++] = (int) code;
                len += packNum;
            }
            // uncomment for debug; should be always true
            // checkArgument(used + 3 == output.length, "incorrect state");
            return output;
        }
    }
    /**
     * Unpack an array previously created by {@link #pack(int[], int, boolean)},
     * recovering the original length from the packed header (index 2) when the
     * array was compressed.
     *
     * @param packedArray An array previously created by
     *                    {@link #pack(int[], int, boolean)}.
     * @param decompress  A flag indicating whether the packed array was created
     *                    with arithmetic compression enabled.
     * @return the array of unpacked ints.
     */
    public static int[] unpackInts(int[] packedArray, boolean decompress) {
        checkNotNull(packedArray, " array unpacking invoked on null arrays");
        if (!decompress) {
            return Arrays.copyOf(packedArray, packedArray.length);
        }
        // arrays shorter than 3 were stored uncompressed (no header present)
        return (packedArray.length < 3) ? unpackInts(packedArray, packedArray.length, decompress)
                : unpackInts(packedArray, packedArray[2], decompress);
    }
    /**
     * Unpack an array previously created by {@link #pack(int[], int, boolean)}.
     *
     * @param packedArray An array previously created by
     *                    {@link #pack(int[], int, boolean)}.
     * @param length      The desired length of the output array. If this number is
     *                    different from the length of the array that was originally
     *                    packed, then the result will be truncated or padded with
     *                    zeros as needed.
     * @param decompress  A flag indicating whether the packed array was created
     *                    with arithmetic compression enabled.
     * @return the array of unpacked ints.
     */
    public static int[] unpackInts(int[] packedArray, int length, boolean decompress) {
        checkNotNull(packedArray, " array unpacking invoked on null arrays");
        checkArgument(length >= 0, "incorrect length parameter");
        if (packedArray.length < 3 || !decompress) {
            return Arrays.copyOf(packedArray, length);
        }
        // header layout produced by pack: [min, max, originalLength, code...]
        int min = packedArray[0];
        int max = packedArray[1];
        int[] output = new int[length];
        if (min == max) {
            // constant-run encoding: header alone represents packedArray[2] copies of min;
            // any positions beyond packedArray[2] remain zero (padding)
            if (packedArray[2] >= length) {
                Arrays.fill(output, min);
            } else {
                for (int i = 0; i < packedArray[2]; i++) {
                    output[i] = min;
                }
            }
        } else {
            long base = ((long) max - min + 1);
            int packNum = logMax(base);
            int count = 0;
            for (int i = 3; i < packedArray.length; i++) {
                long code = packedArray[i];
                // decode least-significant digit first, mirroring the packing order
                for (int j = 0; j < packNum && count < min(packedArray[2], length); j++) {
                    output[count++] = (int) (min + code % base);
                    code = (int) (code / base);
                }
            }
        }
        return output;
    }
private static short[] copyToShort(int[] array, int length) {
short[] ret = new short[length];
for (int i = 0; i < Math.min(length, array.length); i++) {
ret[i] = (short) array[i];
}
return ret;
}
    /**
     * Unpack an array previously created by {@link #pack(short[], int, boolean)},
     * recovering the original length from the packed header (index 2) when the
     * array was compressed.
     *
     * @param packedArray An array previously created by
     *                    {@link #pack(short[], int, boolean)}.
     * @param decompress  A flag indicating whether the packed array was created
     *                    with arithmetic compression enabled.
     * @return the array of unpacked shorts.
     */
    public static short[] unpackShorts(int[] packedArray, boolean decompress) {
        checkNotNull(packedArray, " array unpacking invoked on null arrays");
        if (!decompress) {
            return copyToShort(packedArray, packedArray.length);
        }
        // arrays shorter than 3 were stored uncompressed (no header present)
        return (packedArray.length < 3) ? unpackShorts(packedArray, packedArray.length, decompress)
                : unpackShorts(packedArray, packedArray[2], decompress);
    }
    /**
     * Unpack an array previously created by {@link #pack(short[], int, boolean)}.
     *
     * @param packedArray An array previously created by
     *                    {@link #pack(short[], int, boolean)}.
     * @param length      The desired length of the output array. If this number is
     *                    different from the length of the array that was originally
     *                    packed, then the result will be truncated or padded with
     *                    zeros as needed.
     * @param decompress  A flag indicating whether the packed array was created
     *                    with arithmetic compression enabled.
     * @return the array of unpacked shorts.
     */
    public static short[] unpackShorts(int[] packedArray, int length, boolean decompress) {
        checkNotNull(packedArray, " array unpacking invoked on null arrays");
        checkArgument(length >= 0, "incorrect length parameter");
        if (packedArray.length < 3 || !decompress) {
            return copyToShort(packedArray, length);
        }
        // header layout produced by pack: [min, max, originalLength, code...]
        int min = packedArray[0];
        int max = packedArray[1];
        short[] output = new short[length];
        if (min == max) {
            // constant-run encoding: header alone represents packedArray[2] copies of min;
            // any positions beyond packedArray[2] remain zero (padding)
            if (packedArray[2] >= length) {
                Arrays.fill(output, (short) min);
            } else {
                for (int i = 0; i < packedArray[2]; i++) {
                    output[i] = (short) min;
                }
            }
        } else {
            long base = ((long) max - min + 1);
            int packNum = logMax(base);
            int count = 0;
            for (int i = 3; i < packedArray.length; i++) {
                long code = packedArray[i];
                // decode least-significant digit first, mirroring the packing order
                for (int j = 0; j < packNum && count < min(packedArray[2], length); j++) {
                    output[count++] = (short) (min + code % base);
                    code = (int) (code / base);
                }
            }
        }
        return output;
    }
    /**
     * Pack an entire array of doubles into an array of bytes (big-endian, via
     * {@link java.nio.ByteBuffer}). Delegates to {@link #pack(double[], int)}.
     *
     * @param array An array of doubles.
     * @return An array of bytes representing the original array of doubles.
     */
    public static byte[] pack(double[] array) {
        checkNotNull(array, "array must not be null");
        return pack(array, array.length);
    }
/**
* Pack an array of doubles into an array of bytes.
*
* @param array An array of doubles.
* @param length The number of doubles in the input array to pack into the
* resulting byte array.
* @return An array of bytes representing the original array of doubles.
*/
public static byte[] pack(double[] array, int length) {
checkNotNull(array, "array must not be null");
checkArgument(0 <= length, "incorrect length parameter");
checkArgument(length <= array.length, "length must be between 0 and inputArray.length (inclusive)");
ByteBuffer buf = ByteBuffer.allocate(length * Double.BYTES);
for (int i = 0; i < length; i++) {
buf.putDouble(array[i]);
}
return buf.array();
}
    /**
     * Pack an entire array of floats into an array of bytes (big-endian, via
     * {@link java.nio.ByteBuffer}). Delegates to {@link #pack(float[], int)}.
     *
     * @param array An array of floats.
     * @return An array of bytes representing the original array of floats.
     */
    public static byte[] pack(float[] array) {
        checkNotNull(array, "array must not be null");
        return pack(array, array.length);
    }
/**
* Pack an array of floats into an array of bytes.
*
* @param array An array of floats.
* @param length The number of doubles in the input array to pack into the
* resulting byte array.
* @return An array of bytes representing the original array of floats.
*/
public static byte[] pack(float[] array, int length) {
checkArgument(0 <= length, "incorrect length parameter");
checkArgument(length <= array.length, "length must be between 0 and inputArray.length (inclusive)");
ByteBuffer buf = ByteBuffer.allocate(length * Float.BYTES);
for (int i = 0; i < length; i++) {
buf.putFloat(array[i]);
}
return buf.array();
}
    /**
     * Unpack an array of bytes as an array of doubles, using every complete group
     * of {@code Double.BYTES} bytes. Delegates to {@link #unpackDoubles(byte[], int)}.
     *
     * @param bytes An array of bytes.
     * @return an array of doubles obtained by marshalling consecutive bytes in the
     *         input array into doubles.
     */
    public static double[] unpackDoubles(byte[] bytes) {
        checkNotNull(bytes, "bytes must not be null");
        return unpackDoubles(bytes, bytes.length / Double.BYTES);
    }
/**
* Unpack an array of bytes as an array of doubles.
*
* @param bytes An array of bytes.
* @param length The desired length of the resulting double array. The input
* will be truncated or padded with zeros as needed.
* @return an array of doubles obtained by marshalling consecutive bytes in the
* input array into doubles.
*/
public static double[] unpackDoubles(byte[] bytes, int length) {
checkNotNull(bytes, "bytes must not be null");
checkArgument(length >= 0, "length must be greater than or equal to 0");
checkArgument(bytes.length % Double.BYTES == 0, "bytes.length must be divisible by Double.BYTES");
ByteBuffer buf = ByteBuffer.wrap(bytes);
double[] result = new double[length];
int m = Math.min(length, bytes.length / Double.BYTES);
for (int i = 0; i < m; i++) {
result[i] = buf.getDouble();
}
return result;
}
    /**
     * Unpack an array of bytes as an array of floats, using every complete group
     * of {@code Float.BYTES} bytes. Delegates to {@link #unpackFloats(byte[], int)}.
     *
     * @param bytes An array of bytes.
     * @return an array of floats obtained by marshalling consecutive bytes in the
     *         input array into floats.
     */
    public static float[] unpackFloats(byte[] bytes) {
        checkNotNull(bytes, "bytes must not be null");
        return unpackFloats(bytes, bytes.length / Float.BYTES);
    }
/**
* Unpack an array of bytes as an array of floats.
*
* @param bytes An array of bytes.
* @param length The desired length of the resulting float array. The input will
* be truncated or padded with zeros as needed.
* @return an array of doubles obtained by marshalling consecutive bytes in the
* input array into floats.
*/
public static float[] unpackFloats(byte[] bytes, int length) {
checkNotNull(bytes, "bytes must not be null");
checkArgument(length >= 0, "length must be greater than or equal to 0");
checkArgument(bytes.length % Float.BYTES == 0, "bytes.length must be divisible by Float.BYTES");
ByteBuffer buf = ByteBuffer.wrap(bytes);
float[] result = new float[length];
int m = Math.min(length, bytes.length / Float.BYTES);
for (int i = 0; i < m; i++) {
result[i] = buf.getFloat();
}
return result;
}
}
| 471 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/util/Weighted.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
* a container class that manages weights
*
* @param <Q>
*/
public class Weighted<Q> {
    // the wrapped object (field name predates its current generic use)
    public Q index;
    // the (non-negative, by convention of the samplers) weight of the object
    public float weight;
    public Weighted(Q object, float weight) {
        this.index = object;
        this.weight = weight;
    }
    /**
     * a generic MonteCarlo sampler that creates an Arraylist of WeightedIndexes
     *
     * @param input               input list of weighted objects
     * @param seed                random seed for reproducibility
     * @param lengthBound         approximate target size of the sample; inputs
     *                            already smaller than this are returned unchanged
     * @param forceSampleFraction add the items which have weight over this fraction
     * @param scale               scale that multiples the weights of the remainder.
     *                            Note that elements that are sampled are rescaled
     *                            to have ensured that the total weight (after
     *                            removal of heavy items) remains the same in
     *                            expectation
     * @param <Q>                 a generic index type, typically float[] in the
     *                            current usage
     * @return a randomly sampled arraylist (which can be the same list) of length
     *         about LengthBound
     */
    public static <Q> List<Weighted<Q>> createSample(List<Weighted<Q>> input, long seed, int lengthBound,
            double forceSampleFraction, double scale) {
        if (input.size() < lengthBound) {
            return input;
        }
        ArrayList<Weighted<Q>> samples = new ArrayList<>();
        Random rng = new Random(seed);
        // NOTE(review): assumes input is non-empty with positive total weight
        // (reduce().get() and the division by totalWeight below) — verify at callers
        double totalWeight = input.stream().map(x -> (double) x.weight).reduce(Double::sum).get();
        double remainder = totalWeight;
        if (forceSampleFraction > 0) {
            // deterministically keep "heavy" items (weight above the fraction of the
            // total) and compute the total weight of the rest, which is sampled below
            remainder = input.stream().map(e -> {
                if (e.weight > totalWeight * forceSampleFraction) {
                    samples.add(new Weighted<>(e.index, e.weight));
                    return 0.0;
                } else {
                    return (double) e.weight;
                }
            }).reduce(Double::sum).get();
        }
        // factor is the per-element sampling probability (<= 1 since size >= lengthBound);
        // newScale rescales sampled weights so the expected total weight is preserved
        float factor = (float) (lengthBound * 1.0 / input.size());
        float newScale = (float) (scale * (remainder / totalWeight) / factor);
        input.stream().forEach(e -> {
            if ((e.weight <= totalWeight * forceSampleFraction) && (rng.nextDouble() < factor)) {
                samples.add(new Weighted<>(e.index, e.weight * newScale));
            }
        });
        return samples;
    }
    /**
     * an utility routine to pick the element such that the prefix sum including
     * that element exceeds a weight (or is the last element)
     *
     * @param points a list of weighted objects
     * @param wt     a parameter determining the cumulative weight
     * @param <Q>    the index type of the weighted objects
     * @return the position of the item satisfying the prefix condition or the last
     *         element
     */
    public static <Q> Weighted<Q> prefixPick(List<Weighted<Q>> points, double wt) {
        checkArgument(points.size() > 0, "cannot pick from an empty list");
        double running = wt;
        Weighted<Q> saved = points.get(0);
        for (Weighted<Q> point : points) {
            if (running - point.weight <= 0.0) {
                return point;
            }
            running -= point.weight;
            // remember the most recently seen point; returned when wt exceeds the total
            saved = point;
        }
        return saved;
    }
}
| 472 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/util/ArrayUtils.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import java.util.Arrays;
/**
 * A utility class for data arrays.
 */
public class ArrayUtils {
    /**
     * Returns a clean deep copy of the point. Current clean-ups include changing
     * negative zero -0.0 to positive zero 0.0.
     *
     * @param point The original data point.
     * @return a clean deep copy of the original point.
     */
    public static double[] cleanCopy(double[] point) {
        double[] copy = new double[point.length];
        for (int i = 0; i < point.length; i++) {
            double value = point[i];
            // -0.0 == 0.0 is true under IEEE 754 comparison, so this maps negative
            // zero to positive zero; NaN fails the test and is carried over as-is
            copy[i] = (value == 0.0) ? 0.0 : value;
        }
        return copy;
    }
    /**
     * Returns a clean deep copy of the point, changing negative zero -0.0f to
     * positive zero 0.0f.
     *
     * @param point The original data point.
     * @return a clean deep copy of the original point.
     */
    public static float[] cleanCopy(float[] point) {
        float[] copy = new float[point.length];
        for (int i = 0; i < point.length; i++) {
            float value = point[i];
            copy[i] = (value == 0.0f) ? 0.0f : value;
        }
        return copy;
    }
}
| 473 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/util/ShingleBuilder.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.util;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
/**
* A utility class for creating shingled points, which are also referred to as
* shingles. A shingle consists of multiple points appended together. If
* individual points have n dimensions, and we include k points in a shingle,
* then the shingle will have size n * m.
*
* There are two strategies for shingling: sliding and cyclic. In a sliding
* shingle, new points are appended to the end of the shingle, and old points
* are removed from the front. For example, if we have a shingle size of 4 which
* currently contains the points a, b, c, and d, then we can represent the
* shingle as abcd. The following schematic shows how the shingle is updated as
* we add new points e and f.
*
* <pre>
* abcd => bcde
* bcde => cdef
* </pre>
*
* With cycling shingling, when a new point is added to a shingle it overwrites
* the oldest point in the shingle. Using the same setup as above, a cyclic
* shingle would be updated as follows:
*
* <pre>
* abcd => ebcd
* ebcd => efcd
* </pre>
*/
public class ShingleBuilder {
    /**
     * Number of dimensions of each point in the shingle.
     */
    private final int dimensions;
    /**
     * Number of points in the shingle.
     */
    private final int shingleSize;
    /**
     * A buffer containing points recently added to the shingle, used as a ring
     * buffer indexed by {@code shingleIndex}.
     */
    private final double[][] recentPoints;
    /**
     * A flag indicating whether we should use a cyclic shift or a linear shift when
     * creating shingles.
     */
    private final boolean cyclic;
    /**
     * The index where the next point will be copied to. This is equal to the index
     * of the oldest point currently in the shingle.
     */
    private int shingleIndex;
    /**
     * A flag indicating whether the shingle has been completely filled once.
     */
    private boolean full;
    /**
     * Create a new ShingleBuilder with the given dimensions and shingle size.
     *
     * @param dimensions  The number of dimensions in the input points.
     * @param shingleSize The number of points to store in a shingle.
     * @param cyclic      If true, the shingle will use cyclic updates. If false, it
     *                    will use sliding updates.
     */
    public ShingleBuilder(int dimensions, int shingleSize, boolean cyclic) {
        checkArgument(dimensions > 0, "dimensions must be greater than 0");
        checkArgument(shingleSize > 0, "shingleSize must be greater than 0");
        this.dimensions = dimensions;
        this.shingleSize = shingleSize;
        this.cyclic = cyclic;
        recentPoints = new double[shingleSize][dimensions];
        shingleIndex = 0;
        full = false;
    }
    /**
     * Create a ShingleBuilder with the given dimensions and shingleSize. The
     * resulting builder uses sliding updates.
     *
     * @param dimensions  The number of dimensions in the input points.
     * @param shingleSize The number of points to store in a shingle.
     */
    public ShingleBuilder(int dimensions, int shingleSize) {
        this(dimensions, shingleSize, false);
    }
    /**
     * @return true if the shingle has been completely filled once, false otherwise.
     */
    public boolean isFull() {
        return full;
    }
    /**
     * @return the number of dimensions in input points.
     */
    public int getInputPointSize() {
        return dimensions;
    }
    /**
     * @return the number of dimensions in a shingled point.
     */
    public int getShingledPointSize() {
        return dimensions * shingleSize;
    }
    /**
     * @return true if this ShingleBuilder uses cyclic updates, false otherwise.
     */
    public boolean isCyclic() {
        return cyclic;
    }
    /**
     * Return the index where the next input point will be stored in the internal
     * shingle buffer. If the ShingleBuilder uses cyclic updates, this value
     * indicates the current point in the cycle.
     *
     * @return the index where the next input point will be stored in the internal
     *         shingle buffer.
     */
    public int getShingleIndex() {
        return shingleIndex;
    }
    /**
     * Add a new point to this shingle, overwriting the oldest stored point. The
     * point values are copied.
     *
     * @param point The new point to be added to the shingle.
     */
    public void addPoint(double[] point) {
        checkNotNull(point, "point must not be null");
        checkArgument(point.length == dimensions, String.format("point.length must equal %d", dimensions));
        System.arraycopy(point, 0, recentPoints[shingleIndex], 0, dimensions);
        // advance the ring-buffer cursor; wrapping to 0 means one full pass is done
        shingleIndex = (shingleIndex + 1) % shingleSize;
        if (!full && shingleIndex == 0) {
            full = true;
        }
    }
    /**
     * @return the current shingled point as a newly allocated array.
     */
    public double[] getShingle() {
        double[] shingle = new double[shingleSize * dimensions];
        getShingle(shingle);
        return shingle;
    }
    /**
     * Write the current shingled point into the supplied buffer.
     *
     * @param shingle A buffer where the shingled point will be written.
     */
    public void getShingle(double[] shingle) {
        checkNotNull(shingle, "shingle must not be null");
        checkArgument(shingle.length == dimensions * shingleSize, "shingle.length must be dimensions * shingleSize");
        // sliding shingles start from the oldest point (shingleIndex); cyclic
        // shingles always start from buffer position 0
        int beginIndex = cyclic ? 0 : shingleIndex;
        for (int i = 0; i < shingleSize; i++) {
            System.arraycopy(recentPoints[(beginIndex + i) % shingleSize], 0, shingle, i * dimensions, dimensions);
        }
    }
}
| 474 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/Precision.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Options for floating-point precision used when storing point values.
 */
public enum Precision {
    /**
     * Single-precision (32 bit) floating point numbers.
     */
    FLOAT_32,
    /**
     * Double-precision (64 bit) floating point numbers.
     */
    FLOAT_64;
}
| 475 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/TransformMethod.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Options for internally transforming data in RCF. These are provided for
 * convenience; applying domain knowledge before feeding data into RCF (or any
 * tool) will often yield the best results. These transformations apply to the
 * basic data and not to timestamps: time is (hopefully) always moving forward
 * and is measured shifted (from a running mean), with an option of
 * normalization.
 */
public enum TransformMethod {
    /**
     * no transformation — often the best choice when the data is already
     * well-conditioned
     */
    NONE,
    /**
     * standard column normalization using fixed weights
     */
    WEIGHTED,
    /**
     * subtract a moving average -- the average would be computed using the same
     * discount factor as the time decay of the RCF samplers.
     */
    SUBTRACT_MA,
    /**
     * divide by standard deviation, after subtracting MA
     */
    NORMALIZE,
    /**
     * difference from the previous value
     */
    DIFFERENCE,
    /**
     * divide by standard deviation of difference, after differencing (again
     * subtract MA)
     */
    NORMALIZE_DIFFERENCE;
}
| 476 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/ImputationMethod.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Options for filling in missing values.
 */
public enum ImputationMethod {
    /**
     * use all 0's
     */
    ZERO,
    /**
     * use a fixed set of specified values (same dimension as the input)
     */
    FIXED_VALUES,
    /**
     * last known value in each input dimension
     */
    PREVIOUS,
    /**
     * next seen value in each input dimension
     */
    NEXT,
    /**
     * linear interpolation between surrounding values
     */
    LINEAR,
    /**
     * use the RCF imputation; but would often require a minimum number of
     * observations and would use defaults (often LINEAR) till that point
     */
    RCF;
}
| 477 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/ScoringStrategy.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Options for scoring in RCF, especially when used with thresholds.
 */
public enum ScoringStrategy {
    /**
     * the default behavior, subject to future optimization; currently scores by
     * expected inverse depth
     */
    EXPECTED_INVERSE_DEPTH,
    /**
     * This is the same as STANDARD mode where the scoring function is switched to
     * distances between the vectors. Since RCFs build a multiresolution tree, and
     * in the aggregate, preserves distances to some approximation, this provides an
     * alternate anomaly detection mechanism which can be useful for shingleSize = 1
     * and (dynamic) population analysis via RCFs. Specifically it switches the
     * scoring to be based on the distance computation in the Density Estimation
     * (interpolation). This allows for a direct comparison of clustering based
     * outlier detection and RCFs over numeric vectors. All transformations
     * available to the STANDARD mode in the ThresholdedRCF are available for this
     * mode as well; this does not affect RandomCutForest core in any way. For
     * timeseries analysis the STANDARD mode is recommended, but this does provide
     * another option in combination with the TransformMethods.
     */
    DISTANCE,
    /**
     * RCFs are an updatable data structure that can support multiple different
     * inference methods. Given the longstanding interest in ensembles of different
     * models, this strategy uses the multiple inference capabilities to increase
     * precision. It does not escape our attention that multi-mode allows the
     * functionality of multi-models yet uses a significantly smaller state/memory
     * footprint since all the modes use RCF. The different modes are probed with
     * computational efficiency in mind.
     */
    MULTI_MODE,
    /**
     * Same as above, except optimized for increasing recall.
     */
    MULTI_MODE_RECALL;
}
| 478 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/CorrectionMode.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Reasons an anomaly score or grade may be corrected (reduced) by thresholded
 * RCF variants.
 */
public enum CorrectionMode {
    /**
     * default behavior, no correction
     */
    NONE,
    /**
     * correction due to transforms, or due to input noise
     */
    NOISE,
    /**
     * elimination due to multi mode operation
     */
    MULTI_MODE,
    /**
     * effect of a previous anomaly lingering in the shingle
     */
    ANOMALY_IN_SHINGLE,
    /**
     * conditional forecast, using conditional fields
     */
    CONDITIONAL_FORECAST,
    /**
     * forecasted value was not very different from the observed one
     */
    FORECAST,
    /**
     * data drifts and level shifts, will not be corrected unless level shifts are
     * turned on
     */
    DATA_DRIFT
}
| 479 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/Config.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Names of dynamically configurable parameters, presumably intended as the
 * {@code name} arguments of {@code IDynamicConfig.setConfig} — verify against
 * callers.
 */
public class Config {
    // fraction of bounding boxes kept cached in the trees
    public static final String BOUNDING_BOX_CACHE_FRACTION = "bounding_box_cache_fraction";
    // time-decay (sampling discount) parameter of the forest
    public static final String TIME_DECAY = "time_decay";
}
| 480 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/ForestMode.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * Options for using RCF, especially with thresholds.
 */
public enum ForestMode {
    /**
     * a standard mode that uses shingling and most known applications; it uses the
     * last K data points where K=1 would correspond to non time series (population)
     * analysis
     */
    STANDARD,
    /**
     * time stamp is added automatically to data to correlate within RCF itself;
     * this is useful for event streams and for modeling sparse events. Option is
     * provided to normalize the time gaps.
     */
    TIME_AUGMENTED,
    /**
     * uses various fill-in strategies for data with gaps but not really sparse.
     * Must have shingleSize greater than 1; typically a larger shingle size is
     * better, and so are fewer input dimensions
     */
    STREAMING_IMPUTE;
}
| 481 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/config/IDynamicConfig.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.config;
/**
 * This interface is used by model classes to configure model parameters by
 * name. This is intended primarily for settings that a user may want to change
 * at runtime.
 */
public interface IDynamicConfig {

    /**
     * Sets a named configuration value.
     *
     * @param name  the configuration key (see {@code Config} for known keys)
     * @param value the new value
     * @param clazz the runtime class of the value, used by implementations to
     *              validate/dispatch on the type
     * @param <T>   the value type
     */
    <T> void setConfig(String name, T value, Class<T> clazz);

    // convenience overloads: each forwards to the generic setter with the
    // corresponding boxed class token
    default void setConfig(String name, short value) {
        setConfig(name, value, Short.class);
    }

    default void setConfig(String name, int value) {
        setConfig(name, value, Integer.class);
    }

    default void setConfig(String name, long value) {
        setConfig(name, value, Long.class);
    }

    default void setConfig(String name, float value) {
        setConfig(name, value, Float.class);
    }

    default void setConfig(String name, double value) {
        setConfig(name, value, Double.class);
    }

    default void setConfig(String name, boolean value) {
        setConfig(name, value, Boolean.class);
    }

    /**
     * Reads a named configuration value.
     *
     * @param name  the configuration key
     * @param clazz the expected class of the value
     * @param <T>   the value type
     * @return the current value for the key
     */
    <T> T getConfig(String name, Class<T> clazz);

    // untyped read; returns the value as a plain Object
    default Object getConfig(String name) {
        return getConfig(name, Object.class);
    }
}
| 482 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/summarization/Center.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.summarization;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.exp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.util.Weighted;
/**
 * the following class abstracts a single centroid representation of a group of
 * points
 */
public class Center implements ICluster<float[]> {

    // the centroid coordinate; mutated in place by recompute() and absorb()
    float[] representative;
    // total (possibly fractional, under soft assignment) weight of assigned points
    double weight;
    // indices into an external point store, with the weight assigned to this center
    ArrayList<Weighted<Integer>> assignedPoints;
    // sum over assigned points of (assigned weight * distance to representative)
    double sumOfRadius;
    // snapshots taken at reset()/recompute() to measure improvement between rounds
    double previousWeight = 0;
    double previousSumOFRadius = 0;

    Center(float[] coordinate, float weight) {
        // explicitly copied because array elements will change
        this.representative = Arrays.copyOf(coordinate, coordinate.length);
        this.weight = weight;
        this.assignedPoints = new ArrayList<>();
    }

    // static factory mirroring the package-private constructor
    public static Center initialize(float[] coordinate, float weight) {
        return new Center(coordinate, weight);
    }

    // adds a point; only the index to keep space bounds lower
    // note that the weight may not be the entire weight of a point in case of a
    // "soft" assignment
    public void addPoint(int index, float weight, double dist, float[] point,
            BiFunction<float[], float[], Double> distance) {
        assignedPoints.add(new Weighted<>(index, weight));
        this.weight += weight;
        this.sumOfRadius += weight * dist;
    }

    // the following sets up reassignment of the coordinate based on the points
    // assigned to the center
    // NOTE(review): unlike GenericMultiCenter.reset(), sumOfRadius is not zeroed
    // here; recompute() overwrites it later, but a reset() without a subsequent
    // recompute() leaves a stale value -- confirm this is intended
    public void reset() {
        assignedPoints = new ArrayList<>();
        previousWeight = weight;
        weight = 0;
        previousSumOFRadius = sumOfRadius;
    }

    // average distance of an assigned point from the representative; 0 if empty
    public double averageRadius() {
        return (weight > 0) ? sumOfRadius / weight : 0;
    }

    // average radius computation, provides an extent measure
    public double extentMeasure() {
        return (weight > 0) ? sumOfRadius / weight : 0;
    }

    public double getWeight() {
        return weight;
    }

    // a standard reassignment using the median values and NOT the mean; the mean is
    // unlikely to
    // provide robust convergence
    public double recompute(Function<Integer, float[]> getPoint, boolean approx,
            BiFunction<float[], float[], Double> distance) {
        if (assignedPoints.size() == 0 || weight == 0.0) {
            Arrays.fill(representative, 0); // zero out values
            return 0;
        }
        previousSumOFRadius = sumOfRadius;
        sumOfRadius = 0;
        // per coordinate: take the weighted median of the assigned points
        for (int i = 0; i < representative.length; i++) {
            int index = i;
            // the following would be significantly slow unless points are backed by arrays
            assignedPoints
                    .sort((o1, o2) -> Double.compare(getPoint.apply(o1.index)[index], getPoint.apply(o2.index)[index]));
            // walk the sorted list until half the total weight is consumed
            double runningWeight = weight / 2;
            int position = 0;
            while (runningWeight >= 0 && position < assignedPoints.size()) {
                if (runningWeight > assignedPoints.get(position).weight) {
                    runningWeight -= assignedPoints.get(position).weight;
                    ++position;
                } else {
                    break;
                }
            }
            if (position == assignedPoints.size()) {
                position--;
            }
            representative[index] = getPoint.apply(assignedPoints.get(position).index)[index];
        }
        // recompute the weighted radius sum against the new representative
        for (int j = 0; j < assignedPoints.size(); j++) {
            double addTerm = distance.apply(representative, getPoint.apply(assignedPoints.get(j).index))
                    * assignedPoints.get(j).weight;
            checkArgument(addTerm >= 0, "distances or weights cannot be negative");
            sumOfRadius += addTerm;
        }
        // positive return value indicates the extent shrank (an improvement)
        return (previousSumOFRadius - sumOfRadius);
    }

    @Override
    public List<Weighted<Integer>> getAssignedPoints() {
        return assignedPoints;
    }

    // merges a center into another
    // this can be followed by a reassignment step; however the merger uses a
    // sigmoid based weightage
    // for robustness
    public void absorb(ICluster<float[]> other, BiFunction<float[], float[], Double> distance) {
        // find the representative of `other` closest to this center
        List<Weighted<float[]>> representatives = other.getRepresentatives();
        float[] closest = representatives.get(0).index;
        double dist = Double.MAX_VALUE;
        for (Weighted<float[]> e : representatives) {
            double t = distance.apply(e.index, representative);
            checkArgument(t >= 0, "distances cannot be negative");
            if (t < dist) {
                dist = t;
                closest = e.index;
            }
        }
        double otherWeight = other.getWeight();
        // sigmoid in the weight difference: the heavier cluster dominates the blend
        double expRatio = exp(2 * (weight - otherWeight) / (weight + otherWeight));
        double factor = expRatio / (1.0 + expRatio);
        for (int i = 0; i < representative.length; i++) {
            representative[i] = (float) (factor * representative[i] + (1 - factor) * closest[i]);
        }
        // distance is (approximately) the reverse of the ratio
        // this computation is meant to be approximate
        sumOfRadius += (weight * (1.0 - factor) + otherWeight * factor) * dist;
        weight += otherWeight;
        assignedPoints.addAll(other.getAssignedPoints());
        other.reset();
    }

    public double distance(float[] point, BiFunction<float[], float[], Double> distance) {
        double t = distance.apply(point, representative);
        checkArgument(t >= 0, "distance cannot be negative");
        return t;
    }

    @Override
    public double distance(ICluster<float[]> other, BiFunction<float[], float[], Double> distance) {
        // delegate: distance from the other cluster to our single representative
        return other.distance(representative, distance);
    }

    @Override
    public float[] primaryRepresentative(BiFunction<float[], float[], Double> distance) {
        // defensive copy so callers cannot mutate the centroid
        return Arrays.copyOf(representative, representative.length);
    }

    @Override
    public List<Weighted<float[]>> getRepresentatives() {
        ArrayList<Weighted<float[]>> answer = new ArrayList<>();
        answer.add(new Weighted<>(representative, (float) weight));
        return answer;
    }
}
| 483 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/summarization/GenericMultiCenter.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.summarization;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static java.lang.Math.min;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.util.Weighted;
/**
 * the following class abstracts a multi-representative description of a group
 * of points. The class is modeled after the well scattered representatives used
 * in CURE https://en.wikipedia.org/wiki/CURE_algorithm
 *
 * The number of representatives (referred as c in above) determines the
 * possible shapes that can be represented. Setting c=1 corresponds to standard
 * centroid based clustering
 *
 * The parameter shrinkage is slightly different from its usage in CURE,
 * although the idea of its use is similar. The main reason is that CURE was
 * designed for geometric spaces, and RCFSummarize is designed to support
 * arbitrary distance based clustering; once the user provides a distance
 * function from (R, R) into double based on ideas of STREAM
 * https://en.wikipedia.org/wiki/Data_stream_clustering In CURE, shrinkage was
 * used to create representatives close to the center of a cluster which is
 * impossible for generic types R. Instead shrinkage value in [0,1] corresponds
 * to morphing the distance function to "pretend" as if the distance is to the
 * primary representative of the cluster.
 *
 * This generic version does not store any assigned points. As a result the size
 * is bounded and these clusters are ideal for streaming algorithms where
 * resource usage would not increase with more data.
 */
public class GenericMultiCenter<R> implements ICluster<R> {

    public static int DEFAULT_NUMBER_OF_REPRESENTATIVES = 5;

    public static double DEFAULT_SHRINKAGE = 0.0;

    // number of scattered representatives retained (c in the CURE description)
    int numberOfRepresentatives = DEFAULT_NUMBER_OF_REPRESENTATIVES;
    // in [0,1]; 0 uses the nearest representative, 1 uses only the primary one
    double shrinkage = DEFAULT_SHRINKAGE;
    // representatives.get(0) is the primary representative by convention
    ArrayList<Weighted<R>> representatives;
    // total weight of points accounted to this cluster
    double weight;
    // running sum of (weight * distance) capturing the cluster extent
    double sumOfRadius;
    // snapshots taken at reset() to measure improvement between rounds
    double previousWeight = 0;
    double previousSumOFRadius = 0;

    GenericMultiCenter(R coordinate, float weight, double shrinkage, int numberOfRepresentatives) {
        // explicitly copied because array elements will change
        this.representatives = new ArrayList<>();
        this.representatives.add(new Weighted<>(coordinate, weight));
        this.weight = weight;
        this.numberOfRepresentatives = numberOfRepresentatives;
        this.shrinkage = shrinkage;
    }

    /**
     * static factory with argument validation
     *
     * @param coordinate              initial (primary) representative
     * @param weight                  initial weight
     * @param shrinkage               distance-morphing parameter in [0,1]
     * @param numberOfRepresentatives number of scattered representatives in (0,100]
     * @param <R>                     type of the objects being clustered
     * @return a new cluster seeded with the single representative
     */
    public static <R> GenericMultiCenter<R> initialize(R coordinate, float weight, double shrinkage,
            int numberOfRepresentatives) {
        checkArgument(shrinkage >= 0 && shrinkage <= 1.0, " parameter has to be in [0,1]");
        checkArgument(numberOfRepresentatives > 0 && numberOfRepresentatives <= 100,
                " the number of representatives has to be in (0,100]");
        return new GenericMultiCenter<>(coordinate, weight, shrinkage, numberOfRepresentatives);
    }

    // adds a point; only the index to keep space bounds lower
    // note that the weight may not be the entire weight of a point in case of a
    // "soft" assignment
    public void addPoint(int index, float weight, double dist, R point, BiFunction<R, R, Double> distance) {
        // accounting for the closest representative, if there are more than one
        Weighted<R> closest = representatives.get(0);
        if (representatives.size() > 1) {
            double newDist = distance.apply(point, representatives.get(0).index);
            for (int i = 1; i < representatives.size(); i++) {
                double t = distance.apply(point, representatives.get(i).index);
                if (t < newDist) {
                    newDist = t;
                    closest = representatives.get(i);
                }
            }
        }
        closest.weight += weight;
        this.weight += weight;
        this.sumOfRadius += weight * dist;
    }

    // the following sets up reassignment of the coordinate based on the points
    // assigned to the center
    public void reset() {
        previousWeight = weight;
        weight = 0;
        for (int i = 0; i < representatives.size(); i++) {
            representatives.get(i).weight = 0;
        }
        previousSumOFRadius = sumOfRadius;
        sumOfRadius = 0;
    }

    public double averageRadius() {
        return (weight > 0) ? sumOfRadius / weight : 0;
    }

    // a smaller extent measure than averageRadius(); scaling down by the number of
    // representatives biases toward nearest-neighbor merges
    public double extentMeasure() {
        return (weight > 0) ? 0.5 * sumOfRadius / (numberOfRepresentatives * weight) : 0;
    }

    public double getWeight() {
        return weight;
    }

    // reassignment may not be meaningful for generic types, without additional
    // information
    public double recompute(Function<Integer, R> getPoint, boolean flag, BiFunction<R, R, Double> distanceFunction) {
        return 0;
    }

    /**
     * merges another cluster into this one; the union of representatives is pruned
     * back to numberOfRepresentatives using the farthest-point heuristic of CURE
     *
     * @param other    the cluster being absorbed (reset afterwards)
     * @param distance the distance function
     */
    public void absorb(ICluster<R> other, BiFunction<R, R, Double> distance) {
        List<Weighted<R>> savedRepresentatives = this.representatives;
        savedRepresentatives.addAll(other.getRepresentatives());
        this.representatives = new ArrayList<>();
        // seed the new representative set with the heaviest candidate
        int maxIndex = 0;
        float weight = savedRepresentatives.get(0).weight;
        for (int i = 1; i < savedRepresentatives.size(); i++) {
            if (weight < savedRepresentatives.get(i).weight) {
                weight = savedRepresentatives.get(i).weight;
                maxIndex = i;
            }
        }
        this.representatives.add(savedRepresentatives.get(maxIndex));
        savedRepresentatives.remove(maxIndex);
        sumOfRadius += other.extentMeasure() * other.getWeight();
        this.weight += other.getWeight();
        /**
         * create a list of representatives based on the farthest point method, which
         * correspond to a well scattered set. See
         * https://en.wikipedia.org/wiki/CURE_algorithm
         */
        while (savedRepresentatives.size() > 0 && this.representatives.size() < numberOfRepresentatives) {
            double farthestWeightedDistance = 0.0;
            int farthestIndex = Integer.MAX_VALUE;
            for (int j = 0; j < savedRepresentatives.size(); j++) {
                // only consider candidates carrying a non-trivial share of the weight
                if (savedRepresentatives.get(j).weight > weight / (2 * numberOfRepresentatives)) {
                    double newWeightedDist = distance.apply(this.representatives.get(0).index,
                            savedRepresentatives.get(j).index) * savedRepresentatives.get(j).weight;
                    checkArgument(newWeightedDist >= 0, " weights or distances cannot be negative");
                    for (int i = 1; i < this.representatives.size(); i++) {
                        // BUGFIX: the weight multiplier now applies to the candidate
                        // distance inside min(); previously it multiplied the result
                        // of min(), double-weighting an already-weighted value
                        newWeightedDist = min(newWeightedDist,
                                distance.apply(this.representatives.get(i).index, savedRepresentatives.get(j).index)
                                        * savedRepresentatives.get(j).weight);
                        checkArgument(newWeightedDist >= 0, " weights or distances cannot be negative");
                    }
                    if (newWeightedDist > farthestWeightedDistance) {
                        farthestWeightedDistance = newWeightedDist;
                        farthestIndex = j;
                    }
                }
            }
            if (farthestWeightedDistance == 0.0) {
                break;
            }
            this.representatives.add(savedRepresentatives.get(farthestIndex));
            savedRepresentatives.remove(farthestIndex);
        }
        // absorb the remainder into existing representatives
        for (Weighted<R> representative : savedRepresentatives) {
            double dist = distance.apply(representative.index, this.representatives.get(0).index);
            checkArgument(dist >= 0, "distance cannot be negative");
            double minDist = dist;
            int minIndex = 0;
            for (int i = 1; i < this.representatives.size(); i++) {
                double newDist = distance.apply(this.representatives.get(i).index, representative.index);
                checkArgument(newDist >= 0, "distance cannot be negative");
                if (newDist < minDist) {
                    minDist = newDist;
                    minIndex = i;
                }
            }
            this.representatives.get(minIndex).weight += representative.weight;
            sumOfRadius += representative.weight * ((1 - shrinkage) * minDist + dist * shrinkage);
        }
    }

    // morphed distance: blend of nearest-representative distance and
    // primary-representative distance, controlled by shrinkage
    @Override
    public double distance(R point, BiFunction<R, R, Double> distanceFunction) {
        double dist = distanceFunction.apply(this.representatives.get(0).index, point);
        checkArgument(dist >= 0, "distance cannot be negative");
        double newDist = dist;
        for (int i = 1; i < this.representatives.size(); i++) {
            newDist = min(newDist, distanceFunction.apply(this.representatives.get(i).index, point));
            checkArgument(newDist >= 0, "distance cannot be negative");
        }
        return (1 - shrinkage) * newDist + shrinkage * dist;
    }

    @Override
    public double distance(ICluster<R> other, BiFunction<R, R, Double> distanceFunction) {
        List<Weighted<R>> representatives = other.getRepresentatives();
        double dist = distanceFunction.apply(this.representatives.get(0).index, representatives.get(0).index);
        checkArgument(dist >= 0, "distance cannot be negative");
        double newDist = dist;
        // NOTE(review): the loops start at 1, so pairs (0, j>=1) and (i>=1, 0) are
        // never examined; only (0,0) and (i>=1, j>=1) contribute -- confirm whether
        // the min is intended to range over all pairs
        for (int i = 1; i < this.representatives.size(); i++) {
            for (int j = 1; j < representatives.size(); j++) {
                newDist = min(newDist,
                        distanceFunction.apply(this.representatives.get(i).index, representatives.get(j).index));
                checkArgument(newDist >= 0, "distance cannot be negative");
            }
        }
        return (1 - shrinkage) * newDist + shrinkage * dist;
    }

    @Override
    public List<Weighted<R>> getRepresentatives() {
        return representatives;
    }
}
| 484 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/summarization/MultiCenter.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.summarization;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.util.Weighted;
/**
 * A multi-representative cluster over float[] points that, unlike its generic
 * parent, remembers which point indices were assigned to it so that the extent
 * can be recomputed exactly.
 */
public class MultiCenter extends GenericMultiCenter<float[]> {

    // indices (with assigned weights) of the points currently in this cluster
    ArrayList<Weighted<Integer>> assignedPoints;

    MultiCenter(float[] coordinate, float weight, double shrinkage, int numberOfRepresentatives) {
        super(coordinate, weight, shrinkage, numberOfRepresentatives);
        this.assignedPoints = new ArrayList<>();
    }

    /**
     * static factory with argument validation
     *
     * @param coordinate              initial representative
     * @param weight                  initial weight
     * @param shrinkage               distance-morphing parameter in [0,1]
     * @param numberOfRepresentatives number of representatives in (0,100]
     * @return a new MultiCenter
     */
    public static MultiCenter initialize(float[] coordinate, float weight, double shrinkage,
            int numberOfRepresentatives) {
        checkArgument(shrinkage >= 0 && shrinkage <= 1.0, " parameter has to be in [0,1]");
        checkArgument(numberOfRepresentatives > 0 && numberOfRepresentatives <= 100,
                " the number of representatives has to be in (0,100]");
        return new MultiCenter(coordinate, weight, shrinkage, numberOfRepresentatives);
    }

    // records the assignment locally in addition to the parent's accounting
    public void addPoint(int index, float weight, double dist, float[] point,
            BiFunction<float[], float[], Double> distance) {
        super.addPoint(index, weight, dist, point, distance);
        assignedPoints.add(new Weighted<>(index, weight));
    }

    // clears parent statistics and drops the local point assignments
    public void reset() {
        super.reset();
        assignedPoints = new ArrayList<>();
    }

    /**
     * recomputes the extent (sumOfRadius) exactly from the assigned points; only
     * performed when forced, since it is linear in the number of assignments
     *
     * @param getPoint         maps a stored index to its point
     * @param force            when false, skip the (expensive) recomputation
     * @param distanceFunction the distance function
     * @return the reduction in extent (positive means improvement)
     */
    public double recompute(Function<Integer, float[]> getPoint, boolean force,
            BiFunction<float[], float[], Double> distanceFunction) {
        boolean nothingToDo = assignedPoints.isEmpty() || weight == 0.0 || !force;
        if (nothingToDo) {
            return 0;
        }
        previousSumOFRadius = sumOfRadius;
        double freshSum = 0;
        for (Weighted<Integer> assigned : assignedPoints) {
            // distance(...) validates non-negativity internally
            freshSum += distance(getPoint.apply(assigned.index), distanceFunction) * assigned.weight;
        }
        sumOfRadius = freshSum;
        return previousSumOFRadius - sumOfRadius;
    }

    @Override
    public List<Weighted<Integer>> getAssignedPoints() {
        return assignedPoints;
    }
}
| 485 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/summarization/ICluster.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.summarization;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.util.Weighted;
/**
 * a set of functions that a conceptual "cluster" should satisfy for any generic
 * distance based clustering where a distance function of type from (R,R) into
 * double is provided externally. It is not feasible (short of various
 * assumptions) to check for the validity of a distance function and the
 * clustering would not perform any validity checks. The user is referred to
 * https://en.wikipedia.org/wiki/Metric_(mathematics)
 *
 * It does not escape our attention that the clustering can use multiple
 * different distance functions over its execution. But such should be performed
 * with caution.
 */
public interface ICluster<R> {

    // resetting statistics for a potential reassignment
    void reset();

    double averageRadius();

    // a measure of the noise/blur around a cluster; for single centroid clustering
    // this is the average distance of a point from a cluster representative
    double extentMeasure();

    // weight computation
    double getWeight();

    // merge another cluster of same type
    void absorb(ICluster<R> other, BiFunction<R, R, Double> distance);

    // distance of a point from a cluster, has to be non-negative
    double distance(R point, BiFunction<R, R, Double> distance);

    // distance of another cluster from this cluster, has to be non negative
    double distance(ICluster<R> other, BiFunction<R, R, Double> distance);

    // all potential representatives of a cluster; these are typically chosen to be
    // well scattered
    // by default the first entry is the primary representative
    List<Weighted<R>> getRepresentatives();

    // a primary representative of the cluster; by default it is the first in the
    // list of representatives
    // this additional function allows an option for optimization of runtime as well
    // as alternate
    // representations. For example the distance metric can be altered to be a fixed
    // linear combination
    // of the primary and secondary representatives, as in CURE
    // https://en.wikipedia.org/wiki/CURE_algorithm
    default R primaryRepresentative(BiFunction<R, R, Double> distance) {
        return getRepresentatives().get(0).index;
    }

    // Some of the algorithms, in particular the geometric ones may store the
    // assigned points for
    // iterative refinement. However that can be extremely inefficient if the
    // distance measure does not
    // have sufficient range, for example, string edit distances (for bounded
    // strings) are bounded in a
    // short interval. A soft assignment would create multiple copies of points (as
    // is appropriate) and
    // that can be significantly slower.
    default List<Weighted<Integer>> getAssignedPoints() {
        return Collections.emptyList();
    }

    /**
     * optimize the cluster representation based on assigned points; this is classic
     * iterative optimization useful in EM type algorithms
     *
     * @param getPoint a function that provides a point given an integer index
     * @param force    if set as true perform a slow and accurate recomputation;
     *                 otherwise approximation would suffice
     * @param distance the distance function
     * @return a measure of improvement (if any); this can be useful in the future
     *         as a part of the stopping condition
     */
    double recompute(Function<Integer, R> getPoint, boolean force, BiFunction<R, R, Double> distance);

    // adding a point to a cluster, and possibly updates the extent measure and the
    // assigned points
    void addPoint(int index, float weight, double dist, R point, BiFunction<R, R, Double> distance);
}
| 486 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/summarization/Summarizer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.summarization;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.util.Weighted.createSample;
import static com.amazon.randomcutforest.util.Weighted.prefixPick;
import static java.lang.Math.max;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import java.util.function.BiFunction;
import java.util.function.Function;
import com.amazon.randomcutforest.returntypes.SampleSummary;
import com.amazon.randomcutforest.util.Weighted;
public class Summarizer {

    /**
     * a factor that controls weight assignment for soft clustering; this is the
     * multiple of the minimum distance and should be greater than or equal to 1.
     */
    // NOTE(review): intentionally non-final, presumably to allow tuning -- confirm
    public static double WEIGHT_ALLOCATION_THRESHOLD = 1.25;

    /**
     * the following determines the ratio between the sum of the (average) radius
     * and the separation between centers for a merge; a ratio greater than 1 means
     * significant overlap, a ratio of 0 means merge closest pairs without
     * consideration of separation
     *
     **/
    public static double DEFAULT_SEPARATION_RATIO_FOR_MERGE = 0.8;

    // multiplier applied when deciding the phase-2 cluster budget
    public static int PHASE2_THRESHOLD = 2;

    // bound on list sizes used to keep the computation tractable
    public static int LENGTH_BOUND = 1000;
public static Double L1distance(float[] a, float[] b) {
double dist = 0;
for (int i = 0; i < a.length; i++) {
dist += Math.abs(a[i] - b[i]);
}
return dist;
}
public static Double L2distance(float[] a, float[] b) {
double dist = 0;
for (int i = 0; i < a.length; i++) {
double t = Math.abs(a[i] - b[i]);
dist += t * t;
}
return Math.sqrt(dist);
}
public static Double LInfinitydistance(float[] a, float[] b) {
double dist = 0;
for (int i = 0; i < a.length; i++) {
dist = max(Math.abs(a[i] - b[i]), dist);
}
return dist;
}
    /**
     * a function that reassigns points to clusters; each point's weight is split
     * ("soft" assignment) among all clusters within WEIGHT_ALLOCATION_THRESHOLD
     * times the minimum distance, proportional to the harmonic weighting
     * minDist/dist, after which every cluster recomputes its representation
     *
     * @param sampledPoints   a list of sampled points with weights
     * @param getPoint        a function mapping a stored index to its point
     * @param clusters        a list of current clusters, because random access to
     *                        the elements is necessary
     * @param distance        a distance function
     * @param parallelEnabled a flag enabling limited parallelism; only during
     *                        cluster by cluster recomputation. Using parallel mode
     *                        during the assignment of points does not seem to help
     * @param <R>             the type of the objects being clustered
     */
    public static <R> void assignAndRecompute(List<Weighted<Integer>> sampledPoints, Function<Integer, R> getPoint,
            List<ICluster<R>> clusters, BiFunction<R, R, Double> distance, boolean parallelEnabled) {
        checkArgument(clusters.size() > 0, " cannot be empty list of clusters");
        checkArgument(sampledPoints.size() > 0, " cannot be empty list of points");
        // clear all per-cluster statistics before the fresh assignment
        for (ICluster<R> cluster : clusters) {
            cluster.reset();
        }
        for (Weighted<Integer> point : sampledPoints) {
            if (point.weight > 0) {
                double[] dist = new double[clusters.size()];
                Arrays.fill(dist, Double.MAX_VALUE);
                double minDist = Double.MAX_VALUE;
                int minDistNbr = -1;
                for (int i = 0; i < clusters.size(); i++) {
                    // will check for negative distances
                    dist[i] = clusters.get(i).distance(getPoint.apply(point.index), distance);
                    if (minDist > dist[i]) {
                        minDist = dist[i];
                        minDistNbr = i;
                    }
                    // exact match cannot be improved; stop scanning
                    if (minDist == 0) {
                        break;
                    }
                }
                if (minDist == 0) {
                    // hard assignment: the point coincides with a cluster
                    clusters.get(minDistNbr).addPoint(point.index, point.weight, 0, getPoint.apply(point.index),
                            distance);
                } else {
                    // soft assignment over all clusters within the threshold of minDist
                    double sum = 0;
                    for (int i = 0; i < clusters.size(); i++) {
                        if (dist[i] <= WEIGHT_ALLOCATION_THRESHOLD * minDist) {
                            sum += minDist / dist[i]; // setting up harmonic mean
                        }
                    }
                    for (int i = 0; i < clusters.size(); i++) {
                        if (dist[i] <= WEIGHT_ALLOCATION_THRESHOLD * minDist) {
                            // harmonic mean
                            clusters.get(i).addPoint(point.index, (float) (point.weight * minDist / (dist[i] * sum)),
                                    dist[i], getPoint.apply(point.index), distance);
                        }
                    }
                }
            }
        }
        // recomputation is independent per cluster, hence safely parallelizable
        if (parallelEnabled) {
            clusters.parallelStream().forEach(e -> e.recompute(getPoint, true, distance));
        } else {
            clusters.stream().forEach(e -> e.recompute(getPoint, true, distance));
        }
    }
/**
* The core subroutine for iterative clustering used herein. The clustering
* algorithm borrows from CURE https://en.wikipedia.org/wiki/CURE_algorithm,
* which used sampling as a tradeoff of representationa accuracy versus
* algorithmic efficiency. Note however random sampling can also perform
* denoising and reduce space as a filtering mechanism. Note that hierarchical
* iterative merging strategies can be proven to not degrade clustering
* https://en.wikipedia.org/wiki/Data_stream_clustering with the benefit of
* small space. The algorithm herein proceeds in three phases, where the first
* phase corresponds from the initial seeding to about twice the maximum number
* of clusters one wishes to consider. The second phase corresponds to reducing
* that number to the maximum allowable number. The third phase corresponds to
* continuing the clustering as long as the conditions are similar to the end of
* phase two, thereby enabling us to use a rough estimate for the maximum
* allowed. By default, recomputation of the cluster makes sense in phases 2 and
* 3 -- however can be enabled for phase 1 as well, thereby enabling the regular
* K-Means algorithm to be expressed in the below algorithm as well. The
* algorithm can also express Minimum Spanning Tree based clustering with
* repeated merging of closest pair (which is a capability derived from CURE)
*
* The primary reason for the number of parameters is the ability to invoke this
* function without creating a copy of the points (or equivalent large objects),
* and hence the functions as parameters
*
* @param maxAllowed number of maximum clusters one is interested in
* @param initial an initial number of sampled centers to start
* from
* @param stopAt a hard lower bound on the number of clusters
* @param refs a (possibly sampled) list of references with
* weight
* @param getPoint a function which retrives the point/object given
* an index in the refs
* @param distance a distance function
* @param clusterInitializer a function that creates a cluster given an object
* aand a weight
* @param seed a random seed
* @param parallelEnabled enabling parallel computation in the first phase
* when points are assigned to different sampled
* centers; and the centers are possibly adjusted
* @param phase2GlobalReassign a flag that determines if the points would be
* reassigned when the clusters fall below 1.2 *
* maxAllowed -- this serves as a denoising.
* @param overlapParameter a parameter that controls the ordering of the
* merges as well as the stopping condition of the
* merges
* @param previousClustering a possibly null list of clusters seen previously,
* used as zero weight seeds to smoothen the
* continuous clustering
* @param <R> type of object being clustered
* @return a list of clusters
*/
public static <R> List<ICluster<R>> iterativeClustering(int maxAllowed, int initial, int stopAt,
List<Weighted<Integer>> refs, Function<Integer, R> getPoint, BiFunction<R, R, Double> distance,
BiFunction<R, Float, ICluster<R>> clusterInitializer, long seed, boolean parallelEnabled,
boolean phase2GlobalReassign, double overlapParameter, List<ICluster<R>> previousClustering) {
checkArgument(refs.size() > 0, "empty list, nothing to do");
checkArgument(stopAt > 0, "has to stop at 1 cluster");
checkArgument(stopAt <= maxAllowed, "cannot stop before achieving the limit");
Random rng = new Random(seed);
double sampledSum = refs.stream().map(e -> {
checkArgument(Double.isFinite(e.weight), " weights have to be finite");
checkArgument(e.weight >= 0.0, () -> "negative weights are not meaningful" + e.weight);
return (double) e.weight;
}).reduce(0.0, Double::sum);
checkArgument(sampledSum > 0, " total weight has to be positive");
ArrayList<ICluster<R>> centers = new ArrayList<>();
if (refs.size() < 10 * (initial + 5)) {
for (Weighted<Integer> point : refs) {
centers.add(clusterInitializer.apply(getPoint.apply(point.index), 0f));
}
} else {
for (int k = 0; k < 2 * (initial + 5); k++) {
double wt = rng.nextDouble() * sampledSum;
Weighted<Integer> picked = prefixPick(refs, wt);
centers.add(clusterInitializer.apply(getPoint.apply(picked.index), 0f));
}
}
if (previousClustering != null) {
for (ICluster<R> previousCluster : previousClustering) {
List<Weighted<R>> representatives = previousCluster.getRepresentatives();
for (Weighted<R> representative : representatives) {
centers.add(clusterInitializer.apply(representative.index, 0f));
}
}
}
assignAndRecompute(refs, getPoint, centers, distance, parallelEnabled);
// assignment would change weights, sorting in non-decreasing order
centers.sort(Comparator.comparingDouble(ICluster::getWeight));
while (centers.get(0).getWeight() == 0) {
centers.remove(0);
}
double phase3Distance = 0;
double runningPhase3Distance = 0;
boolean keepReducingCenters = (centers.size() > maxAllowed);
while (keepReducingCenters) {
double measure = 0;
double measureDist = Double.MAX_VALUE;
int lower = 0;
int firstOfMerge = lower;
int secondOfMerge = lower + 1;// will be reset before exiting the loop
boolean foundMerge = false;
double minDist = Double.MAX_VALUE;
while (lower < centers.size() - 1 && !foundMerge) {
// we will keep searching
int minNbr = -1;
for (int j = lower + 1; j < centers.size(); j++) {
double dist = centers.get(lower).distance(centers.get(j), distance);
if (dist == 0) {
foundMerge = true;
firstOfMerge = lower;
secondOfMerge = minNbr = j;
minDist = measureDist = 0.0;
break;
} else {
if (minDist > dist) {
minNbr = j;
minDist = dist;
}
double temp = (centers.get(lower).extentMeasure() + centers.get(j).extentMeasure()
+ phase3Distance) / dist;
if (temp > overlapParameter && measure < temp) {
firstOfMerge = lower;
secondOfMerge = j;
measure = temp;
measureDist = dist;
}
}
}
if (lower == 0 && !foundMerge) {
measureDist = minDist;
// this is set assuming we may be interested in merging the minimum weight
// cluster which corresponds to lower == 0
secondOfMerge = minNbr;
}
++lower;
}
int inital = centers.size();
if (inital > maxAllowed || foundMerge || (inital > stopAt && measure > overlapParameter)) {
centers.get(secondOfMerge).absorb(centers.get(firstOfMerge), distance);
if (phase2GlobalReassign && centers.size() <= PHASE2_THRESHOLD * maxAllowed + 1) {
centers.remove(firstOfMerge);
assignAndRecompute(refs, getPoint, centers, distance, parallelEnabled);
} else {
centers.get(secondOfMerge).recompute(getPoint, false, distance);
centers.remove(firstOfMerge);
}
centers.sort(Comparator.comparingDouble(ICluster::getWeight));
while (centers.get(0).getWeight() == 0.0) {
// this line is reachable via zeroTest() in
// SampleSummaryTest
centers.remove(0);
}
if (inital < 1.2 * maxAllowed + 1) {
// phase 3 kicks in; but this will execute at most once
// note that measureDist can be 0 as well
runningPhase3Distance = max(runningPhase3Distance, measureDist);
if (inital > maxAllowed && centers.size() <= maxAllowed) {
phase3Distance = runningPhase3Distance;
}
}
} else {
keepReducingCenters = false;
}
}
// sort in decreasing weight
centers.sort((o1, o2) -> Double.compare(o2.getWeight(), o1.getWeight()));
return centers;
}
/**
* the following function returns a summary of the input points
*
* @param points points with associated weights
* @param maxAllowed the maximum number of clusters/summary points
* @param initial the initial number of clusters/summary points,
* chosen at random
* @param stopAt a hard lower bound on the number of clusters
* @param phase2GlobalReassign a flag that performs global reassignments when
* the number of clusters is in the range
* [maxAllowed, ceil(1.2*maxAllowed)]
* @param overlapParameter a control for merging clusters
* @param distance a distance function for the points, that
* determines the order of the reverse delete
* however the EM like step uses L1 measure (to be
* robust to noise)
* @param clusterInitializer a function that creates the cluster type given a
* single object and a weight
* @param seed a random seed for controlling the randomness
* @param parallelEnabled flag enabling (limited) parallelism
* @param previousClustering any previous clustering that can be used as zero
* weight seeds to ensure smoothness
* @return a clustering of the input points (Note: the median returned is an
* approximate median; exact computation is unlikely to be critical for
* true applications of summarization)
*/
public static <R> List<ICluster<R>> summarize(List<Weighted<R>> points, int maxAllowed, int initial, int stopAt,
boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance,
BiFunction<R, Float, ICluster<R>> clusterInitializer, long seed, boolean parallelEnabled,
List<ICluster<R>> previousClustering) {
checkArgument(maxAllowed < 100, "are you sure you want more elements in the summary?");
checkArgument(maxAllowed <= initial, "initial parameter should be at least maximum allowed in final result");
double totalWeight = points.stream().map(e -> {
checkArgument(Double.isFinite(e.weight), " weights have to be finite");
checkArgument(e.weight >= 0.0, () -> "negative weights are not meaningful" + e.weight);
return (double) e.weight;
}).reduce(0.0, Double::sum);
checkArgument(totalWeight > 0, " total weight has to be positive");
Random rng = new Random(seed);
// the following list is explicity copied and sorted for potential efficiency
List<Weighted<R>> sampledPoints = createSample(points, rng.nextLong(), 5 * LENGTH_BOUND, 0.005, 1.0);
ArrayList<Weighted<Integer>> refs = new ArrayList<>();
for (int i = 0; i < sampledPoints.size(); i++) {
refs.add(new Weighted<>(i, sampledPoints.get(i).weight));
}
Function<Integer, R> getPoint = (i) -> sampledPoints.get(i).index;
return iterativeClustering(maxAllowed, initial, stopAt, refs, getPoint, distance, clusterInitializer,
rng.nextLong(), parallelEnabled, phase2GlobalReassign, overlapParameter, previousClustering);
}
// same as above, specific for single centroid clustering of float[]
// with an explicit stopping condition as well as a global reassignment option
public static List<ICluster<float[]>> singleCentroidSummarize(List<Weighted<float[]>> points, int maxAllowed,
int initial, int stopAt, boolean phase2GlobalReassign, BiFunction<float[], float[], Double> distance,
long seed, boolean parallelEnabled, List<ICluster<float[]>> previousClustering) {
return summarize(points, maxAllowed, initial, stopAt, phase2GlobalReassign, DEFAULT_SEPARATION_RATIO_FOR_MERGE,
distance, Center::initialize, seed, parallelEnabled, previousClustering);
}
/**
* the following function returns a summary of the input points
*
* @param points points with associated weights
* @param maxAllowed the maximum number of clusters/summary points
* @param initial the initial number of clusters/summary points, chosen
* at random
* @param phase1reassign (this parameter is ignored in the current version, but
* the signature is unchanged for convenience)
* @param distance a distance function for the points, that determines
* the order of the reverse delete however the EM like
* step uses L1 measure (to be robust to noise)
* @param seed a random seed for controlling the randomness
* @param parallelEnabled flag enabling (limited) parallelism
* @return a summary of the input points (Note: the median returned is an
* approximate median; exact computation is unlikely to be critical for
* true applications of summarization)
*/
public static SampleSummary summarize(List<Weighted<float[]>> points, int maxAllowed, int initial,
boolean phase1reassign, BiFunction<float[], float[], Double> distance, long seed, boolean parallelEnabled) {
checkArgument(maxAllowed < 100, "are you sure you want more elements in the summary?");
checkArgument(maxAllowed <= initial, "initial parameter should be at least maximum allowed in final result");
double totalWeight = points.stream().map(e -> {
checkArgument(Double.isFinite(e.weight), " weights have to be finite");
checkArgument(e.weight >= 0.0, () -> "negative weights are not meaningful" + e.weight);
return (double) e.weight;
}).reduce(0.0, Double::sum);
checkArgument(totalWeight > 0, " total weight has to be positive");
Random rng = new Random(seed);
// the following list is explicity copied and sorted for potential efficiency
List<Weighted<float[]>> sampledPoints = createSample(points, rng.nextLong(), 5 * LENGTH_BOUND, 0.005, 1.0);
List<ICluster<float[]>> centers = summarize(sampledPoints, maxAllowed, initial, 1, true,
DEFAULT_SEPARATION_RATIO_FOR_MERGE, distance, Center::initialize, seed, parallelEnabled, null);
float[][] pointList = new float[centers.size()][];
float[] likelihood = new float[centers.size()];
int dimensions = centers.get(0).primaryRepresentative(distance).length;
for (int i = 0; i < centers.size(); i++) {
pointList[i] = Arrays.copyOf(centers.get(i).primaryRepresentative(distance), dimensions);
likelihood[i] = (float) (centers.get(i).getWeight() / totalWeight);
}
return new SampleSummary(sampledPoints, pointList, likelihood);
}
/**
* Same as previous over a flat collection of unweighted float[]
*
* @param points points represented by float[][]
* @param maxAllowed maximum number of clusters in output
* @param initial initial number of points to seed; a control parameter
* that serves both as a denoiser, as well as as a
* facilitator of coninuity (large numbers would
* correspond to MST like clustering)
* @param reassignPerStep unusued in current version
* @param distance distance metric over float []
* @param seed random seed
* @param parallelEnabled flag enabling (limited) parallelism
* @return a list of centers with weights
*/
public static SampleSummary summarize(float[][] points, int maxAllowed, int initial, boolean reassignPerStep,
BiFunction<float[], float[], Double> distance, long seed, Boolean parallelEnabled) {
ArrayList<Weighted<float[]>> weighted = new ArrayList<>();
for (float[] point : points) {
weighted.add(new Weighted<>(point, 1.0f));
}
return summarize(weighted, maxAllowed, initial, reassignPerStep, distance, seed, parallelEnabled);
}
/**
* same as before with common cases filled in, used in analysis of
* ConditionalSamples
*
* @param points points in ProjectedPoint{}
* @param maxAllowed maximum number of groups/clusters
* @param initial a parameter controlling the initialization
* @param reassignPerStep if reassignment is to be performed each step
* @param seed random seed
* @return a summarization
*/
public static SampleSummary l2summarize(List<Weighted<float[]>> points, int maxAllowed, int initial,
boolean reassignPerStep, long seed) {
return summarize(points, maxAllowed, initial, reassignPerStep, Summarizer::L2distance, seed, false);
}
/**
* Same as above, with the most common use cases filled in
*
* @param points points in float[][], each of weight 1.0
* @param maxAllowed maximum number of clusters one is interested in
* @param seed random seed
* @return a summarization
*/
public static SampleSummary l2summarize(float[][] points, int maxAllowed, long seed) {
return summarize(points, maxAllowed, 4 * maxAllowed, false, Summarizer::L2distance, seed, false);
}
/**
*
* @param points points represented by R[]
* @param maxAllowed maximum number of clusters in output
* @param initial initial number of points to seed; a control
* parameter that serves both as a denoiser, as
* well as as a facilitator of coninuity (large
* numbers would correspond to MST like
* clustering)
* @param phase2GlobalReassign a boolean determining global reassignment
* @param overlapParameter a parameter controlling order of mergers
* @param distance distance metric over float []
* @param seed random seed
* @param parallelEnabled flag enabling (limited) parallelism
* @param shrinkage a parameter that morphs from centroidal
* behavior (=1) to robust Minimum Spanning Tree
* (=0)
* @param numberOfRepresentatives the number of representatives ina multicentrod
* representation used to cluster potentially
* non-spherical shapes
* @return a list of centers with weights
*/
public static <R> List<ICluster<R>> multiSummarize(List<R> points, int maxAllowed, int initial, int stopAt,
boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance, long seed,
Boolean parallelEnabled, double shrinkage, int numberOfRepresentatives) {
ArrayList<Weighted<R>> weighted = new ArrayList<>();
for (R point : points) {
weighted.add(new Weighted<>(point, 1.0f));
}
BiFunction<R, Float, ICluster<R>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrinkage,
numberOfRepresentatives);
return summarize(weighted, maxAllowed, initial, stopAt, phase2GlobalReassign, overlapParameter, distance,
clusterInitializer, seed, parallelEnabled, null);
}
// same as above, different input
public static <R> List<ICluster<R>> multiSummarize(R[] points, int maxAllowed, int initial, int stopAt,
boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance, long seed,
Boolean parallelEnabled, double shrinkage, int numberOfRepresentatives) {
ArrayList<Weighted<R>> weighted = new ArrayList<>();
for (R point : points) {
weighted.add(new Weighted<>(point, 1.0f));
}
BiFunction<R, Float, ICluster<R>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrinkage,
numberOfRepresentatives);
return summarize(weighted, maxAllowed, initial, stopAt, phase2GlobalReassign, overlapParameter, distance,
clusterInitializer, seed, parallelEnabled, null);
}
// same as above, with multicenter instead of generic
public static List<ICluster<float[]>> multiSummarize(float[][] points, int maxAllowed, double shrinkage,
int numberOfRepresentatives, long seed) {
ArrayList<Weighted<float[]>> weighted = new ArrayList<>();
for (float[] point : points) {
weighted.add(new Weighted<>(point, 1.0f));
}
BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b,
shrinkage, numberOfRepresentatives);
return summarize(weighted, maxAllowed, 4 * maxAllowed, 1, true, DEFAULT_SEPARATION_RATIO_FOR_MERGE,
Summarizer::L2distance, clusterInitializer, seed, true, null);
}
}
| 487 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/ArgumentParser.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* A utility class for parsing command-line arguments.
*/
public class ArgumentParser {
public static final String ARCHIVE_NAME = "randomcutforest-core-1.0.jar";
private final String runnerClass;
private final String runnerDescription;
private final Map<String, Argument<?>> shortFlags;
private final Map<String, Argument<?>> longFlags;
private final IntegerArgument numberOfTrees;
private final IntegerArgument sampleSize;
private final IntegerArgument windowSize;
private final IntegerArgument shingleSize;
private final BooleanArgument shingleCyclic;
private final StringArgument delimiter;
private final BooleanArgument headerRow;
private final IntegerArgument randomSeed;
/**
* Create a new ArgumentParser.The runner class and runner description will be
* used in help text.
*
* @param runnerClass The name of the runner class where this argument
* parser is being invoked.
* @param runnerDescription A description of the runner class where this
* argument parser is being invoked.
*/
public ArgumentParser(String runnerClass, String runnerDescription) {
this.runnerClass = runnerClass;
this.runnerDescription = runnerDescription;
shortFlags = new HashMap<>();
longFlags = new HashMap<>();
numberOfTrees = new IntegerArgument("-n", "--number-of-trees", "Number of trees to use in the forest.", 100,
n -> checkArgument(n > 0, "number of trees should be greater than 0"));
addArgument(numberOfTrees);
sampleSize = new IntegerArgument("-s", "--sample-size", "Number of points to keep in sample for each tree.",
256, n -> checkArgument(n > 0, "sample size should be greater than 0"));
addArgument(sampleSize);
windowSize = new IntegerArgument("-w", "--window-size", "Window size of the sample or 0 for no window.", 0,
n -> checkArgument(n > 0, "window size should be greater than 0"));
addArgument(windowSize);
shingleSize = new IntegerArgument("-g", "--shingle-size", "Shingle size to use.", 1,
n -> checkArgument(n > 0, "shingle size should be greater than 0"));
addArgument(shingleSize);
shingleCyclic = new BooleanArgument("-c", "--shingle-cyclic",
"Set to 'true' to use cyclic shingles instead of linear shingles.", false);
addArgument(shingleCyclic);
delimiter = new StringArgument("-d", "--delimiter", "The character or string used as a field delimiter.", ",");
addArgument(delimiter);
headerRow = new BooleanArgument(null, "--header-row", "Set to 'true' if the data contains a header row.",
false);
addArgument(headerRow);
randomSeed = new IntegerArgument(null, "--random-seed", "Random seed to use in the Random Cut Forest", 42);
addArgument(randomSeed);
}
/**
* Add a new argument to this argument parser.
*
* @param argument An Argument instance for a command-line argument that should
* be parsed.
*/
protected void addArgument(Argument<?> argument) {
checkNotNull(argument, "argument should not be null");
checkArgument(argument.getShortFlag() == null || !shortFlags.containsKey(argument.getShortFlag()),
String.format("An argument mapping already exists for %s", argument.getShortFlag()));
checkArgument(!longFlags.containsKey(argument.getLongFlag()),
String.format("An argument mapping already exists for %s", argument.getLongFlag()));
if (argument.getShortFlag() != null) {
shortFlags.put(argument.getShortFlag(), argument);
}
longFlags.put(argument.getLongFlag(), argument);
}
/**
* Remove the argument with the given long flag from help messages. This allows
* subclasses to suppress arguments as needed. The argument will still exist in
* this object with its default value.
*
* @param longFlag The long flag corresponding to the argument being removed
*/
protected void removeArgument(String longFlag) {
Argument<?> argument = longFlags.get(longFlag);
if (argument != null) {
longFlags.remove(longFlag);
shortFlags.remove(argument.getShortFlag());
}
}
/**
* Parse the given array of command-line arguments.
*
* @param arguments An array of command-line arguments.
*/
public void parse(String... arguments) {
int i = 0;
while (i < arguments.length) {
String flag = arguments[i];
try {
if (shortFlags.containsKey(flag)) {
shortFlags.get(flag).parse(arguments[++i]);
} else if (longFlags.containsKey(flag)) {
longFlags.get(flag).parse(arguments[++i]);
} else if ("-h".equals(flag) || "--help".equals(flag)) {
printUsage();
Runtime.getRuntime().exit(0);
} else {
throw new IllegalArgumentException("Unknown argument: " + flag);
}
} catch (Exception e) {
printUsageAndExit("%s: %s", e.getClass().getName(), e.getMessage());
}
i++;
}
}
/**
* Print a usage message to STDOUT.
*/
public void printUsage() {
System.out.println(
String.format("Usage: java -cp %s %s [options] < input_file > output_file", ARCHIVE_NAME, runnerClass));
System.out.println();
System.out.println(runnerDescription);
System.out.println();
System.out.println("Options:");
longFlags.values().stream().map(Argument::getHelpMessage).sorted()
.forEach(msg -> System.out.println("\t" + msg));
System.out.println();
System.out.println("\t--help, -h: Print this help message and exit.");
}
/**
* Print an error message, the usage message, and exit the application.
*
* @param errorMessage An error message to show the user.
* @param formatObjects An array of format objects that will be interpolated
* into the error message using {@link String#format}.
*/
public void printUsageAndExit(String errorMessage, Object... formatObjects) {
System.err.println("Error: " + String.format(errorMessage, formatObjects));
printUsage();
System.exit(1);
}
/**
* @return the user-specified value of the number-of-trees parameter.
*/
public int getNumberOfTrees() {
return numberOfTrees.getValue();
}
/**
* @return the user-specified value of the sample-size parameter.
*/
public int getSampleSize() {
return sampleSize.getValue();
}
/**
* @return the user-specified value of the window-size parameter
*/
public int getWindowSize() {
return windowSize.getValue();
}
/**
* @return the user-specified value of the time-decay parameter
*/
public double getTimeDecay() {
if (getWindowSize() > 0) {
return 1.0 / getWindowSize();
} else {
return 0.0;
}
}
/**
* @return the user-specified value of the shingle-size parameter
*/
public int getShingleSize() {
return shingleSize.getValue();
}
/**
* @return the user-specified value of the shingle-cyclic parameter
*/
public boolean getShingleCyclic() {
return shingleCyclic.getValue();
}
/**
* @return the user-specified value of the delimiter parameter
*/
public String getDelimiter() {
return delimiter.getValue();
}
/**
* @return the user-specified value of the header-row parameter
*/
public boolean getHeaderRow() {
return headerRow.getValue();
}
/**
* @return the user-specified value of the random-seed parameter
*/
public int getRandomSeed() {
return randomSeed.getValue();
}
public static class Argument<T> {
private final String shortFlag;
private final String longFlag;
private final String description;
private final T defaultValue;
private final Function<String, T> parseFunction;
private final Consumer<T> validateFunction;
private T value;
public Argument(String shortFlag, String longFlag, String description, T defaultValue,
Function<String, T> parseFunction, Consumer<T> validateFunction) {
this.shortFlag = shortFlag;
this.longFlag = longFlag;
this.description = description;
this.defaultValue = defaultValue;
this.parseFunction = parseFunction;
this.validateFunction = validateFunction;
value = defaultValue;
}
public Argument(String shortFlag, String longFlag, String description, T defaultValue,
Function<String, T> parseFunction) {
this(shortFlag, longFlag, description, defaultValue, parseFunction, t -> {
});
}
public String getShortFlag() {
return shortFlag;
}
public String getLongFlag() {
return longFlag;
}
public String getDescription() {
return description;
}
public T getDefaultValue() {
return defaultValue;
}
public String getHelpMessage() {
if (shortFlag != null) {
return String.format("%s, %s: %s (default: %s)", longFlag, shortFlag, description, defaultValue);
} else {
return String.format("%s: %s (default: %s)", longFlag, description, defaultValue);
}
}
public void parse(String string) {
value = parseFunction.apply(string);
validateFunction.accept(value);
}
public T getValue() {
return value;
}
}
public static class StringArgument extends Argument<String> {
public StringArgument(String shortFlag, String longFlag, String description, String defaultValue,
Consumer<String> validateFunction) {
super(shortFlag, longFlag, description, defaultValue, x -> x, validateFunction);
}
public StringArgument(String shortFlag, String longFlag, String description, String defaultValue) {
super(shortFlag, longFlag, description, defaultValue, x -> x);
}
}
public static class BooleanArgument extends Argument<Boolean> {
public BooleanArgument(String shortFlag, String longFlag, String description, boolean defaultValue) {
super(shortFlag, longFlag, description, defaultValue, Boolean::parseBoolean);
}
}
public static class IntegerArgument extends Argument<Integer> {
public IntegerArgument(String shortFlag, String longFlag, String description, int defaultValue,
Consumer<Integer> validateFunction) {
super(shortFlag, longFlag, description, defaultValue, Integer::parseInt, validateFunction);
}
public IntegerArgument(String shortFlag, String longFlag, String description, int defaultValue) {
super(shortFlag, longFlag, description, defaultValue, Integer::parseInt);
}
}
public static class DoubleArgument extends Argument<Double> {
public DoubleArgument(String shortFlag, String longFlag, String description, double defaultValue,
Consumer<Double> validateFunction) {
super(shortFlag, longFlag, description, defaultValue, Double::parseDouble, validateFunction);
}
public DoubleArgument(String shortFlag, String longFlag, String description, double defaultValue) {
super(shortFlag, longFlag, description, defaultValue, Double::parseDouble);
}
}
}
| 488 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/LineTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.util.List;
import com.amazon.randomcutforest.RandomCutForest;
/**
* This interface is used by SimpleRunner to transform input lines into output
* lines.
*/
public interface LineTransformer {
/**
* For the given parsed input point, return a list of string values that should
* be written as output. The list of strings will be joined together using the
* user-specified delimiter.
*
* @param point A point value that was parsed from the input stream.
* @return a list of string values that should be written as output.
*/
List<String> getResultValues(double[] point);
/**
* @return a list of string values that should be written to the output when
* processing a line if there is no input point available. This method
* is invoked when shingling is enabled before the first shingle is
* full.
*/
List<String> getEmptyResultValue();
/**
* @return a list of column names to write to the output if headers are enabled.
*/
List<String> getResultColumnNames();
/**
* @return the RandomCutForest instance which is being used internally to
* process lines.
*/
RandomCutForest getForest();
}
| 489 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/AnomalyScoreRunner.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import com.amazon.randomcutforest.RandomCutForest;
/**
* A command-line application that computes anomaly scores. Points are read from
* STDIN and output is written to STDOUT. Output consists of the original input
* point with the anomaly score appended.
*/
public class AnomalyScoreRunner extends SimpleRunner {
public AnomalyScoreRunner() {
super(AnomalyScoreRunner.class.getName(),
"Compute scalar anomaly scores from the input rows and append them to the output rows.",
AnomalyScoreTransformer::new);
}
public static void main(String... args) throws IOException {
AnomalyScoreRunner runner = new AnomalyScoreRunner();
runner.parse(args);
System.out.println("Reading from stdin... (Ctrl-c to exit)");
runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),
new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
System.out.println("Done.");
}
public static class AnomalyScoreTransformer implements LineTransformer {
private final RandomCutForest forest;
public AnomalyScoreTransformer(RandomCutForest forest) {
this.forest = forest;
}
@Override
public List<String> getResultValues(double... point) {
double score = forest.getAnomalyScore(point);
forest.update(point);
return Collections.singletonList(Double.toString(score));
}
@Override
public List<String> getEmptyResultValue() {
return Collections.singletonList("NA");
}
@Override
public List<String> getResultColumnNames() {
return Collections.singletonList("anomaly_score");
}
@Override
public RandomCutForest getForest() {
return forest;
}
}
}
| 490 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/ImputeRunner.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.StringJoiner;
/**
* A command-line application that imputes missing values. Points are read from
* STDIN and output is written to STDOUT. One output point is written for each
* input point. If the input point does not contain any missing value
* indicators, then it is copied as-is to the output. If an input point contains
* one or more missing value indicators, then the missing values are imputed and
* the imputed point is written to the output.
*/
public class ImputeRunner extends SimpleRunner {
private String missingValueMarker;
private int numberOfMissingValues;
private int[] missingIndexes;
public ImputeRunner() {
super(new ImputeArgumentParser(), UpdateOnlyTransformer::new);
}
public static void main(String... args) throws IOException {
ImputeRunner runner = new ImputeRunner();
runner.parse(args);
System.out.println("Reading from stdin... (Ctrl-c to exit)");
runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),
new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
System.out.println("Done.");
}
@Override
protected void prepareAlgorithm(int dimensions) {
super.prepareAlgorithm(dimensions);
missingIndexes = new int[dimensions];
missingValueMarker = ((ImputeArgumentParser) argumentParser).getMissingValueMarker();
}
@Override
protected void processLine(String[] values, PrintWriter out) {
numberOfMissingValues = 0;
for (int i = 0; i < getPointSize(); i++) {
if (missingValueMarker.equals(values[i])) {
missingIndexes[numberOfMissingValues++] = i;
values[i] = "0";
}
}
if (numberOfMissingValues > 0) {
parsePoint(values);
double[] imputedPoint = algorithm.getForest().imputeMissingValues(pointBuffer, numberOfMissingValues,
missingIndexes);
StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());
Arrays.stream(imputedPoint).mapToObj(Double::toString).forEach(joiner::add);
out.println(joiner.toString());
} else {
super.processLine(values, out);
}
}
public static class ImputeArgumentParser extends ArgumentParser {
private final StringArgument missingValueMarker;
public ImputeArgumentParser() {
super(ImputeRunner.class.getName(),
"Read rows with missing values and write rows with missing values imputed.");
missingValueMarker = new StringArgument(null, "--missing-value-marker",
"String used to represent a missing value in the data.", "NA");
addArgument(missingValueMarker);
removeArgument("--shingle-size");
removeArgument("--shingle-cyclic");
}
public String getMissingValueMarker() {
return missingValueMarker.getValue();
}
}
}
| 491 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/SimpleRunner.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;
import java.util.function.Function;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.util.ShingleBuilder;
/**
* A simple command-line application that parses command-line arguments, creates
* a RandomCutForest instance based on those arguments, reads values from STDIN
* and writes results to STDOUT.
*/
public class SimpleRunner {

    /** Parses the command-line arguments controlling this runner. */
    protected final ArgumentParser argumentParser;

    /** Factory that builds the line transformer once the forest has been constructed. */
    protected final Function<RandomCutForest, LineTransformer> algorithmInitializer;

    /** The transformer applied to each input line; created lazily on the first data line. */
    protected LineTransformer algorithm;

    /** Accumulates consecutive input points into shingles. */
    protected ShingleBuilder shingleBuilder;

    /** Reusable buffer holding the parsed values of the current input line. */
    protected double[] pointBuffer;

    /** Reusable buffer holding the current shingled point. */
    protected double[] shingleBuffer;

    /** 1-based index of the input line currently being processed. */
    protected int lineNumber;

    /**
     * Create a new SimpleRunner.
     *
     * @param runnerClass          The name of the runner class. This will be
     *                             displayed in the help text.
     * @param runnerDescription    A description of the runner class. This will be
     *                             displayed in the help text.
     * @param algorithmInitializer A factory method to create a new LineTransformer
     *                             instance from a RandomCutForest.
     */
    public SimpleRunner(String runnerClass, String runnerDescription,
            Function<RandomCutForest, LineTransformer> algorithmInitializer) {
        this(new ArgumentParser(runnerClass, runnerDescription), algorithmInitializer);
    }

    /**
     * Create a new SimpleRunner.
     *
     * @param argumentParser       An argument parser that will be used by this
     *                             runner to parse command-line arguments.
     * @param algorithmInitializer A factory method to create a new LineTransformer
     *                             instance from a RandomCutForest.
     */
    public SimpleRunner(ArgumentParser argumentParser,
            Function<RandomCutForest, LineTransformer> algorithmInitializer) {
        this.argumentParser = argumentParser;
        this.algorithmInitializer = algorithmInitializer;
    }

    /**
     * Parse the given command-line arguments.
     *
     * @param arguments An array of command-line arguments.
     */
    public void parse(String... arguments) {
        argumentParser.parse(arguments);
    }

    /**
     * Read data from an input stream, apply the desired transformation, and write
     * the result to an output stream.
     *
     * @param in  An input stream where input values will be read.
     * @param out An output stream where the result values will be written.
     * @throws IOException if IO errors are encountered during reading or writing.
     */
    public void run(BufferedReader in, PrintWriter out) throws IOException {
        String line;
        while ((line = in.readLine()) != null) {
            lineNumber++;
            // NOTE: String.split interprets the delimiter as a regular expression, so
            // regex metacharacters (e.g. "|") must be escaped by the user.
            String[] values = line.split(argumentParser.getDelimiter());
            if (pointBuffer == null) {
                // First line seen: its field count determines the input dimension.
                prepareAlgorithm(values.length);
            }
            if (lineNumber == 1 && argumentParser.getHeaderRow()) {
                writeHeader(values, out);
                continue;
            }
            processLine(values, out);
        }
        finish(out);
        out.flush();
    }

    /**
     * Set up the internal RandomCutForest instance and line transformer.
     *
     * @param dimensions The number of dimensions in the input data.
     */
    protected void prepareAlgorithm(int dimensions) {
        pointBuffer = new double[dimensions];
        shingleBuilder = new ShingleBuilder(dimensions, argumentParser.getShingleSize(),
                argumentParser.getShingleCyclic());
        shingleBuffer = new double[shingleBuilder.getShingledPointSize()];

        // The forest operates on shingled points, so its dimension is the shingled size.
        RandomCutForest forest = RandomCutForest.builder().numberOfTrees(argumentParser.getNumberOfTrees())
                .sampleSize(argumentParser.getSampleSize()).dimensions(shingleBuilder.getShingledPointSize())
                .timeDecay(argumentParser.getTimeDecay()).randomSeed(argumentParser.getRandomSeed()).build();

        algorithm = algorithmInitializer.apply(forest);
    }

    /**
     * Write a header row to the output stream.
     *
     * @param values The array of values that are used to create the header. These
     *               values will be joined together using the user-specified
     *               delimiter.
     * @param out    The output stream where the header will be written.
     */
    protected void writeHeader(String[] values, PrintWriter out) {
        StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());
        Arrays.stream(values).forEach(joiner::add);
        algorithm.getResultColumnNames().forEach(joiner::add);
        out.println(joiner.toString());
    }

    /**
     * Process a single line of input data and write the result to the output
     * stream.
     *
     * @param values An array of string values taken from the input stream. These
     *               values will be parsed into an array of doubles before being
     *               transformed and written to the output stream.
     * @param out    The output stream where the transformed line will be written.
     * @throws IllegalArgumentException if the line does not have the expected
     *                                  number of values.
     */
    protected void processLine(String[] values, PrintWriter out) {
        if (values.length != pointBuffer.length) {
            throw new IllegalArgumentException(
                    String.format("Wrong number of values on line %d. Expected %d but found %d.", lineNumber,
                            pointBuffer.length, values.length));
        }

        parsePoint(values);
        shingleBuilder.addPoint(pointBuffer);

        // Until the shingle is full there is no complete point to transform, so the
        // transformer's placeholder output is used instead.
        List<String> result;
        if (shingleBuilder.isFull()) {
            shingleBuilder.getShingle(shingleBuffer);
            result = algorithm.getResultValues(shingleBuffer);
        } else {
            result = algorithm.getEmptyResultValue();
        }

        StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());
        Arrays.stream(values).forEach(joiner::add);
        result.forEach(joiner::add);

        out.println(joiner.toString());
    }

    /**
     * Parse the array of string values into doubles and write them to an internal
     * buffer.
     *
     * @param stringValues An array of string-encoded double values.
     */
    protected void parsePoint(String[] stringValues) {
        for (int i = 0; i < pointBuffer.length; i++) {
            pointBuffer[i] = Double.parseDouble(stringValues[i]);
        }
    }

    /**
     * This method is used to write any final output to the output stream after the
     * input stream has been fully processed.
     *
     * @param out The output stream where additional output text may be written.
     */
    protected void finish(PrintWriter out) {
    }

    /**
     * @return the size of the internal point buffer.
     */
    protected int getPointSize() {
        return pointBuffer != null ? pointBuffer.length : 0;
    }

    /**
     * @return the size of the internal shingled point buffer (i.e. the shingled
     *         point dimension, not the number of points per shingle).
     */
    protected int getShingleSize() {
        return shingleBuffer != null ? shingleBuffer.length : 0;
    }
}
| 492 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/UpdateOnlyTransformer.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.util.Collections;
import java.util.List;
import com.amazon.randomcutforest.RandomCutForest;
/**
* A line transformer that updates its internal RandomCutForest instance but
* does not produce any output.
*/
public class UpdateOnlyTransformer implements LineTransformer {

    /** Shared empty result: this transformer never contributes output columns. */
    private static final List<String> NO_OUTPUT = Collections.emptyList();

    /** The forest that receives every input point. */
    private final RandomCutForest forest;

    public UpdateOnlyTransformer(RandomCutForest forest) {
        this.forest = forest;
    }

    /**
     * Feed the point to the forest. No result columns are produced.
     *
     * @param point the input point used to update the model.
     * @return an empty list.
     */
    @Override
    public List<String> getResultValues(double... point) {
        forest.update(point);
        return NO_OUTPUT;
    }

    /** @return an empty list, since this transformer has no output columns. */
    @Override
    public List<String> getEmptyResultValue() {
        return NO_OUTPUT;
    }

    /** @return an empty list: nothing is appended to the header row. */
    @Override
    public List<String> getResultColumnNames() {
        return NO_OUTPUT;
    }

    /** @return the forest backing this transformer. */
    @Override
    public RandomCutForest getForest() {
        return forest;
    }
}
| 493 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/SimpleDensityRunner.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.returntypes.DiVector;
/**
* A command-line application that computes directional density. Points are read
* from STDIN and output is written to STDOUT. Output consists of the original
* input point with the directional density vector appended.
*/
public class SimpleDensityRunner extends SimpleRunner {

    public SimpleDensityRunner() {
        super(SimpleDensityRunner.class.getName(),
                "Compute directional density vectors from the input rows and append them to the output rows.",
                SimpleDensityRunner.SimpleDensityTransformer::new);
    }

    /**
     * Entry point: read rows from STDIN and write each row with its directional
     * density vector appended to STDOUT.
     *
     * @param args Command-line arguments parsed by the default argument parser.
     * @throws IOException if reading or writing fails.
     */
    public static void main(String... args) throws IOException {
        SimpleDensityRunner runner = new SimpleDensityRunner();
        runner.parse(args);
        System.out.println("Reading from stdin... (Ctrl-c to exit)");
        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),
                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
        System.out.println("Done.");
    }

    /** Transformer that appends one up/down probability-mass pair per dimension. */
    public static class SimpleDensityTransformer implements LineTransformer {

        private final RandomCutForest forest;

        public SimpleDensityTransformer(RandomCutForest forest) {
            this.forest = forest;
        }

        /**
         * Compute the directional density of the point (before updating the forest with
         * the same point) and render it as 2 * dimensions string columns.
         *
         * @param point the (possibly shingled) input point.
         * @return the high/low density values, interleaved per dimension.
         */
        @Override
        public List<String> getResultValues(double... point) {
            DiVector densityFactors = forest.getSimpleDensity(point).getDirectionalDensity();
            forest.update(point);

            List<String> result = new ArrayList<>(2 * forest.getDimensions());
            for (int i = 0; i < forest.getDimensions(); i++) {
                // Format with Locale.ROOT so the decimal separator is always '.'. The
                // default locale may use ',' for decimals, which would corrupt
                // comma-delimited output rows (and is inconsistent with the other
                // runners, which emit locale-independent Double.toString values).
                result.add(String.format(Locale.ROOT, "%f", densityFactors.high[i]));
                result.add(String.format(Locale.ROOT, "%f", densityFactors.low[i]));
            }
            return result;
        }

        /** @return 2 * dimensions "NA" placeholders, used while no result is available. */
        @Override
        public List<String> getEmptyResultValue() {
            List<String> result = new ArrayList<>(2 * forest.getDimensions());
            for (int i = 0; i < 2 * forest.getDimensions(); i++) {
                result.add("NA");
            }
            return result;
        }

        /** @return the header names matching {@link #getResultValues} column order. */
        @Override
        public List<String> getResultColumnNames() {
            List<String> result = new ArrayList<>(2 * forest.getDimensions());
            for (int i = 0; i < forest.getDimensions(); i++) {
                result.add(String.format(Locale.ROOT, "prob_mass_%d_up", i));
                result.add(String.format(Locale.ROOT, "prob_mass_%d_down", i));
            }
            return result;
        }

        /** @return the forest backing this transformer. */
        @Override
        public RandomCutForest getForest() {
            return forest;
        }
    }
}
| 494 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/runner/AnomalyAttributionRunner.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.runner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.returntypes.DiVector;
/**
* A command-line application that computes anomaly attribution. Points are read
* from STDIN and output is written to STDOUT. Output consists of the original
* input point with the anomaly attribution vector appended.
*/
public class AnomalyAttributionRunner extends SimpleRunner {

    public AnomalyAttributionRunner() {
        super(AnomalyAttributionRunner.class.getName(),
                "Compute directional anomaly scores from the input rows and append them to the output rows.",
                AnomalyAttributionTransformer::new);
    }

    /**
     * Entry point: read rows from STDIN and write each row with its anomaly
     * attribution vector appended to STDOUT.
     *
     * @param args Command-line arguments parsed by the default argument parser.
     * @throws IOException if reading or writing fails.
     */
    public static void main(String... args) throws IOException {
        AnomalyAttributionRunner runner = new AnomalyAttributionRunner();
        runner.parse(args);
        System.out.println("Reading from stdin... (Ctrl-c to exit)");
        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),
                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
        System.out.println("Done.");
    }

    /** Transformer that appends one low/high attribution pair per dimension. */
    public static class AnomalyAttributionTransformer implements LineTransformer {

        private final RandomCutForest forest;

        public AnomalyAttributionTransformer(RandomCutForest forest) {
            this.forest = forest;
        }

        /**
         * Compute the anomaly attribution of the point (before updating the forest with
         * the same point) and render it as interleaved low/high string columns.
         *
         * @param point the (possibly shingled) input point.
         * @return the low/high attribution values, interleaved per dimension.
         */
        @Override
        public List<String> getResultValues(double... point) {
            DiVector attribution = forest.getAnomalyAttribution(point);
            forest.update(point);

            int dims = attribution.getDimensions();
            List<String> row = new ArrayList<>(2 * forest.getDimensions());
            for (int d = 0; d < dims; d++) {
                row.add(Double.toString(attribution.low[d]));
                row.add(Double.toString(attribution.high[d]));
            }
            return row;
        }

        /** @return 2 * dimensions "NA" placeholders, used while no result is available. */
        @Override
        public List<String> getEmptyResultValue() {
            int width = 2 * forest.getDimensions();
            List<String> row = new ArrayList<>(width);
            while (row.size() < width) {
                row.add("NA");
            }
            return row;
        }

        /** @return the header names matching {@link #getResultValues} column order. */
        @Override
        public List<String> getResultColumnNames() {
            List<String> names = new ArrayList<>(2 * forest.getDimensions());
            for (int d = 0; d < forest.getDimensions(); d++) {
                names.add("anomaly_low_" + d);
                names.add("anomaly_high_" + d);
            }
            return names;
        }

        /** @return the forest backing this transformer. */
        @Override
        public RandomCutForest getForest() {
            return forest;
        }
    }
}
| 495 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/IContextualStateMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
/**
 * A mapper between a model and its serializable state, where rebuilding the
 * model requires additional context beyond the state object itself.
 *
 * @param <Model>        The type of model being mapped.
 * @param <State>        The state type produced from the model.
 * @param <ContextState> The type of the extra context needed to rebuild the model.
 */
public interface IContextualStateMapper<Model, State, ContextState> {

    /**
     * Capture the model's state.
     *
     * @param model The model to serialize.
     * @return a state object representing the model.
     */
    State toState(Model model);

    /**
     * Rebuild a model from its state and a context object.
     *
     * @param state        A state object previously produced by {@link #toState}.
     * @param contextState The extra context required for reconstruction.
     * @param seed         A random seed for the rebuilt model.
     * @return the reconstructed model.
     */
    Model toModel(State state, ContextState contextState, long seed);

    /**
     * Rebuild a model using a default seed of 0.
     *
     * @param state        A state object previously produced by {@link #toState}.
     * @param contextState The extra context required for reconstruction.
     * @return the reconstructed model.
     */
    default Model toModel(State state, ContextState contextState) {
        return toModel(state, contextState, 0L);
    }
}
| 496 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/ExecutionContext.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
import java.io.Serializable;
import lombok.Data;
/**
 * Serializable capture of a forest's execution settings (threading options),
 * stored alongside the model state by {@link RandomCutForestMapper}.
 */
@Data
public class ExecutionContext implements Serializable {
    private static final long serialVersionUID = 1L;

    // Whether the rebuilt forest should run tree operations in parallel.
    private boolean parallelExecutionEnabled;
    // Thread pool size applied when parallel execution is enabled.
    private int threadPoolSize;

    /**
     * A string to define an "execution mode" that can be used to set multiple
     * configuration options. This field is not currently in use.
     */
    private String mode;
}
| 497 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/RandomCutForestMapper.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.Setter;
import com.amazon.randomcutforest.ComponentList;
import com.amazon.randomcutforest.IComponentModel;
import com.amazon.randomcutforest.RandomCutForest;
import com.amazon.randomcutforest.config.Config;
import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.executor.PointStoreCoordinator;
import com.amazon.randomcutforest.executor.SamplerPlusTree;
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.sampler.Weighted;
import com.amazon.randomcutforest.state.sampler.CompactSamplerMapper;
import com.amazon.randomcutforest.state.sampler.CompactSamplerState;
import com.amazon.randomcutforest.state.store.PointStoreMapper;
import com.amazon.randomcutforest.state.store.PointStoreState;
import com.amazon.randomcutforest.state.tree.CompactRandomCutTreeContext;
import com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;
import com.amazon.randomcutforest.state.tree.RandomCutTreeMapper;
import com.amazon.randomcutforest.store.IPointStore;
import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.tree.ITree;
import com.amazon.randomcutforest.tree.RandomCutTree;
/**
* A utility class for creating a {@link RandomCutForestState} instance from a
* {@link RandomCutForest} instance and vice versa.
*/
@Getter
@Setter
public class RandomCutForestMapper
implements IContextualStateMapper<RandomCutForest, RandomCutForestState, ExecutionContext> {
    /**
     * A flag indicating whether the structure of the trees in the forest should be
     * included in the state object. If true, then data describing the bounding
     * boxes and cuts defining each tree will be written to the
     * {@link RandomCutForestState} object produced by the mapper. Tree state is not
     * saved by default.
     */
    private boolean saveTreeStateEnabled = false;

    /**
     * A flag indicating whether the point store should be included in the
     * {@link RandomCutForestState} object produced by the mapper. This is saved by
     * default for compact trees.
     */
    private boolean saveCoordinatorStateEnabled = true;

    /**
     * A flag indicating whether the samplers should be included in the
     * {@link RandomCutForestState} object produced by the mapper. This is saved by
     * default for all trees.
     */
    private boolean saveSamplerStateEnabled = true;

    /**
     * A flag indicating whether the executor context should be included in the
     * {@link RandomCutForestState} object produced by the mapper. Executor context
     * is not saved by default.
     */
    private boolean saveExecutorContextEnabled = false;

    /**
     * If true, then the arrays are compressed via a simple data-dependent scheme.
     */
    private boolean compressionEnabled = true;

    /**
     * If true, the serialized state requires that the samplers repopulate the trees
     * before the trees can be used again. That trades extra reconstruction time for
     * a smaller serialization.
     */
    private boolean partialTreeStateEnabled = false;
    /**
     * Create a {@link RandomCutForestState} object representing the state of the
     * given forest. If the forest is compact and the {@code saveTreeState} flag is
     * set to true, then structure of the trees in the forest will be included in
     * the state object. If the flag is set to false, then the state object will
     * only contain the sampler data for each tree. If the
     * {@code saveExecutorContext} is true, then the executor context will be
     * included in the state object.
     *
     * @param forest A Random Cut Forest whose state we want to capture.
     * @return a {@link RandomCutForestState} object representing the state of the
     *         given forest.
     * @throws IllegalArgumentException if the {@code saveTreeState} flag is true
     *                                  and the forest is not compact.
     */
    @Override
    public RandomCutForestState toState(RandomCutForest forest) {
        // Tree structure can only be serialized for the compact representation.
        if (saveTreeStateEnabled) {
            checkArgument(forest.isCompact(), "tree state cannot be saved for noncompact forests");
        }
        RandomCutForestState state = new RandomCutForestState();
        // Record the hyperparameters and bookkeeping needed to rebuild an equivalent forest.
        state.setNumberOfTrees(forest.getNumberOfTrees());
        state.setDimensions(forest.getDimensions());
        state.setTimeDecay(forest.getTimeDecay());
        state.setSampleSize(forest.getSampleSize());
        state.setShingleSize(forest.getShingleSize());
        state.setCenterOfMassEnabled(forest.isCenterOfMassEnabled());
        state.setOutputAfter(forest.getOutputAfter());
        state.setStoreSequenceIndexesEnabled(forest.isStoreSequenceIndexesEnabled());
        state.setTotalUpdates(forest.getTotalUpdates());
        state.setCompact(forest.isCompact());
        state.setInternalShinglingEnabled(forest.isInternalShinglingEnabled());
        state.setBoundingBoxCacheFraction(forest.getBoundingBoxCacheFraction());
        // Record which optional sections were written, so deserialization can validate
        // that the data it needs is present (see toModel's checkArgument calls).
        state.setSaveSamplerStateEnabled(saveSamplerStateEnabled);
        state.setSaveTreeStateEnabled(saveTreeStateEnabled);
        state.setSaveCoordinatorStateEnabled(saveCoordinatorStateEnabled);
        state.setPrecision(forest.getPrecision().name());
        state.setCompressed(compressionEnabled);
        state.setPartialTreeState(partialTreeStateEnabled);

        if (saveExecutorContextEnabled) {
            ExecutionContext executionContext = new ExecutionContext();
            executionContext.setParallelExecutionEnabled(forest.isParallelExecutionEnabled());
            executionContext.setThreadPoolSize(forest.getThreadPoolSize());
            state.setExecutionContext(executionContext);
        }

        if (saveCoordinatorStateEnabled) {
            // Serialize the point store held by the forest's update coordinator.
            PointStoreCoordinator<?> pointStoreCoordinator = (PointStoreCoordinator<?>) forest.getUpdateCoordinator();
            PointStoreMapper mapper = new PointStoreMapper();
            mapper.setCompressionEnabled(compressionEnabled);
            mapper.setNumberOfTrees(forest.getNumberOfTrees());
            PointStoreState pointStoreState = mapper.toState((PointStore) pointStoreCoordinator.getStore());
            state.setPointStoreState(pointStoreState);
        }

        List<CompactSamplerState> samplerStates = null;
        if (saveSamplerStateEnabled) {
            samplerStates = new ArrayList<>();
        }
        List<ITree<Integer, ?>> trees = null;
        if (saveTreeStateEnabled) {
            trees = new ArrayList<>();
        }

        // Each forest component pairs one sampler with one tree; collect both in order.
        CompactSamplerMapper samplerMapper = new CompactSamplerMapper();
        samplerMapper.setCompressionEnabled(compressionEnabled);
        for (IComponentModel<?, ?> component : forest.getComponents()) {
            SamplerPlusTree<Integer, ?> samplerPlusTree = (SamplerPlusTree<Integer, ?>) component;
            CompactSampler sampler = (CompactSampler) samplerPlusTree.getSampler();
            if (samplerStates != null) {
                samplerStates.add(samplerMapper.toState(sampler));
            }
            if (trees != null) {
                trees.add(samplerPlusTree.getTree());
            }
        }
        state.setCompactSamplerStates(samplerStates);

        if (trees != null) {
            RandomCutTreeMapper treeMapper = new RandomCutTreeMapper();
            List<CompactRandomCutTreeState> treeStates = trees.stream().map(t -> treeMapper.toState((RandomCutTree) t))
                    .collect(Collectors.toList());
            state.setCompactRandomCutTreeStates(treeStates);
        }
        return state;
    }
    /**
     * Create a {@link RandomCutForest} instance from a
     * {@link RandomCutForestState}. If the state contains tree states, then trees
     * will be constructed from the tree state objects. Otherwise, empty trees are
     * created and populated from the sampler data. The resulting forest should be
     * equal in distribution to the forest that the state object was created from.
     *
     * @param state            A Random Cut Forest state object.
     * @param executionContext An executor context that will be used to initialize
     *                         new executors in the Random Cut Forest. If this
     *                         argument is null, then the mapper will look for an
     *                         executor context in the state object.
     * @param seed             A random seed.
     * @return A Random Cut Forest corresponding to the state object.
     * @throws NullPointerException if both the {@code executorContext} method
     *                              argument and the executor context field in the
     *                              state object are null.
     */
    public RandomCutForest toModel(RandomCutForestState state, ExecutionContext executionContext, long seed) {
        ExecutionContext ec;
        if (executionContext != null) {
            // An explicitly supplied context takes precedence over the serialized one.
            ec = executionContext;
        } else {
            checkNotNull(state.getExecutionContext(),
                    "The executor context in the state object is null, an executor context must be passed explicitly to toModel()");
            ec = state.getExecutionContext();
        }
        RandomCutForest.Builder<?> builder = RandomCutForest.builder().numberOfTrees(state.getNumberOfTrees())
                .dimensions(state.getDimensions()).timeDecay(state.getTimeDecay()).sampleSize(state.getSampleSize())
                .centerOfMassEnabled(state.isCenterOfMassEnabled()).outputAfter(state.getOutputAfter())
                .parallelExecutionEnabled(ec.isParallelExecutionEnabled()).threadPoolSize(ec.getThreadPoolSize())
                .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).shingleSize(state.getShingleSize())
                .boundingBoxCacheFraction(state.getBoundingBoxCacheFraction()).compact(state.isCompact())
                .internalShinglingEnabled(state.isInternalShinglingEnabled()).randomSeed(seed);
        // Single-precision (FLOAT_32) forests take a separate reconstruction path.
        if (Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32) {
            return singlePrecisionForest(builder, state, null, null, null);
        }
        Random random = builder.getRandom();
        PointStore pointStore = new PointStoreMapper().convertFromDouble(state.getPointStoreState());
        ComponentList<Integer, float[]> components = new ComponentList<>();
        PointStoreCoordinator<float[]> coordinator = new PointStoreCoordinator<>(pointStore);
        coordinator.setTotalUpdates(state.getTotalUpdates());
        CompactRandomCutTreeContext context = new CompactRandomCutTreeContext();
        context.setPointStore(pointStore);
        context.setMaxSize(state.getSampleSize());
        // This path rebuilds trees by replaying sampler contents, so sampler state is mandatory.
        checkArgument(state.isSaveSamplerStateEnabled(), " conversion cannot proceed without samplers");
        List<CompactSamplerState> samplerStates = state.getCompactSamplerStates();
        CompactSamplerMapper samplerMapper = new CompactSamplerMapper();
        for (int i = 0; i < state.getNumberOfTrees(); i++) {
            CompactSampler compactData = samplerMapper.toModel(samplerStates.get(i));
            RandomCutTree tree = RandomCutTree.builder().capacity(state.getSampleSize()).pointStoreView(pointStore)
                    .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled())
                    .outputAfter(state.getOutputAfter()).centerOfMassEnabled(state.isCenterOfMassEnabled())
                    .randomSeed(random.nextLong()).build();
            CompactSampler sampler = CompactSampler.builder().capacity(state.getSampleSize())
                    .timeDecay(state.getTimeDecay()).randomSeed(random.nextLong()).build();
            sampler.setMaxSequenceIndex(compactData.getMaxSequenceIndex());
            sampler.setMostRecentTimeDecayUpdate(compactData.getMostRecentTimeDecayUpdate());
            // Replay each serialized sample into the fresh tree and sampler.
            for (Weighted<Integer> sample : compactData.getWeightedSample()) {
                Integer reference = sample.getValue();
                Integer newReference = tree.addPoint(reference, sample.getSequenceIndex());
                // addPoint may return a different store index than the one supplied; keep
                // the store's reference counts consistent with the index actually used.
                if (newReference.intValue() != reference.intValue()) {
                    pointStore.incrementRefCount(newReference);
                    pointStore.decrementRefCount(reference);
                }
                sampler.addPoint(newReference, sample.getWeight(), sample.getSequenceIndex());
            }
            components.add(new SamplerPlusTree<>(sampler, tree));
        }
        return new RandomCutForest(builder, coordinator, components, random);
    }
    /**
     * Create a {@link RandomCutForest} instance from a {@link RandomCutForestState}
     * using the executor context in the state object. See
     * {@link #toModel(RandomCutForestState, ExecutionContext, long)}.
     *
     * @param state A Random Cut Forest state object.
     * @param seed  A random seed.
     * @return A Random Cut Forest corresponding to the state object.
     * @throws NullPointerException if the executor context field in the state
     *                              object is null.
     */
    public RandomCutForest toModel(RandomCutForestState state, long seed) {
        // Passing a null context makes toModel read the context stored in the state.
        return toModel(state, null, seed);
    }
    /**
     * Create a {@link RandomCutForest} instance from a {@link RandomCutForestState}
     * using the executor context in the state object. See
     * {@link #toModel(RandomCutForestState, ExecutionContext, long)}.
     *
     * @param state A Random Cut Forest state object.
     * @return A Random Cut Forest corresponding to the state object.
     * @throws NullPointerException if the executor context field in the state
     *                              object is null.
     */
    public RandomCutForest toModel(RandomCutForestState state) {
        // Resolves to the IContextualStateMapper default overload, which uses seed 0.
        return toModel(state, null);
    }
public RandomCutForest singlePrecisionForest(RandomCutForest.Builder<?> builder, RandomCutForestState state,
IPointStore<Integer, float[]> extPointStore, List<ITree<Integer, float[]>> extTrees,
List<IStreamSampler<Integer>> extSamplers) {
checkArgument(builder != null, "builder cannot be null");
checkArgument(extTrees == null || extTrees.size() == state.getNumberOfTrees(), "incorrect number of trees");
checkArgument(extSamplers == null || extSamplers.size() == state.getNumberOfTrees(),
"incorrect number of samplers");
checkArgument(extSamplers != null | state.isSaveSamplerStateEnabled(), " need samplers ");
checkArgument(extPointStore != null || state.isSaveCoordinatorStateEnabled(), " need coordinator state ");
Random random = builder.getRandom();
ComponentList<Integer, float[]> components = new ComponentList<>();
CompactRandomCutTreeContext context = new CompactRandomCutTreeContext();
IPointStore<Integer, float[]> pointStore = (extPointStore == null)
? new PointStoreMapper().toModel(state.getPointStoreState())
: extPointStore;
PointStoreCoordinator<float[]> coordinator = new PointStoreCoordinator<>(pointStore);
coordinator.setTotalUpdates(state.getTotalUpdates());
context.setPointStore(pointStore);
context.setMaxSize(state.getSampleSize());
RandomCutTreeMapper treeMapper = new RandomCutTreeMapper();
List<CompactRandomCutTreeState> treeStates = state.isSaveTreeStateEnabled()
? state.getCompactRandomCutTreeStates()
: null;
CompactSamplerMapper samplerMapper = new CompactSamplerMapper();
List<CompactSamplerState> samplerStates = state.isSaveSamplerStateEnabled() ? state.getCompactSamplerStates()
: null;
for (int i = 0; i < state.getNumberOfTrees(); i++) {
IStreamSampler<Integer> sampler = (extSamplers != null) ? extSamplers.get(i)
: samplerMapper.toModel(samplerStates.get(i), random.nextLong());
ITree<Integer, float[]> tree;
if (extTrees != null) {
tree = extTrees.get(i);
} else if (treeStates != null) {
tree = treeMapper.toModel(treeStates.get(i), context, random.nextLong());
sampler.getSample().forEach(s -> tree.addPointToPartialTree(s.getValue(), s.getSequenceIndex()));
tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, treeStates.get(i).getBoundingBoxCacheFraction());
tree.validateAndReconstruct();
} else {
// using boundingBoxCahce for the new tree
tree = new RandomCutTree.Builder().capacity(state.getSampleSize()).randomSeed(random.nextLong())
.pointStoreView(pointStore).boundingBoxCacheFraction(state.getBoundingBoxCacheFraction())
.centerOfMassEnabled(state.isCenterOfMassEnabled())
.storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).build();
sampler.getSample().forEach(s -> tree.addPoint(s.getValue(), s.getSequenceIndex()));
}
components.add(new SamplerPlusTree<>(sampler, tree));
}
builder.precision(Precision.FLOAT_32);
return new RandomCutForest(builder, coordinator, components, random);
}
}
| 498 |
0 | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest | Create_ds/random-cut-forest-by-aws/Java/core/src/main/java/com/amazon/randomcutforest/state/RandomCutForestState.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.randomcutforest.state;
import static com.amazon.randomcutforest.state.Version.V3_0;
import java.io.Serializable;
import java.util.List;
import lombok.Data;
import com.amazon.randomcutforest.state.sampler.CompactSamplerState;
import com.amazon.randomcutforest.state.store.PointStoreState;
import com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;
/**
 * A class that encapsulates most of the data used in a RandomCutForest such
 * that the forest can be serialized and deserialized.
 */
@Data
public class RandomCutForestState implements Serializable {
    private static final long serialVersionUID = 1L;
    // version tag for the serialized state format (currently V3_0)
    private String version = V3_0;
    // number of updates the forest had processed when the state was captured;
    // restored into the point-store coordinator on deserialization
    private long totalUpdates;
    // sampler time-decay parameter
    private double timeDecay;
    // forest configuration, mirrored from the RandomCutForest being saved
    private int numberOfTrees;
    private int sampleSize;
    private int shingleSize;
    private int dimensions;
    private int outputAfter;
    // presumably indicates whether component states are stored compressed — TODO confirm
    private boolean compressed;
    // presumably indicates the saved tree states are partial, with points
    // replayed from the samplers on restore — TODO confirm against the mapper
    private boolean partialTreeState;
    // fraction of bounding boxes cached; applied when rebuilding trees
    private double boundingBoxCacheFraction;
    // feature flags mirrored from the forest's configuration
    private boolean storeSequenceIndexesEnabled;
    private boolean compact;
    private boolean internalShinglingEnabled;
    private boolean centerOfMassEnabled;
    // numeric precision of stored points, e.g. the name of a Precision enum value
    private String precision;
    // saved component states; may be null when the corresponding save flag below
    // is disabled or an external component is supplied at restore time
    private PointStoreState pointStoreState;
    private List<CompactSamplerState> compactSamplerStates;
    private List<CompactRandomCutTreeState> compactRandomCutTreeStates;
    // executor/threading configuration used when rebuilding the forest
    private ExecutionContext executionContext;
    // Mapper options
    // these flags record which of the component states above were populated
    private boolean saveTreeStateEnabled;
    private boolean saveSamplerStateEnabled;
    private boolean saveCoordinatorStateEnabled;
}
| 499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.